[
  {
    "path": ".gitignore",
    "content": "# Compiled class file\n*.class\n\n# Log file\n*.log\n\n# BlueJ files\n*.ctxt\n\n# Mobile Tools for Java (J2ME)\n.mtj.tmp/\n\n# Package Files #\n*.jar\n*.war\n*.nar\n*.ear\n*.zip\n*.tar.gz\n*.rar\n\n# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml\nhs_err_pid*\nreplay_pid*\n"
  },
  {
    "path": ".idea/.gitignore",
    "content": "# Default ignored files\n/shelf/\n/workspace.xml\n# Editor-based HTTP Client requests\n/httpRequests/\n# Datasource local storage ignored files\n/dataSources/\n/dataSources.local.xml\n"
  },
  {
    "path": ".idea/ApifoxUploaderProjectSetting.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"ApifoxUploaderProjectSetting\">\n    <option name=\"apiAccessToken\" value=\"APS-1yePDncwnzrHnKdV8utrwmzzaFrjIxUf\" />\n    <option name=\"apiProjectIds\">\n      <array>\n        <option value=\"&lt;byte-array&gt;rO0ABXNyADZjb20uaXRhbmdjZW50LmlkZWEucGx1Z2luLmFwaS5hY2NvdW50LlByb2plY3RBbmRNb2R1bGUAAAAAAAAAAQIAFVoABmVuYWJsZUwACG1vZHVsZUlkdAASTGphdmEvbGFuZy9TdHJpbmc7TAAGb3RoZXIxcQB+AAFMAAdvdGhlcjEwcQB+AAFMAAdvdGhlcjExcQB+AAFMAAdvdGhlcjEycQB+AAFMAAZvdGhlcjJxAH4AAUwABm90aGVyM3EAfgABTAAGb3RoZXI0cQB+AAFMAAZvdGhlcjVxAH4AAUwABm90aGVyNnEAfgABTAAGb3RoZXI3cQB+AAFMAAZvdGhlcjhxAH4AAUwABm90aGVyOXEAfgABTAAKcGF0aEJlZm9yZXEAfgABTAANcHJvamVjdEZvbGRlcnEAfgABTAAPcHJvamVjdEZvbGRlcklkcQB+AAFMAAlwcm9qZWN0SWRxAH4AAUwAC3Byb2plY3ROYW1lcQB+AAFMAAxzY2hlbWFGb2xkZXJxAH4AAUwACHNjaGVtYUlkcQB+AAF4cAF0AAZzcGFydGFwcHBwcHBwcHBwcHB0AAB0AAnmoLnnm67lvZV0AAEwdAAJNDk4MDc5My4wdAAGc3BhcnRhcQB+AAVxAH4ABg==&lt;/byte-array&gt;\" />\n        <option value=\"&lt;byte-array&gt;rO0ABXNyADZjb20uaXRhbmdjZW50LmlkZWEucGx1Z2luLmFwaS5hY2NvdW50LlByb2plY3RBbmRNb2R1bGUAAAAAAAAAAQIAFVoABmVuYWJsZUwACG1vZHVsZUlkdAASTGphdmEvbGFuZy9TdHJpbmc7TAAGb3RoZXIxcQB+AAFMAAdvdGhlcjEwcQB+AAFMAAdvdGhlcjExcQB+AAFMAAdvdGhlcjEycQB+AAFMAAZvdGhlcjJxAH4AAUwABm90aGVyM3EAfgABTAAGb3RoZXI0cQB+AAFMAAZvdGhlcjVxAH4AAUwABm90aGVyNnEAfgABTAAGb3RoZXI3cQB+AAFMAAZvdGhlcjhxAH4AAUwABm90aGVyOXEAfgABTAAKcGF0aEJlZm9yZXEAfgABTAANcHJvamVjdEZvbGRlcnEAfgABTAAPcHJvamVjdEZvbGRlcklkcQB+AAFMAAlwcm9qZWN0SWRxAH4AAUwAC3Byb2plY3ROYW1lcQB+AAFMAAxzY2hlbWFGb2xkZXJxAH4AAUwACHNjaGVtYUlkcQB+AAF4cAF0AAx1b2ZzLXNlcnZpY2VwcHBwcHBwcHBwcHB0AAB0AAnmoLnnm67lvZV0AAEwdAAJNTUxMDA2Mi4wdAAEVU9GU3EAfgAFcQB+AAY=&lt;/byte-array&gt;\" />\n        <option 
value=\"&lt;byte-array&gt;rO0ABXNyADZjb20uaXRhbmdjZW50LmlkZWEucGx1Z2luLmFwaS5hY2NvdW50LlByb2plY3RBbmRNb2R1bGUAAAAAAAAAAQIAFVoABmVuYWJsZUwACG1vZHVsZUlkdAASTGphdmEvbGFuZy9TdHJpbmc7TAAGb3RoZXIxcQB+AAFMAAdvdGhlcjEwcQB+AAFMAAdvdGhlcjExcQB+AAFMAAdvdGhlcjEycQB+AAFMAAZvdGhlcjJxAH4AAUwABm90aGVyM3EAfgABTAAGb3RoZXI0cQB+AAFMAAZvdGhlcjVxAH4AAUwABm90aGVyNnEAfgABTAAGb3RoZXI3cQB+AAFMAAZvdGhlcjhxAH4AAUwABm90aGVyOXEAfgABTAAKcGF0aEJlZm9yZXEAfgABTAANcHJvamVjdEZvbGRlcnEAfgABTAAPcHJvamVjdEZvbGRlcklkcQB+AAFMAAlwcm9qZWN0SWRxAH4AAUwAC3Byb2plY3ROYW1lcQB+AAFMAAxzY2hlbWFGb2xkZXJxAH4AAUwACHNjaGVtYUlkcQB+AAF4cAF0ABNzcGFydGEtdW9mcy1zZXJ2aWNlcHBwcHBwcHBwcHBwdAAAdAAJ5qC555uu5b2VdAABMHQACTU1MTAwNjIuMHQABFVPRlNxAH4ABXEAfgAG&lt;/byte-array&gt;\" />\n        <option value=\"&lt;byte-array&gt;rO0ABXNyADZjb20uaXRhbmdjZW50LmlkZWEucGx1Z2luLmFwaS5hY2NvdW50LlByb2plY3RBbmRNb2R1bGUAAAAAAAAAAQIAFVoABmVuYWJsZUwACG1vZHVsZUlkdAASTGphdmEvbGFuZy9TdHJpbmc7TAAGb3RoZXIxcQB+AAFMAAdvdGhlcjEwcQB+AAFMAAdvdGhlcjExcQB+AAFMAAdvdGhlcjEycQB+AAFMAAZvdGhlcjJxAH4AAUwABm90aGVyM3EAfgABTAAGb3RoZXI0cQB+AAFMAAZvdGhlcjVxAH4AAUwABm90aGVyNnEAfgABTAAGb3RoZXI3cQB+AAFMAAZvdGhlcjhxAH4AAUwABm90aGVyOXEAfgABTAAKcGF0aEJlZm9yZXEAfgABTAANcHJvamVjdEZvbGRlcnEAfgABTAAPcHJvamVjdEZvbGRlcklkcQB+AAFMAAlwcm9qZWN0SWRxAH4AAUwAC3Byb2plY3ROYW1lcQB+AAFMAAxzY2hlbWFGb2xkZXJxAH4AAUwACHNjaGVtYUlkcQB+AAF4cAF0ABNzcGFydGUtdWNkbi1zZXJ2aWNlcHBwcHBwcHBwcHBwdAAAdAAJ5qC555uu5b2VdAABMHQACTU2NDUwMDkuMHQABFVDRE5xAH4ABXEAfgAG&lt;/byte-array&gt;\" />\n        <option 
value=\"&lt;byte-array&gt;rO0ABXNyADZjb20uaXRhbmdjZW50LmlkZWEucGx1Z2luLmFwaS5hY2NvdW50LlByb2plY3RBbmRNb2R1bGUAAAAAAAAAAQIAFVoABmVuYWJsZUwACG1vZHVsZUlkdAASTGphdmEvbGFuZy9TdHJpbmc7TAAGb3RoZXIxcQB+AAFMAAdvdGhlcjEwcQB+AAFMAAdvdGhlcjExcQB+AAFMAAdvdGhlcjEycQB+AAFMAAZvdGhlcjJxAH4AAUwABm90aGVyM3EAfgABTAAGb3RoZXI0cQB+AAFMAAZvdGhlcjVxAH4AAUwABm90aGVyNnEAfgABTAAGb3RoZXI3cQB+AAFMAAZvdGhlcjhxAH4AAUwABm90aGVyOXEAfgABTAAKcGF0aEJlZm9yZXEAfgABTAANcHJvamVjdEZvbGRlcnEAfgABTAAPcHJvamVjdEZvbGRlcklkcQB+AAFMAAlwcm9qZWN0SWRxAH4AAUwAC3Byb2plY3ROYW1lcQB+AAFMAAxzY2hlbWFGb2xkZXJxAH4AAUwACHNjaGVtYUlkcQB+AAF4cAF0ABNzcGFydGEtdW9mcy1jb25zb2xlcHBwcHBwcHBwcHBwdAAAdAAJ5qC555uu5b2VdAABMHQACTU1MTAwNjIuMHQABFVPRlNxAH4ABXEAfgAG&lt;/byte-array&gt;\" />\n      </array>\n    </option>\n    <option name=\"treeNodes\" value=\"&lt;byte-array&gt;rO0ABXNyABdqYXZhLnV0aWwuTGlua2VkSGFzaE1hcDTATlwQbMD7AgABWgALYWNjZXNzT3JkZXJ4cgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAAXQACTE1OTkzMzkuMHNyAC5jb20uaXRhbmdjZW50LmlkZWEucGx1Z2luLmFwaS5hY2NvdW50LlRyZWVOb2RlAAAAAAAAAAECAAtMAAdhbGxQYXRodAASTGphdmEvbGFuZy9TdHJpbmc7TAAIY2hpbGRyZW50AA9MamF2YS91dGlsL01hcDtMAAhmdWxsUGF0aHEAfgAFTAADa2V5cQB+AAVMAARuYW1lcQB+AAVMAAhwYXJlbnRJZHEAfgAFTAAJcHJvamVjdElkcQB+AAVMAAtwcm9qZWN0TmFtZXEAfgAFTAAGdGVhbUlkcQB+AAVMAAh0ZWFtTmFtZXEAfgAFTAAEdHlwZXQAMExjb20vaXRhbmdjZW50L2lkZWEvcGx1Z2luL2FwaS9hY2NvdW50L05vZGVUeXBlO3hwdAAM5Liq5Lq656m66Ze0c3EAfgAAP0AAAAAAAAx3CAAAABAAAAAMdAAJMjY5MzE1OS4wc3EAfgAEdAAZ5Liq5Lq656m66Ze0L+S4quS6uumhueebrnNxAH4AAD9AAAAAAAAAdwgAAAAQAAAAAHgAcHEAfgALdAAY5Liq5Lq66aG555uuICgyNjkzMTU5LjApdAAJMTU5OTMzOS4wdAAJMjY5MzE1OS4wdAAM5Liq5Lq66aG555uudAAJMTU5OTMzOS4wcH5yAC5jb20uaXRhbmdjZW50LmlkZWEucGx1Z2luLmFwaS5hY2NvdW50Lk5vZGVUeXBlAAAAAAAAAAASAAB4cgAOamF2YS5sYW5nLkVudW0AAAAAAAAAABIAAHhwdAAHUFJPSkVDVHQACTM1NjU3MzguMHNxAH4ABHQAJeS4quS6uuepuumXtC/oi43nqbnlpJbljZbigJTnlKjmiLfnq69zcQB+AAA/QAAAAAAAAHcIAAAAEAAAAAB4AHBxAH4AGHQAJOiLjeepueWkluWNluKAlOeUqOaIt+erryAoMzU2NTczOC4wKXQACTE1OTkzMzkuMHQACTM1
NjU3MzguMHQAGOiLjeepueWkluWNluKAlOeUqOaIt+err3QACTE1OTkzMzkuMHBxAH4AFnQACTM1NjU3NzEuMHNxAH4ABHQAJeS4quS6uuepuumXtC/oi43nqbnlpJbljZbigJTnrqHnkIbnq69zcQB+AAA/QAAAAAAAAHcIAAAAEAAAAAB4AHBxAH4AIXQAJOiLjeepueWkluWNluKAlOeuoeeQhuerryAoMzU2NTc3MS4wKXQACTE1OTkzMzkuMHQACTM1NjU3NzEuMHQAGOiLjeepueWkluWNluKAlOeuoeeQhuerr3QACTE1OTkzMzkuMHBxAH4AFnQACTQ0MDQzNzAuMHNxAH4ABHQAFuS4quS6uuepuumXtC/ova/ku7bmna9zcQB+AAA/QAAAAAAAAHcIAAAAEAAAAAB4AHBxAH4AKnQAFei9r+S7tuadryAoNDQwNDM3MC4wKXQACTE1OTkzMzkuMHQACTQ0MDQzNzAuMHQACei9r+S7tuadr3QACTE1OTkzMzkuMHBxAH4AFnQACTQ2MjgzNTguMHNxAH4ABHQAJeS4quS6uuepuumXtC/kurrlipvotYTmupDnrqHnkIbns7vnu59zcQB+AAA/QAAAAAAAAHcIAAAAEAAAAAB4AHBxAH4AM3QAJOS6uuWKm+i1hOa6kOeuoeeQhuezu+e7nyAoNDYyODM1OC4wKXQACTE1OTkzMzkuMHQACTQ2MjgzNTguMHQAGOS6uuWKm+i1hOa6kOeuoeeQhuezu+e7n3QACTE1OTkzMzkuMHBxAH4AFnQACTQ3MTQzOTMuMHNxAH4ABHQAEuS4quS6uuepuumXtC9tb3ZpZXNxAH4AAD9AAAAAAAAAdwgAAAAQAAAAAHgAcHEAfgA8dAARbW92aWUgKDQ3MTQzOTMuMCl0AAkxNTk5MzM5LjB0AAk0NzE0MzkzLjB0AAVtb3ZpZXQACTE1OTkzMzkuMHBxAH4AFnQACTQ5ODA3OTMuMHNxAH4ABHQAE+S4quS6uuepuumXtC9zcGFydGFzcQB+AAA/QAAAAAAAAHcIAAAAEAAAAAB4AHBxAH4ARXQAEnNwYXJ0YSAoNDk4MDc5My4wKXQACTE1OTkzMzkuMHQACTQ5ODA3OTMuMHQABnNwYXJ0YXQACTE1OTkzMzkuMHBxAH4AFnQACTUxOTAyMjguMHNxAH4ABHQAFuS4quS6uuepuumXtC9rZW7kupHnm5hzcQB+AAA/QAAAAAAAAHcIAAAAEAAAAAB4AHBxAH4ATnQAFWtlbuS6keebmCAoNTE5MDIyOC4wKXQACTE1OTkzMzkuMHQACTUxOTAyMjguMHQACWtlbuS6keebmHQACTE1OTkzMzkuMHBxAH4AFnQACTUyMDg4NDQuMHNxAH4ABHQAF+S4quS6uuepuumXtC9NaW5NZWV0aW5nc3EAfgAAP0AAAAAAAAB3CAAAABAAAAAAeABwcQB+AFd0ABZNaW5NZWV0aW5nICg1MjA4ODQ0LjApdAAJMTU5OTMzOS4wdAAJNTIwODg0NC4wdAAKTWluTWVldGluZ3QACTE1OTkzMzkuMHBxAH4AFnQACTUzNDY4OTkuMHNxAH4ABHQAJeS4quS6uuepuumXtC/psbzph4zlnKjnur/kuqTlj4vlubPlj7BzcQB+AAA/QAAAAAAAAHcIAAAAEAAAAAB4AHBxAH4AYHQAJOmxvOmHjOWcqOe6v+S6pOWPi+W5s+WPsCAoNTM0Njg5OS4wKXQACTE1OTkzMzkuMHQACTUzNDY4OTkuMHQAGOmxvOmHjOWcqOe6v+S6pOWPi+W5s+WPsHQACTE1OTkzMzkuMHBxAH4AFnQACTU1MTAwNjIuMHNxAH4ABHQAEeS4quS6uuepuumXtC9VT0ZTc3EAfgAAP0AAAAAAAAx3CAAAABAAAAACdAALNC41ODUyMDM1RTdzcQB+AAR0ACDkuKrkurrnqbrpl7QvVU9GUy9VT0ZT
Q29udHJvbGxlcnNxAH4AAD9AAAAAAAAAdwgAAAAQAAAAAHgAdAAOVU9GU0NvbnRyb2xsZXJxAH4AbXQADlVPRlNDb250cm9sbGVydAAJNTUxMDA2Mi4wdAAJNTUxMDA2Mi4wdAAEVU9GU3BwfnEAfgAUdAAGRk9MREVSdAALNC43MTg0MjE3RTdzcQB+AAR0ACLkuKrkurrnqbrpl7QvVU9GUy9CdWNrZXRDb250cm9sbGVyc3EAfgAAP0AAAAAAAAB3CAAAABAAAAAAeAB0ABBCdWNrZXRDb250cm9sbGVycQB+AHh0ABBCdWNrZXRDb250cm9sbGVydAAJNTUxMDA2Mi4wdAAJNTUxMDA2Mi4wdAAEVU9GU3BwcQB+AHZ4AHBxAH4AaXQAEFVPRlMgKDU1MTAwNjIuMCl0AAkxNTk5MzM5LjB0AAk1NTEwMDYyLjB0AARVT0ZTdAAJMTU5OTMzOS4wcHEAfgAWdAAJNTY0NTAwOS4wc3EAfgAEdAAR5Liq5Lq656m66Ze0L1VDRE5zcQB+AAA/QAAAAAAAAHcIAAAAEAAAAAB4AHBxAH4AhnQAEFVDRE4gKDU2NDUwMDkuMCl0AAkxNTk5MzM5LjB0AAk1NjQ1MDA5LjB0AARVQ0ROdAAJMTU5OTMzOS4wcHEAfgAWeABwcQB+AAN0AAzkuKrkurrnqbrpl7RwcHB0AAkxNTk5MzM5LjB0AAzkuKrkurrnqbrpl7R+cQB+ABR0AARURUFNeAA=&lt;/byte-array&gt;\" />\n    <option name=\"treeNodesJTree\" value=\"&lt;byte-array&gt;rO0ABXNyACFqYXZheC5zd2luZy50cmVlLkRlZmF1bHRUcmVlTW9kZWynvpEmGsXl2QMAA1oAEmFza3NBbGxvd3NDaGlsZHJlbkwADGxpc3RlbmVyTGlzdHQAJUxqYXZheC9zd2luZy9ldmVudC9FdmVudExpc3RlbmVyTGlzdDtMAARyb290dAAbTGphdmF4L3N3aW5nL3RyZWUvVHJlZU5vZGU7eHAAc3IAI2phdmF4LnN3aW5nLmV2ZW50LkV2ZW50TGlzdGVuZXJMaXN0kUjMLXPfDt4DAAB4cHB4c3IAJ2phdmF4LnN3aW5nLnRyZWUuRGVmYXVsdE11dGFibGVUcmVlTm9kZcRYv/zyqHHgAwADWgAOYWxsb3dzQ2hpbGRyZW5MAAhjaGlsZHJlbnQAEkxqYXZhL3V0aWwvVmVjdG9yO0wABnBhcmVudHQAIkxqYXZheC9zd2luZy90cmVlL011dGFibGVUcmVlTm9kZTt4cAFzcgAQamF2YS51dGlsLlZlY3RvctmXfVuAO68BAwADSQARY2FwYWNpdHlJbmNyZW1lbnRJAAxlbGVtZW50Q291bnRbAAtlbGVtZW50RGF0YXQAE1tMamF2YS9sYW5nL09iamVjdDt4cAAAAAAAAAABdXIAE1tMamF2YS5sYW5nLk9iamVjdDuQzlifEHMpbAIAAHhwAAAACnNxAH4ABgFzcQB+AAoAAAAAAAAADHVxAH4ADQAAABRzcQB+AAYBcHEAfgAPdXEAfgANAAAAAnQACnVzZXJPYmplY3RzcgAuY29tLml0YW5nY2VudC5pZGVhLnBsdWdpbi5hcGkuYWNjb3VudC5UcmVlTm9kZQAAAAAAAAABAgALTAAHYWxsUGF0aHQAEkxqYXZhL2xhbmcvU3RyaW5nO0wACGNoaWxkcmVudAAPTGphdmEvdXRpbC9NYXA7TAAIZnVsbFBhdGhxAH4AFkwAA2tleXEAfgAWTAAEbmFtZXEAfgAWTAAIcGFyZW50SWRxAH4AFkwACXByb2plY3RJZHEAfgAWTAALcHJvamVjdE5hbWVxAH4AFkwABnRlYW1JZHEAfgAWTAAIdGVhbU5hbWVxAH4AFkwABHR5cGV0ADBMY29tL2l0YW5nY2VudC9pZGVh
L3BsdWdpbi9hcGkvYWNjb3VudC9Ob2RlVHlwZTt4cHQAGeS4quS6uuepuumXtC/kuKrkurrpobnnm65zcgAXamF2YS51dGlsLkxpbmtlZEhhc2hNYXA0wE5cEGzA+wIAAVoAC2FjY2Vzc09yZGVyeHIAEWphdmEudXRpbC5IYXNoTWFwBQfawcMWYNEDAAJGAApsb2FkRmFjdG9ySQAJdGhyZXNob2xkeHA/QAAAAAAAAHcIAAAAEAAAAAB4AHB0AAkyNjkzMTU5LjB0ABjkuKrkurrpobnnm64gKDI2OTMxNTkuMCl0AAkxNTk5MzM5LjB0AAkyNjkzMTU5LjB0AAzkuKrkurrpobnnm650AAkxNTk5MzM5LjBwfnIALmNvbS5pdGFuZ2NlbnQuaWRlYS5wbHVnaW4uYXBpLmFjY291bnQuTm9kZVR5cGUAAAAAAAAAABIAAHhyAA5qYXZhLmxhbmcuRW51bQAAAAAAAAAAEgAAeHB0AAdQUk9KRUNUeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ACXkuKrkurrnqbrpl7Qv6IuN56m55aSW5Y2W4oCU55So5oi356uvc3EAfgAbP0AAAAAAAAB3CAAAABAAAAAAeABwdAAJMzU2NTczOC4wdAAk6IuN56m55aSW5Y2W4oCU55So5oi356uvICgzNTY1NzM4LjApdAAJMTU5OTMzOS4wdAAJMzU2NTczOC4wdAAY6IuN56m55aSW5Y2W4oCU55So5oi356uvdAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ACXkuKrkurrnqbrpl7Qv6IuN56m55aSW5Y2W4oCU566h55CG56uvc3EAfgAbP0AAAAAAAAB3CAAAABAAAAAAeABwdAAJMzU2NTc3MS4wdAAk6IuN56m55aSW5Y2W4oCU566h55CG56uvICgzNTY1NzcxLjApdAAJMTU5OTMzOS4wdAAJMzU2NTc3MS4wdAAY6IuN56m55aSW5Y2W4oCU566h55CG56uvdAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ABbkuKrkurrnqbrpl7Qv6L2v5Lu25p2vc3EAfgAbP0AAAAAAAAB3CAAAABAAAAAAeABwdAAJNDQwNDM3MC4wdAAV6L2v5Lu25p2vICg0NDA0MzcwLjApdAAJMTU5OTMzOS4wdAAJNDQwNDM3MC4wdAAJ6L2v5Lu25p2vdAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ACXkuKrkurrnqbrpl7Qv5Lq65Yqb6LWE5rqQ566h55CG57O757ufc3EAfgAbP0AAAAAAAAB3CAAAABAAAAAAeABwdAAJNDYyODM1OC4wdAAk5Lq65Yqb6LWE5rqQ566h55CG57O757ufICg0NjI4MzU4LjApdAAJMTU5OTMzOS4wdAAJNDYyODM1OC4wdAAY5Lq65Yqb6LWE5rqQ566h55CG57O757ufdAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ABLkuKrkurrnqbrpl7QvbW92aWVzcQB+ABs/QAAAAAAAAHcIAAAAEAAAAAB4AHB0AAk0NzE0MzkzLjB0ABFtb3ZpZSAoNDcxNDM5My4wKXQACTE1OTkzMzkuMHQACTQ3MTQzOTMuMHQABW1vdmlldAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ABPkuKrkurrnqbrpl7Qvc3BhcnRhc3EAfgAbP0AAAAAAAAB3CAAAABAAAAAAeABwdAAJNDk4MDc5My4wdAASc3BhcnRhICg0
OTgwNzkzLjApdAAJMTU5OTMzOS4wdAAJNDk4MDc5My4wdAAGc3BhcnRhdAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ABbkuKrkurrnqbrpl7Qva2Vu5LqR55uYc3EAfgAbP0AAAAAAAAB3CAAAABAAAAAAeABwdAAJNTE5MDIyOC4wdAAVa2Vu5LqR55uYICg1MTkwMjI4LjApdAAJMTU5OTMzOS4wdAAJNTE5MDIyOC4wdAAJa2Vu5LqR55uYdAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ABfkuKrkurrnqbrpl7QvTWluTWVldGluZ3NxAH4AGz9AAAAAAAAAdwgAAAAQAAAAAHgAcHQACTUyMDg4NDQuMHQAFk1pbk1lZXRpbmcgKDUyMDg4NDQuMCl0AAkxNTk5MzM5LjB0AAk1MjA4ODQ0LjB0AApNaW5NZWV0aW5ndAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFwcQB+AA91cQB+AA0AAAACcQB+ABRzcQB+ABV0ACXkuKrkurrnqbrpl7Qv6bG86YeM5Zyo57q/5Lqk5Y+L5bmz5Y+wc3EAfgAbP0AAAAAAAAB3CAAAABAAAAAAeABwdAAJNTM0Njg5OS4wdAAk6bG86YeM5Zyo57q/5Lqk5Y+L5bmz5Y+wICg1MzQ2ODk5LjApdAAJMTU5OTMzOS4wdAAJNTM0Njg5OS4wdAAY6bG86YeM5Zyo57q/5Lqk5Y+L5bmz5Y+wdAAJMTU5OTMzOS4wcHEAfgAmeHNxAH4ABgFzcQB+AAoAAAAAAAAAAnVxAH4ADQAAAApzcQB+AAYBcHEAfgCLdXEAfgANAAAAAnEAfgAUc3EAfgAVdAAg5Liq5Lq656m66Ze0L1VPRlMvVU9GU0NvbnRyb2xsZXJzcQB+ABs/QAAAAAAAAHcIAAAAEAAAAAB4AHQADlVPRlNDb250cm9sbGVydAALNC41ODUyMDM1RTd0AA5VT0ZTQ29udHJvbGxlcnQACTU1MTAwNjIuMHQACTU1MTAwNjIuMHQABFVPRlNwcH5xAH4AJHQABkZPTERFUnhzcQB+AAYBcHEAfgCLdXEAfgANAAAAAnEAfgAUc3EAfgAVdAAi5Liq5Lq656m66Ze0L1VPRlMvQnVja2V0Q29udHJvbGxlcnNxAH4AGz9AAAAAAAAAdwgAAAAQAAAAAHgAdAAQQnVja2V0Q29udHJvbGxlcnQACzQuNzE4NDIxN0U3dAAQQnVja2V0Q29udHJvbGxlcnQACTU1MTAwNjIuMHQACTU1MTAwNjIuMHQABFVPRlNwcHEAfgCZeHBwcHBwcHBweHEAfgAPdXEAfgANAAAAAnEAfgAUc3EAfgAVdAAR5Liq5Lq656m66Ze0L1VPRlNzcQB+ABs/QAAAAAAAAHcIAAAAEAAAAAB4AHB0AAk1NTEwMDYyLjB0ABBVT0ZTICg1NTEwMDYyLjApdAAJMTU5OTMzOS4wdAAJNTUxMDA2Mi4wdAAEVU9GU3QACTE1OTkzMzkuMHBxAH4AJnhzcQB+AAYBcHEAfgAPdXEAfgANAAAAAnEAfgAUc3EAfgAVdAAR5Liq5Lq656m66Ze0L1VDRE5zcQB+ABs/QAAAAAAAAHcIAAAAEAAAAAB4AHB0AAk1NjQ1MDA5LjB0ABBVQ0ROICg1NjQ1MDA5LjApdAAJMTU5OTMzOS4wdAAJNTY0NTAwOS4wdAAEVUNETnQACTE1OTkzMzkuMHBxAH4AJnhwcHBwcHBwcHhxAH4ACXVxAH4ADQAAAAJxAH4AFHNxAH4AFXQADOS4quS6uuepuumXtHNxAH4AGz9AAAAAAAAAdwgAAAAQAAAAAHgAcHQACTE1OTkzMzkuMHQADOS4quS6uuepuumXtHBwcHQACTE1OTkzMzkuMHQADOS4
quS6uuepuumXtH5xAH4AJHQABFRFQU14cHBwcHBwcHBweHB1cQB+AA0AAAACcQB+ABRzcQB+ABV0AARSb290cHB0AAEwcQB+AMdwcHBwcHEAfgDDeHNxAH4ACgAAAAAAAAACdXEAfgANAAAACnQABHJvb3RxAH4ACXBwcHBwcHBweHg=&lt;/byte-array&gt;\" />\n  </component>\n</project>"
  },
  {
    "path": ".idea/codeStyles/codeStyleConfig.xml",
    "content": "<component name=\"ProjectCodeStyleConfiguration\">\n  <state>\n    <option name=\"PREFERRED_PROJECT_CODE_STYLE\" value=\"Default\" />\n  </state>\n</component>"
  },
  {
    "path": ".idea/compiler.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"CompilerConfiguration\">\n    <annotationProcessing>\n      <profile default=\"true\" name=\"Default\" enabled=\"true\" />\n      <profile name=\"Maven default annotation processors profile\" enabled=\"true\">\n        <sourceOutputDir name=\"target/generated-sources/annotations\" />\n        <sourceTestOutputDir name=\"target/generated-test-sources/test-annotations\" />\n        <outputRelativeToContentRoot value=\"true\" />\n        <module name=\"sparta-uac-console\" />\n        <module name=\"radium\" />\n        <module name=\"shadow\" />\n        <module name=\"hydra-framework-runtime\" />\n        <module name=\"hydra-architecture\" />\n        <module name=\"odin-architecture\" />\n        <module name=\"Summer\" />\n        <module name=\"Pinecone\" />\n        <module name=\"sparta-api-uac\" />\n        <module name=\"heist-framework-architecture\" />\n        <module name=\"sparta-uofs-console\" />\n        <module name=\"hydra-kom-default-driver\" />\n        <module name=\"hydra-message-control\" />\n        <module name=\"hydra-framework-service\" />\n        <module name=\"hydra-framework-storage\" />\n        <module name=\"ulf-lib-oltp-rdb\" />\n        <module name=\"redqueen-framework-service\" />\n        <module name=\"skynet-system\" />\n        <module name=\"Sparta\" />\n        <module name=\"hydra-lib-uofs-cache\" />\n        <module name=\"ulf-lib-construction\" />\n        <module name=\"heist-system-schedule\" />\n        <module name=\"hydra-framework-device\" />\n        <module name=\"redstone-message-stones\" />\n        <module name=\"hydra-lib-thrift-sdk\" />\n        <module name=\"redstone-architecture\" />\n        <module name=\"hydra-service-control\" />\n        <module name=\"heist-http-client-okhttp-suit\" />\n        <module name=\"hydra-architecture-conduct\" />\n        <module name=\"hydra-system-reign\" />\n        
<module name=\"hydra-architecture-message\" />\n        <module name=\"skynet-architecture\" />\n        <module name=\"sparta-utask-console\" />\n        <module name=\"sailor-stream-distribute-sdk\" />\n        <module name=\"sparta-uofs-service\" />\n        <module name=\"springram\" />\n        <module name=\"sparta-ucdn-console\" />\n        <module name=\"ulfhedinn\" />\n        <module name=\"hydra-r-atlas-default-driver\" />\n        <module name=\"odin-framework-runtime\" />\n        <module name=\"Shadow\" />\n        <module name=\"ender-system-hydra\" />\n        <module name=\"hydra-lib-grpc-service-sdk\" />\n        <module name=\"sauron-core\" />\n        <module name=\"sparta-api-uofs\" />\n        <module name=\"summer\" />\n        <module name=\"slime\" />\n        <module name=\"jelly\" />\n        <module name=\"Radium\" />\n        <module name=\"redqueen-system\" />\n        <module name=\"skynet-cloud-deploy\" />\n        <module name=\"Saurye\" />\n        <module name=\"hydra-message-broadcast\" />\n        <module name=\"hydra-architecture-storage\" />\n        <module name=\"Slime\" />\n        <module name=\"redqueen-architecture\" />\n        <module name=\"sparta-ucdn-service\" />\n        <module name=\"Hydra\" />\n        <module name=\"hydra-system-tritium\" />\n        <module name=\"hydra-framework-config\" />\n        <module name=\"redqueen-computation-suit\" />\n        <module name=\"sparta-core-console\" />\n        <module name=\"Springram\" />\n        <module name=\"pinecone\" />\n        <module name=\"Ulfhedinn\" />\n        <module name=\"Jelly\" />\n        <module name=\"saurye\" />\n      </profile>\n    </annotationProcessing>\n    <bytecodeTargetLevel>\n      <module name=\"Autumn\" target=\"11\" />\n      <module name=\"Console\" target=\"11\" />\n      <module name=\"File\" target=\"11\" />\n      <module name=\"Logger\" target=\"11\" />\n      <module name=\"Messenger\" target=\"11\" />\n      <module 
name=\"Netty\" target=\"11\" />\n      <module name=\"Pinecontum\" target=\"11\" />\n      <module name=\"TaskJuggler\" target=\"11\" />\n    </bytecodeTargetLevel>\n  </component>\n  <component name=\"JavacSettings\">\n    <option name=\"ADDITIONAL_OPTIONS_OVERRIDE\">\n      <module name=\"Autumn\" options=\"-parameters\" />\n      <module name=\"Console\" options=\"-parameters\" />\n      <module name=\"File\" options=\"-parameters\" />\n      <module name=\"Hydra\" options=\"-parameters\" />\n      <module name=\"Jelly\" options=\"-parameters\" />\n      <module name=\"Logger\" options=\"-parameters\" />\n      <module name=\"Messenger\" options=\"-parameters\" />\n      <module name=\"Netty\" options=\"-parameters\" />\n      <module name=\"Pinecone\" options=\"-parameters\" />\n      <module name=\"Pinecones\" options=\"-parameters\" />\n      <module name=\"Pinecontum\" options=\"-parameters\" />\n      <module name=\"Radium\" options=\"-parameters\" />\n      <module name=\"Sauron\" options=\"-parameters\" />\n      <module name=\"Saurons\" options=\"\" />\n      <module name=\"Saurye\" options=\"-parameters\" />\n      <module name=\"Shadow\" options=\"-parameters\" />\n      <module name=\"Slime\" options=\"-parameters\" />\n      <module name=\"Sparta\" options=\"-parameters\" />\n      <module name=\"Springram\" options=\"-parameters\" />\n      <module name=\"Summer\" options=\"-parameters\" />\n      <module name=\"TaskJuggler\" options=\"-parameters\" />\n      <module name=\"Ulfhedinn\" options=\"-parameters\" />\n      <module name=\"Walnuts\" options=\"-parameters\" />\n      <module name=\"ender-system-hydra\" options=\"-parameters\" />\n      <module name=\"heist-framework-architecture\" options=\"-parameters\" />\n      <module name=\"heist-http-client-okhttp-suit\" options=\"-parameters\" />\n      <module name=\"heist-system-schedule\" options=\"-parameters\" />\n      <module name=\"hydra\" options=\"\" />\n      <module 
name=\"hydra-architecture\" options=\"-parameters\" />\n      <module name=\"hydra-architecture-conduct\" options=\"-parameters\" />\n      <module name=\"hydra-architecture-message\" options=\"-parameters\" />\n      <module name=\"hydra-architecture-storage\" options=\"-parameters\" />\n      <module name=\"hydra-framework-config\" options=\"-parameters\" />\n      <module name=\"hydra-framework-device\" options=\"-parameters\" />\n      <module name=\"hydra-framework-runtime\" options=\"-parameters\" />\n      <module name=\"hydra-framework-service\" options=\"-parameters\" />\n      <module name=\"hydra-framework-storage\" options=\"-parameters\" />\n      <module name=\"hydra-kom-default-driver\" options=\"-parameters\" />\n      <module name=\"hydra-lib-grpc-service-sdk\" options=\"-parameters\" />\n      <module name=\"hydra-lib-thrift-sdk\" options=\"-parameters\" />\n      <module name=\"hydra-lib-uofs-cache\" options=\"-parameters\" />\n      <module name=\"hydra-message-broadcast\" options=\"-parameters\" />\n      <module name=\"hydra-message-control\" options=\"-parameters\" />\n      <module name=\"hydra-r-atlas-default-driver\" options=\"-parameters\" />\n      <module name=\"hydra-service-control\" options=\"-parameters\" />\n      <module name=\"hydra-system-reign\" options=\"-parameters\" />\n      <module name=\"hydra-system-tritium\" options=\"-parameters\" />\n      <module name=\"jelly\" options=\"-parameters\" />\n      <module name=\"odin-architecture\" options=\"-parameters\" />\n      <module name=\"odin-framework-runtime\" options=\"-parameters\" />\n      <module name=\"pinecone\" options=\"-parameters\" />\n      <module name=\"radium\" options=\"-parameters\" />\n      <module name=\"redqueen-architecture\" options=\"-parameters\" />\n      <module name=\"redqueen-computation-suit\" options=\"-parameters\" />\n      <module name=\"redqueen-framework-service\" options=\"-parameters\" />\n      <module name=\"redqueen-system\" 
options=\"-parameters\" />\n      <module name=\"redstone-architecture\" options=\"-parameters\" />\n      <module name=\"redstone-message-stones\" options=\"-parameters\" />\n      <module name=\"sailor-stream-distribute-sdk\" options=\"-parameters\" />\n      <module name=\"sauron-core\" options=\"-parameters\" />\n      <module name=\"saurye\" options=\"-parameters\" />\n      <module name=\"shadow\" options=\"-parameters\" />\n      <module name=\"skynet-architecture\" options=\"-parameters\" />\n      <module name=\"skynet-cloud-deploy\" options=\"-parameters\" />\n      <module name=\"skynet-system\" options=\"-parameters\" />\n      <module name=\"slime\" options=\"-parameters\" />\n      <module name=\"sparta\" options=\"\" />\n      <module name=\"sparta-api-uac\" options=\"-parameters\" />\n      <module name=\"sparta-api-uofs\" options=\"-parameters\" />\n      <module name=\"sparta-core-console\" options=\"-parameters\" />\n      <module name=\"sparta-uac-console\" options=\"-parameters\" />\n      <module name=\"sparta-ucdn-console\" options=\"-parameters\" />\n      <module name=\"sparta-ucdn-service\" options=\"-parameters\" />\n      <module name=\"sparta-uofs-console\" options=\"-parameters\" />\n      <module name=\"sparta-uofs-service\" options=\"-parameters\" />\n      <module name=\"sparta-utask-console\" options=\"-parameters\" />\n      <module name=\"springram\" options=\"-parameters\" />\n      <module name=\"summer\" options=\"-parameters\" />\n      <module name=\"ulf-lib-construction\" options=\"-parameters\" />\n      <module name=\"ulf-lib-oltp-rdb\" options=\"-parameters\" />\n      <module name=\"ulfhedinn\" options=\"-parameters\" />\n    </option>\n  </component>\n</project>"
  },
  {
    "path": ".idea/dataSources.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"DataSourceManagerImpl\" format=\"xml\" multifile-model=\"true\">\n    <data-source source=\"LOCAL\" name=\"@node1.nutgit.com\" uuid=\"71727912-1734-4331-8895-792542b67243\">\n      <driver-ref>mysql.8</driver-ref>\n      <synchronize>true</synchronize>\n      <jdbc-driver>com.mysql.cj.jdbc.Driver</jdbc-driver>\n      <jdbc-url>jdbc:mysql://node1.nutgit.com:13393</jdbc-url>\n      <jdbc-additional-properties>\n        <property name=\"com.intellij.clouds.kubernetes.db.host.port\" />\n        <property name=\"com.intellij.clouds.kubernetes.db.enabled\" value=\"false\" />\n        <property name=\"com.intellij.clouds.kubernetes.db.container.port\" />\n      </jdbc-additional-properties>\n      <working-dir>$ProjectFileDir$</working-dir>\n    </data-source>\n    <data-source source=\"LOCAL\" name=\"@localhost\" uuid=\"593931c7-fd46-4365-a6f4-71bd41b5fd8b\">\n      <driver-ref>mysql.8</driver-ref>\n      <synchronize>true</synchronize>\n      <jdbc-driver>com.mysql.cj.jdbc.Driver</jdbc-driver>\n      <jdbc-url>jdbc:mysql://localhost:3306</jdbc-url>\n      <jdbc-additional-properties>\n        <property name=\"com.intellij.clouds.kubernetes.db.host.port\" />\n        <property name=\"com.intellij.clouds.kubernetes.db.enabled\" value=\"false\" />\n        <property name=\"com.intellij.clouds.kubernetes.db.container.port\" />\n      </jdbc-additional-properties>\n      <working-dir>$ProjectFileDir$</working-dir>\n    </data-source>\n  </component>\n</project>"
  },
  {
    "path": ".idea/dictionaries/project.xml",
    "content": "<component name=\"ProjectDictionaryState\">\n  <dictionary name=\"project\">\n    <words>\n      <w>Clientile</w>\n    </words>\n  </dictionary>\n</component>"
  },
  {
    "path": ".idea/dictionaries/undefined.xml",
    "content": "<component name=\"ProjectDictionaryState\">\n  <dictionary name=\"undefined\">\n    <words>\n      <w>arraytron</w>\n      <w>heistgram</w>\n      <w>heistotron</w>\n      <w>heistron</w>\n      <w>indexable</w>\n      <w>indexables</w>\n      <w>maptron</w>\n      <w>nonjron</w>\n      <w>pinecone</w>\n      <w>sauron</w>\n      <w>servgram</w>\n      <w>servtus</w>\n      <w>sitemap</w>\n    </words>\n  </dictionary>\n</component>"
  },
  {
    "path": ".idea/encodings.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"Encoding\">\n    <file url=\"file://$PROJECT_DIR$\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Archcraft/ender-system-hydra/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Archcraft/redstone-architecture/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Archcraft/redstone-message-stones/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Archcraft/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Archcraft/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Console\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Console/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/File\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/File/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-architecture-conduct/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-architecture-message/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-architecture-storage/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-architecture/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-framework-config/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-framework-device/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-framework-runtime/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-framework-service/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-framework-storage/src/main/java\" charset=\"UTF-8\" />\n    <file 
url=\"file://$PROJECT_DIR$/Hydra/hydra-kom-default-driver/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-lib-grpc-service-sdk/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-lib-thrift-sdk/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-lib-uofs-cache/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-message-broadcast/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-message-control/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-r-atlas-default-driver/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-service-control/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-system-reign/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/hydra-system-tritium/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Hydra/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Logger\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Logger/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Messenger\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Messenger/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Odin/odin-architecture/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Odin/odin-framework-runtime/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Odin/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Odin/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Hydra\" 
charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Hydra/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Jelly\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Jelly/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Pinecone\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Pinecone/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Slime\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Slime/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Springram\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Springram/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Summer\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Summer/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Ulfhedinn\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/Ulfhedinn/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/ulf-lib-construction/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Pinecones/ulf-lib-oltp-rdb/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/RedQueen/redqueen-architecture/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/RedQueen/redqueen-computation-suit/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/RedQueen/redqueen-framework-service/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/RedQueen/redqueen-system/src/main/java\" charset=\"UTF-8\" />\n    <file 
url=\"file://$PROJECT_DIR$/RedQueen/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/RedQueen/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/Radium\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/Radium/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/Saurye\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/Saurye/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/Shadow\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/Shadow/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/heist-framework-architecture/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/heist-http-client-okhttp-suit/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/heist-system-schedule/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/sauron-core/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Saurons/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Skynet/skynet-architecture/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Skynet/skynet-cloud-deploy/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Skynet/skynet-system/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Skynet/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Skynet/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/sparta-api-uac/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/sparta-api-uofs/src/main/java\" charset=\"UTF-8\" />\n    <file 
url=\"file://$PROJECT_DIR$/Sparta/sparta-core-console/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/sparta-uac-console/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/sparta-ucdn-console/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/sparta-ucdn-service/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/sparta-uofs-console/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/sparta-uofs-service/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/sparta-utask-console/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Sparta/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/TaskJuggler\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/TaskJuggler/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Walnuts\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Walnuts/Sparta\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Walnuts/Sparta/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Walnuts/sailor-stream-distribute-sdk/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Walnuts/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/Walnuts/src/main/resources\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/src/main/java\" charset=\"UTF-8\" />\n    <file url=\"file://$PROJECT_DIR$/src/main/resources\" charset=\"UTF-8\" />\n  </component>\n</project>"
  },
  {
    "path": ".idea/jarRepositories.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"RemoteRepositoriesConfiguration\">\n    <remote-repository>\n      <option name=\"id\" value=\"central\" />\n      <option name=\"name\" value=\"Central Repository\" />\n      <option name=\"url\" value=\"https://repo.maven.apache.org/maven2\" />\n    </remote-repository>\n    <remote-repository>\n      <option name=\"id\" value=\"central\" />\n      <option name=\"name\" value=\"Central Repository\" />\n      <option name=\"url\" value=\"http://maven.aliyun.com/nexus/content/repositories/central/\" />\n    </remote-repository>\n    <remote-repository>\n      <option name=\"id\" value=\"central\" />\n      <option name=\"name\" value=\"Maven Central repository\" />\n      <option name=\"url\" value=\"https://repo1.maven.org/maven2\" />\n    </remote-repository>\n    <remote-repository>\n      <option name=\"id\" value=\"jboss.community\" />\n      <option name=\"name\" value=\"JBoss Community repository\" />\n      <option name=\"url\" value=\"https://repository.jboss.org/nexus/content/repositories/public/\" />\n    </remote-repository>\n    <remote-repository>\n      <option name=\"id\" value=\"central\" />\n      <option name=\"name\" value=\"Central Repository\" />\n      <option name=\"url\" value=\"https://maven.aliyun.com/repository/public\" />\n    </remote-repository>\n  </component>\n</project>"
  },
  {
    "path": ".idea/misc.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"ExternalStorageConfigurationManager\" enabled=\"true\" />\n  <component name=\"MavenProjectsManager\">\n    <option name=\"originalFiles\">\n      <list>\n        <option value=\"$PROJECT_DIR$/pom.xml\" />\n        <option value=\"$PROJECT_DIR$/Pinecones/Summer/pom.xml\" />\n      </list>\n    </option>\n  </component>\n  <component name=\"ProjectRootManager\" version=\"2\" languageLevel=\"JDK_11\" default=\"true\" project-jdk-name=\"11_x64\" project-jdk-type=\"JavaSDK\">\n    <output url=\"file://$PROJECT_DIR$/out\" />\n  </component>\n</project>"
  },
  {
    "path": ".idea/sqldialects.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"SqlDialectMappings\">\n    <file url=\"PROJECT\" dialect=\"MySQL\" />\n  </component>\n</project>"
  },
  {
    "path": ".idea/uiDesigner.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"Palette2\">\n    <group name=\"Swing\">\n      <item class=\"com.intellij.uiDesigner.HSpacer\" tooltip-text=\"Horizontal Spacer\" icon=\"/com/intellij/uiDesigner/icons/hspacer.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"1\" hsize-policy=\"6\" anchor=\"0\" fill=\"1\" />\n      </item>\n      <item class=\"com.intellij.uiDesigner.VSpacer\" tooltip-text=\"Vertical Spacer\" icon=\"/com/intellij/uiDesigner/icons/vspacer.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"1\" anchor=\"0\" fill=\"2\" />\n      </item>\n      <item class=\"javax.swing.JPanel\" icon=\"/com/intellij/uiDesigner/icons/panel.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"3\" hsize-policy=\"3\" anchor=\"0\" fill=\"3\" />\n      </item>\n      <item class=\"javax.swing.JScrollPane\" icon=\"/com/intellij/uiDesigner/icons/scrollPane.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"7\" hsize-policy=\"7\" anchor=\"0\" fill=\"3\" />\n      </item>\n      <item class=\"javax.swing.JButton\" icon=\"/com/intellij/uiDesigner/icons/button.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"3\" anchor=\"0\" fill=\"1\" />\n        <initial-values>\n          <property name=\"text\" value=\"Button\" />\n        </initial-values>\n      </item>\n      <item class=\"javax.swing.JRadioButton\" icon=\"/com/intellij/uiDesigner/icons/radioButton.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"0\" 
hsize-policy=\"3\" anchor=\"8\" fill=\"0\" />\n        <initial-values>\n          <property name=\"text\" value=\"RadioButton\" />\n        </initial-values>\n      </item>\n      <item class=\"javax.swing.JCheckBox\" icon=\"/com/intellij/uiDesigner/icons/checkBox.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"3\" anchor=\"8\" fill=\"0\" />\n        <initial-values>\n          <property name=\"text\" value=\"CheckBox\" />\n        </initial-values>\n      </item>\n      <item class=\"javax.swing.JLabel\" icon=\"/com/intellij/uiDesigner/icons/label.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"0\" anchor=\"8\" fill=\"0\" />\n        <initial-values>\n          <property name=\"text\" value=\"Label\" />\n        </initial-values>\n      </item>\n      <item class=\"javax.swing.JTextField\" icon=\"/com/intellij/uiDesigner/icons/textField.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"6\" anchor=\"8\" fill=\"1\">\n          <preferred-size width=\"150\" height=\"-1\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JPasswordField\" icon=\"/com/intellij/uiDesigner/icons/passwordField.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"6\" anchor=\"8\" fill=\"1\">\n          <preferred-size width=\"150\" height=\"-1\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JFormattedTextField\" icon=\"/com/intellij/uiDesigner/icons/formattedTextField.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"6\" anchor=\"8\" 
fill=\"1\">\n          <preferred-size width=\"150\" height=\"-1\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JTextArea\" icon=\"/com/intellij/uiDesigner/icons/textArea.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"6\" anchor=\"0\" fill=\"3\">\n          <preferred-size width=\"150\" height=\"50\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JTextPane\" icon=\"/com/intellij/uiDesigner/icons/textPane.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"6\" anchor=\"0\" fill=\"3\">\n          <preferred-size width=\"150\" height=\"50\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JEditorPane\" icon=\"/com/intellij/uiDesigner/icons/editorPane.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"6\" anchor=\"0\" fill=\"3\">\n          <preferred-size width=\"150\" height=\"50\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JComboBox\" icon=\"/com/intellij/uiDesigner/icons/comboBox.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"2\" anchor=\"8\" fill=\"1\" />\n      </item>\n      <item class=\"javax.swing.JTable\" icon=\"/com/intellij/uiDesigner/icons/table.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"6\" anchor=\"0\" fill=\"3\">\n          <preferred-size width=\"150\" height=\"50\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JList\" icon=\"/com/intellij/uiDesigner/icons/list.svg\" removable=\"false\" 
auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"2\" anchor=\"0\" fill=\"3\">\n          <preferred-size width=\"150\" height=\"50\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JTree\" icon=\"/com/intellij/uiDesigner/icons/tree.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"6\" anchor=\"0\" fill=\"3\">\n          <preferred-size width=\"150\" height=\"50\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JTabbedPane\" icon=\"/com/intellij/uiDesigner/icons/tabbedPane.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"3\" hsize-policy=\"3\" anchor=\"0\" fill=\"3\">\n          <preferred-size width=\"200\" height=\"200\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JSplitPane\" icon=\"/com/intellij/uiDesigner/icons/splitPane.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"3\" hsize-policy=\"3\" anchor=\"0\" fill=\"3\">\n          <preferred-size width=\"200\" height=\"200\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JSpinner\" icon=\"/com/intellij/uiDesigner/icons/spinner.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"true\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"6\" anchor=\"8\" fill=\"1\" />\n      </item>\n      <item class=\"javax.swing.JSlider\" icon=\"/com/intellij/uiDesigner/icons/slider.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"6\" anchor=\"8\" fill=\"1\" />\n      </item>\n      <item class=\"javax.swing.JSeparator\" 
icon=\"/com/intellij/uiDesigner/icons/separator.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"6\" anchor=\"0\" fill=\"3\" />\n      </item>\n      <item class=\"javax.swing.JProgressBar\" icon=\"/com/intellij/uiDesigner/icons/progressbar.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"6\" anchor=\"0\" fill=\"1\" />\n      </item>\n      <item class=\"javax.swing.JToolBar\" icon=\"/com/intellij/uiDesigner/icons/toolbar.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"6\" anchor=\"0\" fill=\"1\">\n          <preferred-size width=\"-1\" height=\"20\" />\n        </default-constraints>\n      </item>\n      <item class=\"javax.swing.JToolBar$Separator\" icon=\"/com/intellij/uiDesigner/icons/toolbarSeparator.svg\" removable=\"false\" auto-create-binding=\"false\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"0\" hsize-policy=\"0\" anchor=\"0\" fill=\"1\" />\n      </item>\n      <item class=\"javax.swing.JScrollBar\" icon=\"/com/intellij/uiDesigner/icons/scrollbar.svg\" removable=\"false\" auto-create-binding=\"true\" can-attach-label=\"false\">\n        <default-constraints vsize-policy=\"6\" hsize-policy=\"0\" anchor=\"0\" fill=\"2\" />\n      </item>\n    </group>\n  </component>\n</project>"
  },
  {
    "path": ".idea/vcs.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"VcsDirectoryMappings\">\n    <mapping directory=\"$PROJECT_DIR$\" vcs=\"Git\" />\n  </component>\n</project>"
  },
  {
    "path": "Archcraft/ender-system-hydra/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>archcraft</artifactId>\n        <groupId>com.archcraft</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.ender.system</groupId>\n    <artifactId>ender-system-hydra</artifactId>\n    <version>2.5.1</version>\n    <packaging>jar</packaging>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-system-reign</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.acorn.redqueen.kernel</groupId>\n            <artifactId>redqueen-system</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.acorn.skynet.kernel</groupId>\n            <artifactId>skynet-system</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n\n</project>"
  },
  {
    "path": "Archcraft/ender-system-hydra/src/main/java/com/walnut/archcraft/ender/EnderHydra.java",
    "content": "package com.walnut.archcraft.ender;\n\nimport com.acorn.redqueen.system.ServiceCentralControl;\nimport com.acorn.skynet.system.SkynetSubsystem;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.regime.arch.Lord;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.name.UniNamespace;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.UniformProcessManager;\nimport com.pinecone.hydra.proc.image.FileSystemMappingImageLoader;\nimport com.pinecone.hydra.proc.image.ImageLoader;\nimport com.pinecone.hydra.proc.image.UniformMultiScopeImageLoader;\nimport com.pinecone.hydra.proc.image.kom.VirtualExeImageInstrument;\nimport com.pinecone.hydra.proc.image.kom.VirtualMappingExeImageInstrument;\nimport com.pinecone.hydra.reign.UnixInstitutionalizedMetaImperiumPrivy;\nimport com.pinecone.hydra.system.component.LogStatuses;\n\nimport com.pinecone.hydra.system.imperium.ImperiumPrivy;\nimport com.pinecone.hydra.system.imperium.KernelObjectRootMountPoint;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.runtime.GenericRuntimeInstrumentConfig;\nimport com.pinecone.hydra.system.subsystem.CentralKernelLordFederation;\nimport com.pinecone.hydra.system.subsystem.KernelLordFederation;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.ulf.util.guid.i64.GuidAllocator72;\nimport com.pinecone.ulf.util.guid.i64.GuidAllocator72V2;\nimport com.walnut.archcraft.ender.system.HydraEmpire;\nimport com.walnut.archcraft.ender.system.Hydroxy;\n\npublic class EnderHydra extends Tritium implements HydraEmpire {\n\n    protected GuidAllocator             mSystemGuidAllocator;\n    protected GuidAllocator72           mSystemGuidAllocator72;\n    protected ImageLoader               
mSystemImageLoader;\n    protected ProcessManager            mSystemProcessManager;\n    protected UProcess                  mProxiedRootSystemProcess;\n    protected KernelObjectConfig        mFundamentalKernelObjectConfig;\n    protected VirtualExeImageInstrument mVirtualExeImageInstrument;\n    protected ImperiumPrivy             mImperiumPrivy;\n\n\n    protected KernelLordFederation      mLordFederation;\n    protected SkynetSubsystem           mSkynetSubsystem;\n    protected ServiceCentralControl     mServiceCentralControl;\n\n\n    public EnderHydra( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public EnderHydra( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    protected void prepare_system_skeleton() {\n        super.prepare_system_skeleton();\n        this.prepare_uniform_system();\n    }\n\n    protected void prepare_uniform_system_process_task_subsystem() {\n        this.mVirtualExeImageInstrument = new VirtualMappingExeImageInstrument( this, \"\" );\n        this.infoLifecycle( \"<Uniform Hydra> ProcessSubsystem[1] System VirtualExeImageInstrument Initialization\", LogStatuses.StatusDone );\n\n\n        ImageLoader localMappingImageLoader = new FileSystemMappingImageLoader( this, this.mVirtualExeImageInstrument );\n        this.infoLifecycle( \"<Uniform Hydra> ProcessSubsystem[2] System Scope LocalMappingImageLoader Initialization\", LogStatuses.StatusDone );\n        this.mSystemImageLoader         = new UniformMultiScopeImageLoader( this, localMappingImageLoader );\n        this.infoLifecycle( \"<Uniform Hydra> ProcessSubsystem[3] System Scope UniformMultiScopeImageLoader Initialization\", LogStatuses.StatusDone );\n\n\n        this.mSystemProcessManager = new UniformProcessManager(\n                this, null, \"SystemUniformProcessManager\", \"\", null\n        );\n        
this.getDispenserCenter().getInstanceDispenser().registerInstance( \"__SystemTaskManager__\", this.mSystemProcessManager );\n        this.infoLifecycle( \"<Uniform Hydra> ProcessSubsystem[4] System ProcessManager Initialization\", LogStatuses.StatusDone );\n\n\n        this.mProxiedRootSystemProcess  = new Hydroxy( this );\n        this.mSystemProcessManager.applyRootUProcess( this.mProxiedRootSystemProcess );\n        this.mSystemProcessManager.register( this.mProxiedRootSystemProcess );\n        this.infoLifecycle( \"<Uniform Hydra> ProcessSubsystem[5] System Hydroxy Initialization\", LogStatuses.StatusDone );\n\n        this.infoLifecycle( \"<Uniform Hydra> Uniform System Process/Task Subsystem\", LogStatuses.StatusDone );\n    }\n\n    protected void prepare_uniform_system_imperium_privy() {\n        this.mImperiumPrivy = new UnixInstitutionalizedMetaImperiumPrivy( new UniNamespace( \"SystemUnixInstitutionalizedMetaImperiumPrivy\" ), this, null, this.fundamentalKernelObjectConfig() );\n        this.infoLifecycle(\n                \"<Uniform Hydra> System ImperiumPrivy Initialization. (name: `\" + this.mImperiumPrivy.getTargetingName() + \"`, class: `\" + this.mImperiumPrivy.getClass().getName() + \"`)\",\n                LogStatuses.StatusDone\n        );\n        this.mImperiumPrivy.getExpressInstrument().mount( KernelObjectRootMountPoint.SysImages.getMountPoint(), this.mVirtualExeImageInstrument );\n        this.infoLifecycle(\n                \"<Uniform Hydra::Privy> System VirtualExeImageInstrument Mount. 
(MountPoint: `/\" + KernelObjectRootMountPoint.SysImages.getMountPoint() + \"`)\",\n                LogStatuses.StatusDone\n        );\n\n\n        this.infoLifecycle( \"<Uniform Hydra> Uniform Imperium Privy\", LogStatuses.StatusDone );\n    }\n\n    @Override\n    protected void prepare_system_skeleton_before() {\n        this.prepare_uniform_system_guid_system();\n    }\n\n    protected void prepare_uniform_system_guid_system() {\n        this.mSystemGuidAllocator    = GUIDs.newGuidAllocator( 1984 ); // TODO MachineId allocation.\n        this.infoLifecycle(\n                \"<Uniform Hydra> System GUIDAllocator Initialization [Type: `\" + this.mSystemGuidAllocator.getClass().getName() + \"`]\",\n                LogStatuses.StatusDone\n        );\n\n        this.mSystemGuidAllocator72  = new GuidAllocator72V2();\n        this.infoLifecycle(\n                \"<Uniform Hydra> System GUIDAllocator72 Initialization [Type: `\" + this.mSystemGuidAllocator72.getClass().getName() + \"`]\",\n                LogStatuses.StatusDone\n        );\n    }\n\n    protected void prepare_uniform_system() {\n        this.infoLifecycle( \"<Hydra Empire> Uniform Operation System\", LogStatuses.StatusStart );\n\n        this.init_uniform_system_configuration();\n\n        this.prepare_uniform_system_process_task_subsystem();\n        this.init_process_kernel_subsystem();\n\n        this.prepare_modularized_subsystem();\n\n        this.infoLifecycle( \"<Hydra Empire> Uniform Operation System\", LogStatuses.StatusReady );\n        this.getLogger().info( \"[Welcome] [<Hydra Empire> Welcome to join the imperial army!]\" );\n    }\n\n    protected void prepare_modularized_subsystem() {\n        this.infoLifecycle( \"<Hydra Empire> [SummoningLords] Modularized Subsystem Initialization\", LogStatuses.StatusStart );\n        this.mLordFederation = new CentralKernelLordFederation( this );\n\n\n        this.mSkynetSubsystem = (SkynetSubsystem) this.mLordFederation.get( \"KernelSkynetLord\" 
);\n        if ( this.mSkynetSubsystem != null ) {\n            this.mSkynetSubsystem.vitalize();\n        }\n\n        this.mServiceCentralControl = (ServiceCentralControl) this.mLordFederation.get( \"KernelRedQueenLord\" );\n        if ( this.mServiceCentralControl != null ) {\n            this.mServiceCentralControl.vitalize();\n        }\n\n        this.getLogger().info( \"[ActionReport] <Hydra Empire> [SummoningLords] Empire now has {} lords.\", this.countEmpireLords() );\n        this.infoLifecycle( \"<Hydra Empire> [SummoningLords] Modularized Subsystem Initialization\", LogStatuses.StatusDone );\n    }\n\n    protected void init_process_kernel_subsystem() {\n        this.infoLifecycle( \"Uniform Process Subsystem\", LogStatuses.StatusStart );\n\n        this.prepare_uniform_system_imperium_privy();\n\n        this.infoLifecycle( \"Uniform Process Subsystem\", LogStatuses.StatusDone );\n    }\n\n    protected void init_uniform_system_configuration() {\n        this.infoLifecycle( \"Uniform System Configuration\", LogStatuses.StatusStart );\n\n        this.mFundamentalKernelObjectConfig = new GenericRuntimeInstrumentConfig();\n        this.infoLifecycle( \"<Uniform Hydra> System FundamentalKernelObjectConfig Initialization\", LogStatuses.StatusDone );\n\n        this.infoLifecycle( \"Uniform System Configuration\", LogStatuses.StatusDone );\n    }\n\n    @Override\n    protected void traceWelcomeInfo() {\n        this.pout().print( \"---------------------------------------------------------------\\n\" );\n        this.pout().print( \"\\u001B[31mBean Nuts Pinecone Ursus for Java\\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[31mHydra Kingdom Framework (Ender Hydra) \\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. 
All rights reserved.\\u001B[0m\\n\" );\n        this.pout().print( \"---------------------------------------------------------------\\n\" );\n        this.pout().print( \"\\u001B[31mDragon King\\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[32mWebsit: https://www.dragonking.cn/ \\u001B[0m\\n\" );\n\n        this.traceSubsystemWelcomeInfo();\n        this.traceSystemBootingInfo();\n        this.prepare_system_log4j_logger();\n        this.infoLifecycle( \"Initialization\", LogStatuses.StatusStart );\n    }\n\n    @Override\n    public GuidAllocator getSystemGuidAllocator() {\n        return this.mSystemGuidAllocator;\n    }\n\n    @Override\n    public GuidAllocator72 getSystemGuidAllocator72() {\n        return this.mSystemGuidAllocator72;\n    }\n\n    @Override\n    public ProcessManager processManager() {\n        return this.mSystemProcessManager;\n    }\n\n    @Override\n    public ImageLoader imageLoader() {\n        return this.mSystemImageLoader;\n    }\n\n    @Override\n    public Processum ownedLocalProcess() {\n        return this;\n    }\n\n    @Override\n    public UProcess ownedUniformProcess() {\n        return this.mProxiedRootSystemProcess;\n    }\n\n    @Override\n    public KernelObjectConfig fundamentalKernelObjectConfig() {\n        return this.mFundamentalKernelObjectConfig;\n    }\n\n    @Override\n    public ImperiumPrivy imperiumPrivy() {\n        return this.mImperiumPrivy;\n    }\n\n    @Override\n    public ServiceCentralControl redQueen() {\n        return this.mServiceCentralControl;\n    }\n\n    @Override\n    public SkynetSubsystem skynet() {\n        return this.mSkynetSubsystem;\n    }\n\n    @Override\n    public Lord getEmpireLordsByName( String lordName ) {\n        return this.mLordFederation.get( lordName );\n    }\n\n    @Override\n    public KernelLordFederation getLordFederation() {\n        return this.mLordFederation;\n    }\n\n    @Override\n    public int countEmpireLords() {\n        return 
this.mLordFederation.size();\n    }\n\n    @Override\n    public VirtualExeImageInstrument virtualExeImageInstrument() {\n        return this.mVirtualExeImageInstrument;\n    }\n}\n"
  },
  {
    "path": "Archcraft/ender-system-hydra/src/main/java/com/walnut/archcraft/ender/system/HydraEmpire.java",
    "content": "package com.walnut.archcraft.ender.system;\n\nimport com.acorn.redqueen.system.ServiceCentralControl;\nimport com.acorn.skynet.system.SkynetSubsystem;\nimport com.pinecone.framework.system.regime.arch.Lord;\nimport com.pinecone.hydra.proc.InstitutionalProcess;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.ProcessManagerSystema;\nimport com.pinecone.hydra.proc.image.ImageLoader;\nimport com.pinecone.hydra.proc.image.kom.VirtualExeImageInstrument;\nimport com.pinecone.hydra.system.centrum.Centrum;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.system.subsystem.KernelLordFederation;\nimport com.pinecone.hydra.system.types.HydraKingdom;\n\npublic interface HydraEmpire extends Centrum, HydraKingdom, Slf4jTraceable, InstitutionalProcess, ProcessManagerSystema {\n\n    ProcessManager processManager();\n\n    ImageLoader imageLoader();\n\n    VirtualExeImageInstrument virtualExeImageInstrument();\n\n    ServiceCentralControl redQueen();\n\n    SkynetSubsystem skynet();\n\n    Lord getEmpireLordsByName( String lordName );\n\n    KernelLordFederation getLordFederation();\n\n    int countEmpireLords();\n\n}\n"
  },
  {
    "path": "Archcraft/ender-system-hydra/src/main/java/com/walnut/archcraft/ender/system/Hydroxy.java",
    "content": "package com.walnut.archcraft.ender.system;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.config.StartupCommandParser;\nimport com.pinecone.hydra.proc.ArchUProcess;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.GenericClassImage;\nimport com.pinecone.hydra.proc.ns.GenericSegregationSpace;\nimport com.pinecone.hydra.proc.ns.ProcSpace;\nimport com.pinecone.hydra.system.component.LogStatuses;\n\npublic class Hydroxy extends ArchUProcess {\n\n    public Hydroxy(\n            HydraEmpire hostedSystem,\n            UProcess parent, ExecutionImage image, ProcSpace procSpace,\n            Map<String, String[]> startupArgs, Map<String, String[]> environmentVars\n    ) {\n        super( hostedSystem, parent, hostedSystem.processManager(), image, procSpace, startupArgs, environmentVars );\n\n        this.revealNearestSystem().infoLifecycle(\n                \"HydraSystemProcess [UProcessProxy] [Name: `\" + this.getName() + \"`]\",\n                LogStatuses.StatusStandby\n        );\n        this.revealNearestSystem().infoLifecycle( \"HydraSystemProcess Initialization\", LogStatuses.StatusDone );\n    }\n\n    public Hydroxy( HydraEmpire hostedSystem ) {\n        this(\n                hostedSystem, null,\n                new HydroxyImage( hostedSystem ), new GenericSegregationSpace(),\n                hostedSystem.getStartupCommandMap(), hostedSystem.getEnvironmentVars()\n        );\n    }\n\n    @Override\n    public RuntimeSystem parentSystem() {\n        return super.parentSystem();\n    }\n\n    @Override\n    public HydraEmpire revealNearestSystem() {\n        return (HydraEmpire) super.revealNearestSystem();\n    }\n}\n"
  },
  {
    "path": "Archcraft/ender-system-hydra/src/main/java/com/walnut/archcraft/ender/system/HydroxyImage.java",
    "content": "package com.walnut.archcraft.ender.system;\n\nimport java.util.Map;\n\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.image.ArchEntryPointRunnable;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.pinecone.hydra.proc.image.GenericClassImage;\nimport com.pinecone.hydra.proc.image.ImageLoadProcedureException;\nimport com.pinecone.hydra.proc.image.ImageLoader;\n\npublic class HydroxyImage extends GenericClassImage {\n\n    public HydroxyImage(\n            HydraEmpire system, String name, EntryPointRunnable entryPoint, Class<? extends UProcess> processClassType, ImageLoader imageLoader\n    ) throws ImageLoadProcedureException {\n        super(\n                name, entryPoint, processClassType, imageLoader\n        );\n    }\n\n    public HydroxyImage( HydraEmpire system ) throws ImageLoadProcedureException {\n        this(\n                system, \"SystemProcess\", new ArchEntryPointRunnable() {\n                    @Override\n                    public int main( Map<String, String[]> args ) {\n                        system.start();\n                        return 0;\n                    }\n                },\n                Hydroxy.class, system.imageLoader()\n        );\n    }\n\n\n}\n"
  },
  {
    "path": "Archcraft/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.archcraft</groupId>\n    <artifactId>archcraft</artifactId>\n    <packaging>pom</packaging>\n    <version>2.5.1</version>\n\n    <modules>\n        <module>redstone-architecture</module>\n        <module>redstone-message-stones</module>\n        <module>ender-system-hydra</module>\n    </modules>\n</project>"
  },
  {
    "path": "Archcraft/redstone-architecture/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>archcraft</artifactId>\n        <groupId>com.archcraft</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.redstone.kernel</groupId>\n    <artifactId>redstone-architecture</artifactId>\n    <version>2.1.0</version>\n\n\n    <dependencies>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n            <version>2.6.13</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            
<version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.thrift</groupId>\n            <artifactId>libthrift</artifactId>\n            <version>0.18.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.sdk.thrift</groupId>\n            <artifactId>hydra-lib-thrift-sdk</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.sparta.api.uac</groupId>\n            <artifactId>sparta-api-uac</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n</project>"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/architect/Bedrock.java",
    "content": "package com.walnut.archcraft.redstone.architect;\n\npublic interface Bedrock extends Stone {\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/architect/Redstone.java",
    "content": "package com.walnut.archcraft.redstone.architect;\n\npublic interface Redstone extends Stone {\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/architect/Stone.java",
    "content": "package com.walnut.archcraft.redstone.architect;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Stone extends Pinenut {\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/ArchResponseObjectManager.java",
    "content": "package com.walnut.archcraft.redstone.response;\n\nimport java.util.function.Supplier;\n\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic abstract class ArchResponseObjectManager extends ArchSystemCascadeComponent implements ResponseObjectManager {\n\n    public ArchResponseObjectManager( Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, system.getComponentManager(), parent );\n    }\n\n    public ArchResponseObjectManager( Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public ArchResponseObjectManager( Hydrogen system ) {\n        this( system, null );\n    }\n\n\n    @Override\n    public <T extends RedTraceableResponse> T newResponse(Supplier<T> cons) {\n        T response = cons.get();\n        response.setRequestId(this.nextTraceId());\n        return response;\n    }\n\n\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/GenericResultResponse.java",
    "content": "package com.walnut.archcraft.redstone.response;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport org.springframework.http.HttpStatus;\n\n\npublic class GenericResultResponse<T> implements RedResponseEntity<T> {\n    private Boolean    success;\n    private Integer    code = HttpStatus.OK.value();\n    private String     message;\n    private String     requestId;\n    private String     errorCode;\n    private T          data;\n\n    public static <T> GenericResultResponse<T > success() {\n        GenericResultResponse<T> result = new GenericResultResponse<>();\n        result.code = HttpStatus.OK.value();\n        result.success = true;\n        return result;\n    }\n\n    public static <T> GenericResultResponse<T > successMsg( String msg  ) {\n        GenericResultResponse<T> result = GenericResultResponse.success();\n        result.message  = msg;\n        return result;\n    }\n\n    public static <T> GenericResultResponse<T > success( T object ) {\n        GenericResultResponse<T> result = GenericResultResponse.success();\n        result.data = object;\n        return result;\n    }\n\n    public static <T> GenericResultResponse<T > error( String msg ) {\n        GenericResultResponse<T> result = new GenericResultResponse<>();\n        result.success = false;\n        result.message  = msg;\n        result.code = HttpStatus.INTERNAL_SERVER_ERROR.value();\n        return result;\n    }\n\n\n    @Override\n    public Boolean getSuccess() {\n        return this.success;\n    }\n\n    @Override\n    public void setSuccess( Boolean success ) {\n        this.success = success;\n    }\n\n    @Override\n    public Integer getCode() {\n        return this.code;\n    }\n\n    @Override\n    public void setCode( Integer code ) {\n        this.code = code;\n    }\n\n    @Override\n    public String getErrorCode() {\n        return this.errorCode;\n    }\n\n    @Override\n    public void 
setErrorCode( String errorCode ) {\n        this.errorCode = errorCode;\n    }\n\n    @Override\n    public void setRequestId( String requestId ) {\n        this.requestId = requestId;\n    }\n\n    @Override\n    public String getRequestId() {\n        return this.requestId;\n    }\n\n    @Override\n    public String getMessage() {\n        return this.message;\n    }\n\n    @Override\n    public void setMessage( String msg ) {\n        this.message = msg;\n    }\n\n    @Override\n    public T getData() {\n        return this.data;\n    }\n\n    @Override\n    public void setData( T data ) {\n        this.data = data;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat(new KeyValue[]{\n                new KeyValue<>( \"success\",   this.getSuccess() ),\n                new KeyValue<>( \"code\",      this.code         ),\n                new KeyValue<>( \"message\",   this.message      ),\n                new KeyValue<>( \"errorCode\", this.errorCode    ),\n                new KeyValue<>( \"requestId\", this.requestId    ),\n                new KeyValue<>( \"data\",      this.data         )\n        });\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/RedResponse.java",
    "content": "package com.walnut.archcraft.redstone.response;\n\nimport java.io.Serializable;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface RedResponse extends Pinenut, Serializable {\n\n    Boolean getSuccess();\n\n    void setSuccess( Boolean success );\n\n    Integer getCode();\n\n    void setCode( Integer code );\n\n    String getErrorCode();\n\n    void setErrorCode( String errorCode );\n\n    String getMessage();\n\n    void setMessage( String msg );\n\n\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/RedResponseEntity.java",
    "content": "package com.walnut.archcraft.redstone.response;\n\npublic interface RedResponseEntity<T> extends RedTraceableResponse {\n\n    T getData();\n\n    void setData( T data );\n\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/RedTraceableResponse.java",
    "content": "package com.walnut.archcraft.redstone.response;\n\npublic interface RedTraceableResponse extends RedResponse {\n\n    void setRequestId( String requestId );\n\n    String getRequestId();\n\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/response/ResponseObjectManager.java",
    "content": "package com.walnut.archcraft.redstone.response;\n\nimport java.util.function.Supplier;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.system.HyComponent;\n\npublic interface ResponseObjectManager extends Pinenut, HyComponent {\n\n    String nextTraceId();\n\n    <T extends RedTraceableResponse> T newResponse(Supplier<T> cons);\n\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/system/Dummy.java",
    "content": "package com.walnut.archcraft.redstone.system;\n\npublic class Dummy {\n}\n"
  },
  {
    "path": "Archcraft/redstone-architecture/src/main/java/com/walnut/archcraft/redstone/util/Dummy.java",
    "content": "package com.walnut.archcraft.redstone.util;\n\npublic class Dummy {\n}\n"
  },
  {
    "path": "Archcraft/redstone-message-stones/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>archcraft</artifactId>\n        <groupId>com.archcraft</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.redstone.stones</groupId>\n    <artifactId>redstone-message-stones</artifactId>\n    <version>2.1.0</version>\n\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.redstone.kernel</groupId>\n            <artifactId>redstone-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-broadcast</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            
<groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n</project>"
  },
  {
    "path": "Archcraft/redstone-message-stones/src/main/java/com/walnut/archcraft/redstone/Dummy.java",
    "content": "package com.walnut.archcraft.redstone;\n\npublic class Dummy {\n}\n"
  },
  {
    "path": "Archcraft/redstone-message-stones/src/main/java/com/walnut/archcraft/redstone/messge/PrimaryMessageWareStone.java",
    "content": "package com.walnut.archcraft.redstone.messge;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;\n\npublic interface PrimaryMessageWareStone extends Pinenut {\n\n    RuntimeSystem parentSystem();\n\n    Processum getParentProcess();\n\n    DuplexAppointServer getWolfKingAppointServer();\n\n    DuplexAppointClient getWolfAppointClient();\n\n    UlfBroadcastControlNode getPrimaryKafkaClient();\n\n    UlfBroadcastControlNode getPrimaryRocketClient();\n\n}\n"
  },
  {
    "path": "CHANGELOG.md",
    "content": "# 更新日志\n\n```markdown\n   格式：\n    ## [版本号] - 日期\n    ### 模板名称 (可选 console-ui, console, FileModule, common, ...)\n    - 🎈新增: {模块名称} {功能介绍}\n    - 🐞Bug: #{issue号} {bug描述}\n    - ⛏修复: #{issue号} {修复描述}\n    - 📝文档: {文件名} 添加注释\n    - 🚀性能: {类} {方法} {描述}\n    - 🎨样式: \n    - 🧹重构:\n    - 🧪测试: {类|方法} {测试结果}\n    - 🛑更名: {旧名} ➡ {新名}\n    - ❌移除: {模块|方法}\n    - 🚧施工\n\n\n    ------\n\n```\n\n------\n# 目录\n* [V 1.0.1]()\n* [V 1.0.0]()\n------\n## [V 1.0.0] - 2023.5.18\n### Messenger\n- ❌移除: `Message,MessageType`,移除所有的模块中与`Message`有关的代码\n- 🧹重构: 重构`Message`为`UlfUMCMessage`,构建了全新协议UlfUMC\n- 🎈新增: 新增`MessageBuilder,MessageFactory`用来构建专属的`UlfUMCMessage`类\n------\n## [V 1.0.0] - 2023.5.13\n### 🎈 TaskJuggler\n任务调度模块，主要负责节点的任务分配和处理\n- 🎈新增: 新增`Heist 劫匪` 作为`HeistCenter`的工作线程，负责处理单个任务，有失败重试机制\n- 🎈新增: 新增`HeistCenter` 任务调度中心,负责初始化任务进度，与`Harbor 港口`通信，是整个任务调度的**核心类**\n- 🎈新增: 新增`HeistConfig` 作为整个`Heist`家族的配置类\n- 🎈新增: 新增`Harbor 港口` 与 Master节点通信的核心类，目前具备向master节点发送任务查询申请，任务缓存，`HeistCenter`任务获取\n- 🎈新增: 新增`MqConfig` 消息队列初始化\n### 🎈 com.pinecone\n整个项目的核心公共代码所在地\n- 🎈新增: 新增`SystemConfig` 整个系统的配置类，可获取一些系统的全局变量\n- 🎈新增: 新增`RadiumConstPool` 项目的公共常量池，存放一些常量\n- 🎈新增: 新增`SystemUtils` 系统工具类,定制一些独属于该系统的工具\n- 🎈🚧新增: 新增`TimeUtil` 时间工具 (不推荐使用)\n### 🎈 Messenger\n负责定义消息类型，消息结构以及消息队列的一些全局变量管理，专门用来定义消息的模块\n- 🎈新增: 新增`Message` 消息类，目前消息队列通信的**核心类**\n- 🎈新增: 新增`MessageType` 消息类型类,目前有`Query,Post,Reply,ReplyPost,ShutDown`\n- 🎈新增: 新增`MessageConverterConfig` 主要负责mq中类传输的转化\n- 🎈新增: 新增`FunctionNamePool` 主要存放Master中对应的方法名称\n- 🎈新增: 新增`MqPool` 消息队列全局变量池\n### 🎈 Console\n项目启动模块\n### 🎈 File\n文件操作模块\n- 🎈新增: 新增 `JsonFileUtil` 工具类，用于进行json文件的读写操作\n- 🎈新增: 新增 `FileUtil` 工具类，用于进行文件复制文件删除等操作\n- 🎈新增: 新增 `FileCondition` 方法，用于对文件递归删除进行条件过滤\n- 🧪测试: 测试 `FileUtil` 工具类, 测试 `JsonFileUtil` 工具类\n- 🎈新增: 新增 `FileCacheManagerInstance` 将整个FileCacheManager转变为全局单例，防止重复使用调用\n- 🎈新增: 新增 `GlobalFileCache` 全局文件缓存，也负责为`FileCacheManagerInstance`提供初始化的文件缓存队列\n- 🎈新增: 新增 `FileCache` 文件缓冲池类，负责缓存文件内容，文件的读取，修改，追加，能够根据刷入时间或者写入字节，来进行自动刷盘操作\n- 🎈新增: 新增 
`FileCacheManager` 文件缓冲池管理类，管理所有文件缓存池，轮询查看每个文件是否需要自动刷入，目前包含巡逻线程与刷入线程\n- 🎈新增: 新增 `FileCacheManagerInit` 用于启动初始化FileCacheManager\n------\n"
  },
  {
    "path": "File/File.iml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<module version=\"4\">\n  <component name=\"NewModuleRootManager\" LANGUAGE_LEVEL=\"JDK_11\">\n    <output url=\"file://$MODULE_DIR$/target/classes\" />\n    <output-test url=\"file://$MODULE_DIR$/target/test-classes\" />\n    <content url=\"file://$MODULE_DIR$\">\n      <sourceFolder url=\"file://$MODULE_DIR$/src/main/java\" isTestSource=\"false\" />\n      <sourceFolder url=\"file://$MODULE_DIR$/src/test/java\" isTestSource=\"true\" />\n      <excludeFolder url=\"file://$MODULE_DIR$/target\" />\n    </content>\n    <orderEntry type=\"inheritedJdk\" />\n    <orderEntry type=\"sourceFolder\" forTests=\"false\" />\n    <orderEntry type=\"module\" module-name=\"Pinecone\" />\n    <orderEntry type=\"library\" name=\"Maven: mysql:mysql-connector-java:8.0.23\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.google.protobuf:protobuf-java:3.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: commons-fileupload:commons-fileupload:1.3.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: commons-io:commons-io:2.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.aliyun.oss:aliyun-sdk-oss:3.10.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.httpcomponents:httpclient:4.5.13\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.httpcomponents:httpcore:4.4.14\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: commons-codec:commons-codec:1.15\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.jdom:jdom2:2.0.6\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.codehaus.jettison:jettison:1.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: stax:stax-api:1.0.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.aliyun:aliyun-java-sdk-core:3.4.0\" 
level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.aliyun:aliyun-java-sdk-ram:3.0.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.aliyun:aliyun-java-sdk-sts:3.0.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.aliyun:aliyun-java-sdk-ecs:4.2.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.aliyun:aliyun-java-sdk-kms:2.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.google.code.gson:gson:2.8.6\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-web:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-autoconfigure:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-logging:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: ch.qos.logback:logback-classic:1.2.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: ch.qos.logback:logback-core:1.2.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.logging.log4j:log4j-to-slf4j:2.13.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.logging.log4j:log4j-api:2.13.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.slf4j:jul-to-slf4j:1.7.30\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: jakarta.annotation:jakarta.annotation-api:1.3.5\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.yaml:snakeyaml:1.27\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: 
org.springframework.boot:spring-boot-starter-json:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-databind:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-annotations:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-core:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.module:jackson-module-parameter-names:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-tomcat:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.glassfish:jakarta.el:3.0.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat.embed:tomcat-embed-websocket:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-web:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-beans:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-webmvc:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-aop:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-context:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-expression:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.alibaba:fastjson:1.2.75\" level=\"project\" />\n    <orderEntry type=\"library\" 
scope=\"TEST\" name=\"Maven: org.springframework.boot:spring-boot-starter-test:2.3.9.RELEASE\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.springframework.boot:spring-boot-test:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.springframework.boot:spring-boot-test-autoconfigure:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: com.jayway.jsonpath:json-path:2.4.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: net.minidev:json-smart:2.3\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: net.minidev:accessors-smart:1.2\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.ow2.asm:asm:5.0.4\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: jakarta.xml.bind:jakarta.xml.bind-api:2.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: jakarta.activation:jakarta.activation-api:1.2.2\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.assertj:assertj-core:3.18.1\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.hamcrest:hamcrest:2.2\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.junit.jupiter:junit-jupiter:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.junit.jupiter:junit-jupiter-api:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.opentest4j:opentest4j:1.2.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.junit.platform:junit-platform-commons:1.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.junit.jupiter:junit-jupiter-params:5.7.0\" level=\"project\" />\n    
<orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.junit.jupiter:junit-jupiter-engine:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.junit.vintage:junit-vintage-engine:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.apiguardian:apiguardian-api:1.1.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.junit.platform:junit-platform-engine:1.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: junit:junit:4.13.1\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.mockito:mockito-core:3.6.28\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: net.bytebuddy:byte-buddy:1.10.18\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: net.bytebuddy:byte-buddy-agent:1.10.18\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.objenesis:objenesis:3.1\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.mockito:mockito-junit-jupiter:3.6.28\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.skyscreamer:jsonassert:1.5.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: com.vaadin.external.google:android-json:0.0.20131108.vaadin1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-core:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-jcl:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.springframework:spring-test:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"TEST\" name=\"Maven: org.xmlunit:xmlunit-core:2.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: 
org.projectlombok:lombok:1.18.16\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.slf4j:slf4j-api:1.7.30\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat.embed:tomcat-embed-core:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat:tomcat-annotations-api:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.aspectj:aspectjweaver:1.9.6\" level=\"project\" />\n  </component>\n</module>"
  },
  {
    "path": "File/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <groupId>com.Sauron</groupId>\n        <artifactId>sauron</artifactId>\n        <version>1.0-SNAPSHOT</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <artifactId>File</artifactId>\n    <packaging>jar</packaging>\n\n    <name>File</name>\n    <url>http://maven.apache.org</url>\n\n    <properties>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.Sauron</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>3.3.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.aliyun.oss</groupId>\n            <artifactId>aliyun-sdk-oss</artifactId>\n            <version>3.10.2</version>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n        <!--json-->\n        <dependency>\n            <groupId>com.alibaba</groupId>\n            <artifactId>fastjson</artifactId>\n            <version>1.2.75</version>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.projectlombok</groupId>\n            <artifactId>lombok</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "File/src/main/java/com/genius/App.java",
    "content": "package com.genius;\n\n/**\n * Hello world!\n *\n */\npublic class App \n{\n    public static void main( String[] args )\n    {\n        System.out.println( \"Hello World!\" );\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/cache/FileCache.java",
    "content": "package com.genius.cache;\n\nimport com.alibaba.fastjson.JSON;\nimport com.alibaba.fastjson.JSONArray;\nimport com.alibaba.fastjson.JSONObject;\n\nimport com.genius.exception.FileCacheException;\nimport com.genius.pojo.ConfigFile;\nimport com.genius.pojo.FileType;\nimport com.genius.util.JsonFileUtil;\nimport com.genius.util.TimeUtil;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\nimport java.nio.file.Paths;\nimport java.util.Arrays;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.concurrent.*;\nimport java.util.concurrent.atomic.AtomicInteger;\n\n/**\n * @author Genius\n * @date 2023/04/24 00:01\n **/\n\n//TODO 文件缓存写入优化 考虑是否要加一个缓存写入Buffer，将短时间内多个相同Key的内容存入Buffer中，合并存入\npublic class FileCache <T extends ConfigFile>{\n    private T configFile;                                       //文件配置类，得到文件信息和文件夹结构，更新文件存入时间\n    private Logger logger;\n\n    private ConcurrentHashMap<String,Object> jsonFile;          //文件内容缓存\n\n    private static int MAX_WRITE_BUFFER_LIMIT = 4096;    //最大写入缓存上线\n\n    private AtomicInteger writeByte;        //当前写入的字节数\n\n    private BlockingQueue<ConcurrentHashMap<String,Object>> syncChannel; //磁盘刷入阻塞队列\n\n    //TODO 优化 考虑是否采用一个定时线程管理所有FileCache的写入\n    private ExecutorService pool; //Sync线程池\n\n    private long autoSyncTime; //自动刷入时间\n\n    public FileCache(T configFile) throws FileCacheException {\n        init(configFile,10);\n    }\n\n    /**\n     * 构造方法\n     * @param configFile        指定的配置文件\n     * @param autoSyncTime      自动刷新时间\n     * @param maxWriteBufferLimit   写入上限\n     * @throws FileCacheException\n     */\n    public FileCache(T configFile,long autoSyncTime,int maxWriteBufferLimit)throws FileCacheException {\n        MAX_WRITE_BUFFER_LIMIT = maxWriteBufferLimit;\n        init(configFile,autoSyncTime);\n    }\n\n    /**\n     * 初始化方法\n     * @param configFile 配置文件\n     * @param autoSyncTime 自动刷入时间\n     * @throws FileCacheException\n     */\n    private 
void init(T configFile, long autoSyncTime) throws FileCacheException {\n        this.configFile = configFile;\n        this.configFile.updateConfigTime();//更新一下当前的时间\n\n        this.logger = LoggerFactory.getLogger(\"FileCache:\"+this.configFile.getFileName());\n\n        this.autoSyncTime = autoSyncTime;\n        if(!load(getFullFilePath())){\n            throw new FileCacheException(\"FileCache Init Error,please Check if your path is correct\");\n        }\n\n        this.writeByte = new AtomicInteger(0);\n        this.syncChannel = new ArrayBlockingQueue<>(20);\n        this.pool = Executors.newSingleThreadExecutor();\n        pool.submit(new SyncMan());\n    }\n\n    /**\n     * 加载文件内容\n     * @return boolean\n     */\n    private boolean load(String path){\n        Map<String, Object> stringObjectMap = JsonFileUtil.readJsonFile(path);\n        if(Objects.isNull(stringObjectMap)){\n           logger.error(\"{}配置文件不存在!\",path);\n           return false;\n        }\n\n        this.jsonFile = new ConcurrentHashMap<>(stringObjectMap);\n        return true;\n    }\n\n    /**\n     * 重新加载内存池Map\n     * @return boolean\n     */\n    public synchronized boolean reload(){\n        return load(Paths.get(this.configFile.getFilePath(),this.configFile.getFileName()).toString());\n    }\n\n    /**\n     * 递进寻找JsonObject中的对象，并改写\n     * @param data\n     * @param keys\n     * @return\n     * @throws InterruptedException\n     */\n    public int writeKeys(Object data,String...keys) throws InterruptedException, FileCacheException {\n       return writeKeys(false,data,keys);\n    }\n\n    private int writeKeys(boolean isAppend,Object data,String...keys) throws FileCacheException, InterruptedException {\n\n        if(Objects.isNull(data)){return 0;}\n\n        String jsonDataStr = JSON.toJSONString(data);\n        int writeBytes = jsonDataStr.getBytes().length;\n        if(writeBytes==0){return 0;}\n\n        Object jsonData = writeInData(isAppend,data,keys);\n        
if(Objects.isNull(jsonData)){return 0;}\n\n        ConcurrentHashMap<String,Object> temp = new ConcurrentHashMap<>(JSONObject.parseObject(JSON.toJSONString(jsonFile),Map.class));\n        int newBytes = writeByte.updateAndGet(x -> x + writeBytes >= MAX_WRITE_BUFFER_LIMIT ? 0 : x + writeBytes);\n\n        //TODO 此处会发生脏读问题，即put进入的Map版本不是当前版本，但是目前没有发现该问题是否会影响到文件写入\n        if(newBytes==0){\n            logger.debug(\"缓冲区已满，刷入磁盘\");\n            syncChannel.put(temp);\n        }\n        return writeBytes;\n    }\n\n    //写入\n    public int write(Object data,String key) throws InterruptedException, FileCacheException {\n       return this.writeKeys(data,key);\n    }\n\n    private Object writeInData(boolean isAppend,Object value,String...keys) throws FileCacheException {\n        String[] finds = Arrays.copyOf(keys, keys.length - 1);\n        Object data = this.get(\"data\");\n        Object temp = this.get(finds);\n        if(temp instanceof JSONArray){\n            try{\n                //元素添加\n                int index = Integer.parseInt(keys[keys.length-1]);\n                if(index==-1){\n                    ((JSONArray) temp).add(value);\n                }else{\n                    String oldValue = ((JSONArray) temp).get(index).toString();\n                    value = isAppend?oldValue+value.toString():value;\n                    if(oldValue.equals(value)){\n                        return null;\n                    }\n                    ((JSONArray) temp).add(index,value);\n                }\n            }catch (Exception e){\n                return null;\n            }\n        }\n        else if(temp instanceof JSONObject){\n            String key = keys[keys.length-1];\n            String oldValue = ((JSONObject) temp).get(key).toString();\n            value = isAppend?oldValue+value:value;\n            if(oldValue.equals(value)){\n                return null;\n            }\n            ((JSONObject) temp).put(key,value);\n        }\n        else{\n        
    throw new FileCacheException(\"the keys is error!\");\n        }\n        return data;\n    }\n\n    /**\n     * 追加内容，支持数组添加内容，添加内容，需要将最后一个key置为-1\n     * @param keys  要查找的key\n     * @param append 追加内容\n     * @return\n     * @throws InterruptedException\n     */\n    public int append(Object append,String...keys) throws InterruptedException, FileCacheException {\n        return writeKeys(true,append,keys);\n    }\n\n    /**\n     * 根据key数组，不断向下获取内容\n     * @param keys\n     * @return\n     */\n    public Object get(String...keys){\n        Object jsonObject = this.get(\"data\");\n        for (String key : keys) {\n            if(jsonObject instanceof JSONObject){\n                jsonObject = ((JSONObject) jsonObject).get(key);\n            }\n            else if(jsonObject instanceof JSONArray){\n                jsonObject = ((JSONArray) jsonObject).get(Integer.parseInt(key));\n            }else{\n                return jsonObject;\n            }\n        }\n        return jsonObject;\n    }\n\n    /**\n     * 获取文件内容\n     * @param key\n     * @return\n     */\n    public Object get(String key){\n        return jsonFile.get(key);\n    }\n\n    /**\n     * 清除已写入的字节数记录\n     */\n    protected void clearWriteBytes(){\n        writeByte.updateAndGet(x->0);\n    }\n\n    /**\n     * 判断当前时间是否超过更新时间\n     * @return boolean\n     */\n    public boolean needAutoSync(){\n        long now = TimeUtil.getCurrentSecond();\n        return now - TimeUtil.getSecond(configFile.getUpdateTime())>autoSyncTime;\n    }\n\n    /**\n     * 强制刷入磁盘\n     */\n    public void forceSync(){\n        if(writeByte.get()==0){\n            logger.info(\"未发生版本变化\");\n            return;\n        }\n        clearWriteBytes();\n        ConcurrentHashMap<String,Object> temp = new ConcurrentHashMap<>(JSONObject.parseObject(JSON.toJSONString(jsonFile),Map.class));\n        try {\n            syncChannel.put(temp);\n        } catch (InterruptedException e) {\n            logger.error(\"自动刷入失败\");\n  
      }\n    }\n\n    /**\n     * 缓冲区刷入\n     * @return\n     */\n    private boolean sync(ConcurrentHashMap<String,Object> take){\n        configFile.updateConfigTime(); //刷新配置文件刷入时间\n        String dir = getFullFilePath();\n        configFile.onlyUpdateTime(take);\n        File file = JsonFileUtil.writeJsonFile(dir, take);\n        logger.debug(\"正在写入{}新版本\",dir);\n        return Objects.isNull(file);\n    }\n\n\n\n\n    public BlockingQueue getFileChannel(){\n        return this.syncChannel;\n    }\n\n    public String getFullFilePath(){\n        return Paths.get(this.configFile.getFilePath(), this.configFile.getFileName()).toString();\n    }\n\n    public String getFilePath(){\n        return this.configFile.getFilePath();\n    }\n\n    public String getFileName(){\n        return this.configFile.getFileName();\n    }\n\n    public long getSyncTime(){\n        return this.autoSyncTime;\n    }\n\n    public FileType getFileType(){\n        return this.configFile.getFileType();\n    }\n\n    class SyncMan implements Runnable{\n\n        @Override\n        public void run() {\n            for(;;){\n                try {\n                    ConcurrentHashMap<String, Object> take = syncChannel.take();\n                    sync(take);\n                } catch (InterruptedException e) {\n                    throw new RuntimeException(e);\n                }\n            }\n        }\n    }\n\n    @Override\n    public int hashCode() {\n        return Objects.hash(configFile);\n    }\n\n    @Override\n    public boolean equals(Object obj) {\n        if(obj instanceof FileCache){\n            if(((FileCache) obj).getFullFilePath().equals(this.getFullFilePath())){\n                return true;\n            }else if(obj.hashCode() == this.hashCode()){\n                return true;\n            }\n        }\n        return false;\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/cache/FileCacheManager.java",
    "content": "package com.genius.cache;\n\nimport org.example.util.TimeUtil;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.util.List;\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.CopyOnWriteArrayList;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.atomic.AtomicLong;\n\n/**\n * @author Genius\n * @date 2023/04/24 17:35\n **/\n\n/**\n * 文件自动刷入管理类，不断监听文件是否需要自动写入\n */\npublic class FileCacheManager {\n\n    private Logger logger = LoggerFactory.getLogger(FileCacheManager.class);\n    private final List<FileCache> fileCaches;\n\n    private AtomicLong sleepTime; //睡眠时间\n\n    private ExecutorService watchPool;  //巡逻线程\n\n    private ExecutorService autoSyncer; //生产者线程\n\n    private volatile Watcher watcher;\n\n    protected FileCacheManager(List<FileCache> fileCaches){\n        this.fileCaches = new CopyOnWriteArrayList<>(fileCaches);\n        initSleepTime();\n        this.watchPool = Executors.newSingleThreadExecutor();\n        this.autoSyncer = Executors.newFixedThreadPool(fileCaches.size());\n    }\n\n    /**\n     * 根据文件缓存的刷盘时间得到一个最小的睡眠时间，减少空转\n     */\n    private void initSleepTime(){\n        AtomicLong minSleepTime = new AtomicLong(Long.MAX_VALUE);\n        fileCaches.forEach(item->{\n            minSleepTime.set(Long.min(minSleepTime.get(), item.getSyncTime()));\n        });\n        this.sleepTime = minSleepTime;\n    }\n\n    public void start(){\n        if(!fileCaches.isEmpty()){\n            if(watcher==null){\n                synchronized (FileCacheManager.class){\n                    if(watcher==null){\n                        watcher = new Watcher();\n                        this.watchPool.submit(watcher);\n                    }\n                }\n            }\n        }\n    }\n\n    public boolean addFileCache(FileCache fileCache){\n        if (this.fileCaches.indexOf(fileCache)==-1) {\n            
fileCaches.add(fileCache);\n            initSleepTime();\n        }\n        return false;\n    }\n\n    public List<FileCache> getRunnableFileCaches(){\n        return this.fileCaches;\n    }\n\n    class Watcher implements Runnable{\n\n        @Override\n        public void run() {\n            for(;;){\n                long now = TimeUtil.getCurrentSecond();\n                for(FileCache cache:fileCaches){\n                    BlockingQueue fileChannel = cache.getFileChannel();\n                    if(fileChannel.isEmpty()){\n                        if(cache.needAutoSync()){\n                            logger.info(\"检测到需要强制刷新的文件 {}\",cache.getFileName());\n                            autoSyncer.submit(new AutoSyncer(cache));\n                        }\n                    }\n                }\n                now -= TimeUtil.getCurrentSecond();\n                if(now<sleepTime.get()){\n                    try {\n                        Thread.sleep((sleepTime.get()-now)*1000);\n                    } catch (InterruptedException e) {\n                        throw new RuntimeException(e);\n                    }\n                }\n            }\n        }\n    }\n\n    class AutoSyncer implements Runnable{\n\n        FileCache fileCache;\n\n        public AutoSyncer(FileCache fileCache){\n            this.fileCache = fileCache;\n        }\n        @Override\n        public void run() {\n            BlockingQueue fileChannel = this.fileCache.getFileChannel();\n            if(fileChannel.isEmpty()){\n                if(fileCache.needAutoSync()) {\n                    fileCache.forceSync();\n                }\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/cache/FileCacheManagerInstance.java",
    "content": "package com.genius.cache;\n\n\nimport com.genius.constpool.GlobalFileCache;\n\nimport java.util.List;\n\n/**\n * @author Genius\n * @date 2023/04/25 22:14\n **/\n\n//FileCacheManager单例实体类\npublic class FileCacheManagerInstance {\n\n    //获取全局的一个fileCaches\n    private static List<FileCache> fileCaches = GlobalFileCache.fileCaches;\n    private static volatile FileCacheManager Instance;\n\n    public static FileCacheManager getInstance(){\n        if(Instance==null){\n            synchronized (FileCacheManagerInstance.class){\n                if(Instance==null){\n                    Instance = new FileCacheManager(fileCaches);\n                }\n            }\n        }\n        return Instance;\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/constpool/GlobalFileCache.java",
    "content": "package com.genius.constpool;\n\n\n\nimport com.genius.cache.FileCache;\n\nimport java.util.List;\nimport java.util.concurrent.CopyOnWriteArrayList;\n\n/**\n * @author Genius\n * @date 2023/04/25 23:03\n **/\n\n/**\n * 全局文件缓存池，用于存放全局文件缓存，便于跨模块调用\n */\npublic class GlobalFileCache {\n\n\n    public static List<FileCache> fileCaches\n            = new CopyOnWriteArrayList<>();\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/exception/FileCacheException.java",
    "content": "package com.genius.exception;\n\n/**\n * @author Genius\n * @date 2023/04/24 00:57\n **/\npublic class FileCacheException extends Exception{\n\n    public FileCacheException(String errorMessage){\n        super(errorMessage);\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/method/FileCondition.java",
    "content": "package com.genius.method;\n\nimport java.nio.file.Path;\n\n@FunctionalInterface\npublic interface FileCondition {\n    boolean condition(Path path);\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/pojo/CommonConfigFile.java",
    "content": "package com.genius.pojo;\n\n/**\n * @author Genius\n * @date 2023/04/26 00:12\n **/\npublic class CommonConfigFile extends ConfigFile<Object> {\n\n    public CommonConfigFile(String filePath, String fileName, Object data) {\n        super(filePath, fileName, data);\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/pojo/ConfigFile.java",
    "content": "package com.genius.pojo;\n\nimport java.time.LocalDateTime;\nimport java.time.format.DateTimeFormatter;\nimport java.util.Map;\n\n/**\n * @author Genius\n * @date 2023/04/21 02:24\n **/\n\n//配置文件的抽象类，只负责构建配置文件最基础的架构，一般不用来存放配置文件本身的内容\npublic abstract class ConfigFile<T> {\n\n    private FileType fileType;\n    private String filePath;\n    private String fileName;\n\n    private T data;     //json文件的结构不是文件的数据 例如 {username:\"\",password:\"\"}\n\n    //上一次更新时间\n    private LocalDateTime updateTime;\n\n    /**\n    * 用于最开始创建配置文件结构的打包\n     * @return Map\n     */\n    public Map<String, Object> packageConfig() {\n       return this.packageConfig(this.data);\n    }\n\n    /**\n     * 用于给外部函数提供的内容打包\n     * @return\n     */\n    public Map<String,Object> packageConfig(T data){\n       updateConfigTime();\n        return Map.of(\n                \"data\",data,\n                \"updateTime\", updateTime.format(DateTimeFormatter.ofPattern(\"yyyy-MM-dd HH:mm:ss\"))\n        );\n    }\n\n    /**\n     * 只进行时间更新操作\n     * @param map\n     * @return\n     */\n    public Map<String,Object> onlyUpdateTime(Map<String,Object> map){\n        updateConfigTime();\n        if (map.containsKey(\"updateTime\")) {\n            map.put(\"updateTime\",updateTime.format(DateTimeFormatter.ofPattern(\"yyyy-MM-dd HH:mm:ss\")));\n        }\n        return map;\n    }\n\n    /**\n     * 更新配置文件类本身的时间\n     */\n    public void updateConfigTime(){\n        updateTime = LocalDateTime.now();\n    }\n\n\n    public ConfigFile() {\n    }\n\n    public ConfigFile(String filePath, String fileName, T data) {\n        this.filePath = filePath;\n        this.fileName = fileName;\n        this.data = data;\n        this.updateTime = LocalDateTime.now();\n        this.fileType = FileType.COMMON;\n    }\n\n    public ConfigFile(String filePath,String fileName,T data,FileType fileType){\n        this.filePath = filePath;\n        this.fileName = fileName;\n        this.data = data;\n        
this.updateTime = LocalDateTime.now();\n        this.fileType = fileType;\n    }\n\n    public String getFilePath() {\n        return this.filePath;\n    }\n\n    public String getFileName() {\n        return this.fileName;\n    }\n\n    public LocalDateTime getUpdateTime() {return this.updateTime;}\n\n    public FileType getFileType(){\n        return this.fileType;\n    }\n\n    //不推荐直接使用\n    public T getData() {\n        return this.data;\n    }\n\n    public void setData(T data){this.data = data;}\n\n    public void setFileName(String fileName) {\n        this.fileName = fileName;\n    }\n\n    public void setFilePath(String filePath){\n        this.filePath = filePath;\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/pojo/FileType.java",
    "content": "package com.genius.pojo;\n\n/**\n * @author Genius\n * @date 2023/04/26 01:48\n **/\npublic enum FileType {\n    LOGGER(\"日志\"),\n    COMMON(\"普通文件\");\n    private final String name;\n    FileType(String name){\n        this.name = name;\n    }\n\n    public String getName(){\n        return this.name;\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/pojo/oss/AliyunOSS.java",
    "content": "package com.genius.pojo.oss;\n\nimport com.aliyun.oss.OSS;\nimport com.aliyun.oss.OSSClientBuilder;\nimport com.aliyun.oss.OSSException;\nimport com.aliyun.oss.model.*;\nimport lombok.AllArgsConstructor;\nimport lombok.Data;\nimport lombok.NoArgsConstructor;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.stereotype.Component;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.nio.file.Paths;\nimport java.util.ArrayList;\nimport java.util.List;\n\n/**\n * @author Genius\n * @date 2023/05/01 16:38\n **/\n\n@Data\n@AllArgsConstructor\n@NoArgsConstructor\n@Component\n@Configuration\npublic class AliyunOSS implements OssAble {\n\n    private Logger logger = LoggerFactory.getLogger(AliyunOSS.class);\n    private String ENDPOINT;\n\n    private String ACCESS_KEY_ID;\n\n    private String ACCESS_KEY_SECRET;\n\n    private long PART_SIZE = 1024*1024;\n\n    public OSS getOssInstance(){\n        return new OSSClientBuilder().build(ENDPOINT,ACCESS_KEY_ID,ACCESS_KEY_SECRET);\n    }\n\n\n    /**\n     * 单文件上传\n     * @param filePath\n     * @param bucketName\n     * @param objectName\n     * @return boolean\n     */\n    @Override\n    public String simpleUpload(String filePath, String bucketName, String objectName) {\n        File file = new File(filePath);\n        try(InputStream inputStream = new FileInputStream(file)) {\n           return this.simpleUpload(inputStream,bucketName,objectName);\n        } catch (IOException e){\n            return null;\n        }\n    }\n\n    @Override\n    public String simpleUpload(InputStream inputStream, String bucketName, String objectName) {\n        OSS ossClient = this.getOssInstance();\n        try{\n            PutObjectRequest putObjectRequest = new PutObjectRequest(bucketName,objectName,inputStream);\n            
ossClient.putObject(putObjectRequest);\n        }catch (OSSException e){\n            loggerError(e);\n            return null;\n        }finally {\n            shutDown(ossClient);\n        }\n\n        return Paths.get(bucketName,objectName).toString();\n    }\n\n    @Override\n    public String multipartUpload(String filePath, String bucketName, String objectName) {\n        OSS ossClient = this.getOssInstance();\n        try {\n            InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest(bucketName,objectName);\n            InitiateMultipartUploadResult upresult = ossClient.initiateMultipartUpload(request);\n            String uploadId = upresult.getUploadId();\n            File sampleFile = new File(filePath);\n            long fileLength = sampleFile.length();\n            int partCount = (int)(fileLength/PART_SIZE);\n            if(fileLength%PART_SIZE!=0){\n                partCount++;\n            }\n            List<PartETag> partETags =  new ArrayList<PartETag>();\n            for(int i=0;i<partCount;i++){\n                long startPos = i * PART_SIZE;\n                long curPartSize = (i+1==partCount)?(fileLength-startPos):PART_SIZE;\n                try(InputStream inputStream = new FileInputStream(sampleFile)){\n                    inputStream.skip(startPos);\n                    UploadPartRequest uploadPartRequest = new UploadPartRequest();\n                    uploadPartRequest.setBucketName(bucketName);\n                    uploadPartRequest.setKey(objectName);\n                    uploadPartRequest.setUploadId(uploadId);\n                    uploadPartRequest.setInputStream(inputStream);\n                    uploadPartRequest.setPartSize(curPartSize);\n                    uploadPartRequest.setPartNumber(i+1);\n                    UploadPartResult uploadPartResult = ossClient.uploadPart(uploadPartRequest);\n                    partETags.add(uploadPartResult.getPartETag());\n                }catch (IOException e){\n     
               return null;\n                }\n            }\n            CompleteMultipartUploadRequest completeMultipartUploadRequest =\n                    new CompleteMultipartUploadRequest(bucketName, objectName, uploadId, partETags);\n\n            CompleteMultipartUploadResult completeMultipartUploadResult = ossClient.completeMultipartUpload(completeMultipartUploadRequest);\n        }catch (OSSException oe){\n            loggerError(oe);\n            return null;\n        }finally {\n            shutDown(ossClient);\n        }\n        return Paths.get(bucketName,objectName).toString();\n    }\n\n\n    @Override\n    public boolean endPointUpload(String filePath, String bucketName, String objectName) {\n        return false;\n    }\n\n    @Override\n    public File downloadFile(String downloadPath, String bucketName, String objectName) {\n        OSS ossClient = this.getOssInstance();\n        File file = new File(downloadPath);\n        try {\n            ossClient.getObject(new GetObjectRequest(bucketName, objectName), new File(downloadPath));\n        }catch (OSSException e){\n            loggerError(e);\n        }finally {\n            shutDown(ossClient);\n        }\n        return file;\n    }\n\n    @Override\n    public boolean deleteFile(String fileName, String bucketName) {\n        return deleteFiles(List.of(fileName),bucketName);\n    }\n\n    @Override\n    public boolean deleteFiles(List<String> fileNames, String bucketName) {\n        OSS ossClient = this.getOssInstance();\n        try{\n            DeleteObjectsRequest deleteObjectsRequest = new DeleteObjectsRequest(bucketName);\n            deleteObjectsRequest.setKeys(fileNames);\n            ossClient.deleteObjects(deleteObjectsRequest);\n        }catch (OSSException e){\n            loggerError(e);\n            return false;\n        }finally {\n            shutDown(ossClient);\n        }\n        return true;\n    }\n\n\n    private void loggerError(OSSException e){\n        
logger.error(\"Error Message:{}\",e.getErrorMessage());\n        logger.error(\"Error Code:{}\",e.getErrorCode());\n        logger.error(\"Request ID:{}\",e.getRequestId());\n        logger.error(\"Host ID:{}\",e.getHostId());\n    }\n\n    private void shutDown(OSS ossClient){\n        if(ossClient != null){\n            ossClient.shutdown();\n        }\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/pojo/oss/OssAble.java",
    "content": "package com.genius.pojo.oss;\n\nimport java.io.File;\nimport java.io.InputStream;\nimport java.util.List;\n\npublic interface OssAble {\n\n    String simpleUpload(String filePath,String bucketName,String objectName);\n\n    String simpleUpload(InputStream inputStream,String bucketName,String objectName);\n\n    String multipartUpload(String filePath,String bucketName,String objectName);\n\n    boolean endPointUpload(String filePath,String bucketName,String objectName);\n\n    File downloadFile(String downloadPath,String bucketName,String objectName);\n\n    boolean deleteFile(String fileName,String bucketName);\n\n    boolean deleteFiles(List<String> fileNames,String bucketName);\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/util/FileUtil.java",
    "content": "package com.genius.util;\n\n/**\n * @author Genius\n * @date 2023/04/20 11:03\n **/\n\n\nimport com.genius.method.FileCondition;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.*;\nimport java.nio.file.attribute.BasicFileAttributes;\n\n/**\n * 文件工具类\n */\npublic class FileUtil {\n\n    private static Logger logger = LoggerFactory.getLogger(FileUtil.class);\n\n    /**\n     * 判断文件是否存在\n     *\n     * @param dir 文件路径需要包含文件名\n     * @return Boolean\n     */\n    public static Boolean isFileExist(String dir) {\n        return new File(dir).exists();\n    }\n\n    /**\n     * 复制文件 比 Files更快\n     *\n     * @param srcPath  源文件路径\n     * @param destPath 目标文件路径\n     * @return file\n     */\n    public static File copyFile(String srcPath, String destPath) {\n        try (\n                FileChannel src = new FileInputStream(srcPath).getChannel();\n                FileChannel dest = new FileInputStream(destPath).getChannel()\n        ) {\n            dest.transferFrom(src, 0, src.size());\n        } catch (IOException e) {\n            logger.error(\"复制文件失败\", e);\n            return null;\n        }\n        return new File(destPath);\n    }\n\n    /**\n     * 删除文件\n     *\n     * @param path     文件路径\n     * @param filename 文件名\n     * @return boolean\n     */\n    public static boolean deleteFile(String path, String filename) {\n        return deleteFile(Paths.get(path, filename).toString());\n    }\n\n    /**\n     * 删除文件\n     *\n     * @param path 文件路径\n     * @return boolean\n     * @throws IOException IOException\n     */\n    public static boolean deleteFile(String path) {\n        try {\n            Files.delete(Paths.get(path));\n        } catch (IOException e) {\n            logger.error(\"删除文件失败\", e);\n            return false;\n        }\n        return true;\n    }\n\n    /**\n     * 
文件目录递归删除\n     *\n     * @param path 文件路径\n     * @return boolean\n     * @throws IOException IOException\n     */\n    public static boolean deleteDirectory(String path) throws IOException {\n        FileCondition condition = file -> !file.toString().startsWith(\"C:\") || !file.toString().startsWith(\"root\");\n        return deleteDirectory0(path, condition, condition, condition);\n    }\n\n    /**\n     * 文件目录递归删除\n     *\n     * @param path      文件路径\n     * @param visit     访问文件时触发该方法\n     * @param preVisit  访问子目录前触发该方法\n     * @param postVisit 访问目录之后触发该方法\n     * @return boolean\n     * @throws IOException IOException\n     */\n    public static boolean deleteDirectory(String path, FileCondition visit, FileCondition preVisit, FileCondition postVisit) throws IOException {\n        return deleteDirectory0(path, visit, preVisit, postVisit);\n    }\n\n    /**\n     * 递归删除\n     *\n     * @param path 文件路径\n     * @return file\n     */\n    private static boolean deleteDirectory0(String path, FileCondition visit, FileCondition preVisit, FileCondition postVisit) throws IOException {\n        Files.walkFileTree(Paths.get(path), new SimpleFileVisitor<Path>() {\n                    // 在访问文件时触发该方法\n                    @Override\n                    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {\n                        if (!visit.condition(file)) {\n                            logger.info(\"文件被跳过: {}\", file);\n                            return FileVisitResult.SKIP_SUBTREE;\n                        }\n                        Files.delete(file);\n                        logger.info(\"文件被删除: {}\", file);\n                        return FileVisitResult.CONTINUE;\n                    }\n\n                    // 在访问子目录前触发该方法\n                    @Override\n                    public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {\n                        if (!preVisit.condition(dir)) 
{\n                            logger.info(\"目录被跳过: {}\", dir);\n                            return FileVisitResult.SKIP_SUBTREE;\n                        }\n                        logger.info(\"目录被访问: {}\", dir);\n                        return FileVisitResult.CONTINUE;\n                    }\n\n                    // 在访问目录之后触发该方法\n                    @Override\n                    public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {\n                        if (!postVisit.condition(dir)) {\n                            logger.info(\"目录被跳过: {}\", dir);\n                            return FileVisitResult.SKIP_SUBTREE;\n                        }\n                        Files.delete(dir);\n                        logger.info(\"目录被删除: {}\", dir);\n                        return FileVisitResult.CONTINUE;\n                    }\n\n                    // 在访问失败时触发该方法\n                    @Override\n                    public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {\n                        // 写一些具体的业务逻辑\n                        return super.visitFileFailed(file, exc);\n                    }\n\n                }\n        );\n        return true;\n    }\n\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/util/JsonFileUtil.java",
    "content": "package com.genius.util;\n\nimport com.alibaba.fastjson.*;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\nimport java.nio.charset.StandardCharsets;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\n\n/**\n * @author Genius\n * @date 2023/04/20 10:53\n **/\n\n/**\n * 操作json文件\n */\npublic class JsonFileUtil {\n\n    private static final Logger logger = LoggerFactory.getLogger(JsonFileUtil.class);\n\n    /* ------------------read json file------------------ */\n\n    /**\n     * 读取json文件\n     * @param filePath 文件路径，应该为全路径\n     * @param fileName 文件名\n     * @return Map<String, Object> 返回Map<String, Object>类型的数据\n     */\n    public static Map<String, Object> readJsonFile(String filePath, String fileName) {\n        return readJsonFile(Paths.get(filePath, fileName).toString());\n    }\n\n    /**\n     * 读取json文件\n     * @param fullPath 文件路径,包含文件名\n     * @return Map<String, Object> 返回Map<String, Object>类型的数据\n     */\n    public static Map<String,Object> readJsonFile(String fullPath){\n        Map<String, Object> maps = null;\n        JSONObject jsonObject = readJsonFileToJSONObject(fullPath);\n        if (Objects.nonNull(jsonObject)) {\n            maps = jsonObject.getInnerMap();\n        }\n        return maps;\n    }\n\n    /**\n     * 读取json文件转为JSONObject\n     * @param fullPath 文件路径,包含文件名\n     * @return JSONObject\n     */\n    public static JSONObject readJsonFileToJSONObject(String fullPath){\n        return readJsonFileToObject(fullPath, JSONObject.class);\n    }\n\n    /** 读取json文件转为对应的类\n     * @param fullPath 文件路径,包含文件名\n     * @param clazz 类\n     * @return T\n     */\n    public static <T> T readJsonFileToObject(String fullPath, Class<T> clazz){\n        T t = null;\n        Path dir = Paths.get(fullPath);\n        try{\n            if (FileUtil.isFileExist(dir.toString())) {\n               
 String res = Files.readString(dir, StandardCharsets.UTF_8);\n                logger.debug(\"读取json文件成功, 文件内容为: {}\", res);\n\n                t = JSON.parseObject(res, clazz);\n            }\n        }catch (Exception e){\n            logger.error(\"读取json文件失败\", e);\n        }\n        return t;\n    }\n\n    /* ------------------write json file------------------ */\n\n    /**\n    * 写入json文件\n    * @param filePath 文件路径，应该为全路径\n    * @param fileName 文件名\n    * @param data 写入Json数据\n    * @return file\n     */\n    public static File writeJsonFile(String filePath, String fileName, Map<String, Object> data) {\n        return writeJsonFile(Paths.get(filePath, fileName).toString(), data);\n    }\n\n    /**\n     * 读取json文件\n     * @param fullPath 文件路径，应该为全路径\n     * @param data 写入Json数据\n     * @return file\n     */\n    public static File writeJsonFile(String fullPath, Map<String, Object> data) {\n       return writeJsonFile0(fullPath, JSON.toJSONString(data,true),true);\n    }\n\n    /**\n     * 写入json文件,如果文件不存在则报错\n     * @param filePath 文件路径，应该为全路径\n     * @param fileName 文件名\n     * @param data 写入Json数据\n     * @return file\n     */\n    public static <T> File writeJsonFileIsExist(String filePath, String fileName, T data) {\n        return writeJsonFileIsExist(Paths.get(filePath, fileName).toString(), data);\n    }\n\n    /**\n     * 写入json文件,如果文件不存在则报错\n     * @param fullPath 文件路径，应该为全路径\n     * @param data 写入Json数据\n     * @return file\n     */\n    public static <T> File writeJsonFileIsExist(String fullPath, T data) {\n        return writeJsonFile0(fullPath, JSON.toJSONString(data,true),false);\n    }\n\n    /**\n     * 将Obj写入json文件\n     * @param filePath 文件路径，应该为全路径\n     * @param fileName 文件名\n     * @param obj 写入Json数据\n     * @return file\n     */\n    public static <T> File writeJsonFile(String filePath, String fileName, T obj) {\n        return writeJsonFile(Paths.get(filePath, fileName).toString(), obj);\n    }\n\n    /**\n     * 将Obj写入json文件\n     * 
@param fullPath 文件路径，应该为全路径\n     * @param obj 写入Json数据\n     * @return file\n     */\n    public static <T> File writeJsonFile(String fullPath, T obj) {\n        return writeJsonFile0(fullPath, JSON.toJSONString(obj,true),true);\n    }\n\n    /**\n     * 写入json文件，如果文件不存在则自动创建\n     * @param fullPath 文件路径，应该为全路径\n     * @param json  写入Json数据\n     * @param autoCreate  是否自动创建文件\n     * @return  file\n     */\n    public static File writeJsonFile0(String fullPath, String json,boolean autoCreate) {\n        Path dir = Paths.get(fullPath);\n        try{\n            if(!FileUtil.isFileExist(dir.toString())&&autoCreate){\n                Files.createFile(dir);\n                logger.debug(\"新建文件{}\",dir);\n            }\n            if (FileUtil.isFileExist(dir.toString())) {\n                Files.writeString(dir, json, StandardCharsets.UTF_8);\n                logger.debug(\"写入json文件成功, 文件内容为: {}\", json);\n            }else {\n                logger.error(\"写入json文件失败, 文件不存在\");\n                return null;\n            }\n        }catch (Exception e){\n            logger.error(\"写入json文件失败\", e);\n            return null;\n        }\n        return dir.toFile();\n    }\n\n    /**\n     * 写入大j对象到Json文件中\n     * @param fullPath  文件路径，应该为全路径\n     * @param data 写入Json数据\n     * @return file\n     */\n    public static File writeBigJsonFile(String fullPath, Map<String, Object> data) {\n        Path dir = Paths.get(fullPath);\n        try{\n            if(!FileUtil.isFileExist(dir.toString())){\n                Files.createFile(dir);\n                logger.info(\"新建文件{}\",dir);\n            }\n            if (FileUtil.isFileExist(dir.toString())) {\n                JSONWriter writer = new JSONWriter(Files.newBufferedWriter(dir, StandardCharsets.UTF_8));\n                writer.startObject();\n                for (Map.Entry<String, Object> stringObjectEntry : data.entrySet()) {\n                    String key = stringObjectEntry.getKey();\n                    Object 
value = stringObjectEntry.getValue();\n                    writer.writeKey(key);\n                    writer.writeValue(value);\n                    logger.info(\"写入json类成功, 类内容为: {}:{}\", key, value);\n                }\n                writer.endObject();\n                writer.close();\n\n            }else {\n                logger.error(\"写入json文件失败, 文件不存在\");\n                return null;\n            }\n        }catch (Exception e){\n            logger.error(\"写入json文件失败\", e);\n            return null;\n        }\n        return dir.toFile();\n    }\n\n    /**\n     * 写入大j对象到Json文件中\n     * @param fullPath  文件路径，应该为全路径\n     * @param Objs 写入大对象的数组\n     * @return file\n     */\n    public static <T> File writeBigJsonFile(String fullPath, List<T> Objs) {\n        Path dir = Paths.get(fullPath);\n        try{\n            if(!FileUtil.isFileExist(dir.toString())){\n                Files.createFile(dir);\n                logger.info(\"新建文件{}\",dir);\n            }\n            if (FileUtil.isFileExist(dir.toString())) {\n                JSONWriter writer = new JSONWriter(Files.newBufferedWriter(dir, StandardCharsets.UTF_8));\n                writer.startArray();\n                for (T obj : Objs) {\n                    writer.writeValue(obj);\n                    logger.info(\"写入json类成功, 类内容为: {}\", obj);\n                }\n                writer.endArray();\n                writer.close();\n\n            }else {\n                logger.error(\"写入json文件失败, 文件不存在\");\n                return null;\n            }\n        }catch (Exception e){\n            logger.error(\"写入json文件失败\", e);\n            return null;\n        }\n        return dir.toFile();\n    }\n}\n"
  },
  {
    "path": "File/src/main/java/com/genius/util/OSSUtil.java",
    "content": "package com.genius.util;\n\n/**\n * @author Genius\n * @date 2023/05/01 16:34\n **/\npublic class OSSUtil {\n}\n"
  },
  {
    "path": "File/src/test/java/com/genius/AppTest.java",
    "content": "package com.genius;\n\nimport junit.framework.Test;\nimport junit.framework.TestCase;\nimport junit.framework.TestSuite;\n\n/**\n * unit test for simple App.\n */\npublic class AppTest \n    extends TestCase\n{\n    /**\n     * Create the test case\n     *\n     * @param testName name of the test case\n     */\n    public AppTest( String testName )\n    {\n        super( testName );\n    }\n\n    /**\n     * @return the suite of tests being tested\n     */\n    public static Test suite()\n    {\n        return new TestSuite( AppTest.class );\n    }\n\n    /**\n     * Rigourous Test :-)\n     */\n    public void testApp()\n    {\n        assertTrue( true );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-architecture</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/Hydra.java",
    "content": "package com.pinecone.hydra;\n\nimport com.pinecone.framework.system.Framework;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.system.architecture.SystemComponentManager;\nimport com.pinecone.hydra.system.HySkeleton;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.MultiComponentSystem;\nimport com.pinecone.hydra.system.SystemSkeleton;\n\nimport java.nio.file.Path;\n\npublic abstract class Hydra extends Framework implements Hydrogen {\n    private HySkeleton                 mComponentManager ;\n\n    protected boolean                  mDebugMode        ;\n    protected Path                     mWorkingPath      ;\n    protected String                   mServiceID        ;\n\n\n    public Hydra(){\n        this( new String[0], null, null );\n    }\n\n    public Hydra( String[] args ){\n        this( args, null, null );\n    }\n\n    public Hydra( String[] args, String szName ){\n        this( args, szName, null );\n    }\n\n    public Hydra( String[] args, CascadeSystem parent ){\n        this( args, null, parent );\n    }\n\n    public Hydra( String[] args, String szName, CascadeSystem parent, HySkeleton manager ){\n        super( args, szName, parent );\n\n        if( manager == null ) {\n            manager = new SystemSkeleton( this );\n        }\n        this.mComponentManager = manager;\n    }\n\n    public Hydra( String[] args, String szName, CascadeSystem parent ){\n        this( args, szName, parent, null );\n    }\n\n\n    protected void prepare_system_skeleton() {\n\n    }\n\n\n    @Override\n    public HySkeleton getComponentManager() {\n        return this.mComponentManager;\n    }\n\n    @Override\n    public MultiComponentSystem apply( SystemComponentManager manager ) {\n        this.mComponentManager = (HySkeleton)manager;\n        return this;\n    }\n\n    @Override\n    public boolean isDebugMode() {\n        return this.mDebugMode;\n    }\n\n    @Override\n    public 
Path getWorkingPath() {\n        return this.mWorkingPath;\n    }\n\n    @Override\n    public String getServiceID() {\n        return this.mServiceID;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/Hydradom.java",
    "content": "package com.pinecone.hydra;\n\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.hydra.system.HySkeleton;\nimport com.pinecone.hydra.system.subsystem.CentralMicroSystemCabinet;\nimport com.pinecone.hydra.system.subsystem.KernelMicroSystemCabinet;\nimport com.pinecone.hydra.system.types.HydraKingdom;\n\npublic abstract class Hydradom extends Hydra implements HydraKingdom {\n    protected KernelMicroSystemCabinet    mKernelMicroSystemCabinet;\n\n\n    public Hydradom(){\n        this( new String[0], null, null );\n    }\n\n    public Hydradom( String[] args ){\n        this( args, null, null );\n    }\n\n    public Hydradom( String[] args, String szName ){\n        this( args, szName, null );\n    }\n\n    public Hydradom( String[] args, CascadeSystem parent ){\n        this( args, null, parent );\n    }\n\n    public Hydradom( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    public Hydradom( String[] args, String szName, CascadeSystem parent, HySkeleton manager ){\n        super( args, szName, parent, manager );\n    }\n\n    @Override\n    protected void prepare_system_skeleton() {\n        this.mKernelMicroSystemCabinet = new CentralMicroSystemCabinet( this );\n\n        this.getComponentManager().addComponent( this.mKernelMicroSystemCabinet );\n    }\n\n    public KernelMicroSystemCabinet getKernelMicroSystemCabinet() {\n        return this.mKernelMicroSystemCabinet;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Container.java",
    "content": "package com.pinecone.hydra.deploy;\n\npublic interface Container extends Integration {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Deploy.java",
    "content": "package com.pinecone.hydra.deploy;\n\nimport com.pinecone.hydra.device.Deployment;\n\npublic interface Deploy extends Deployment {\n    String getStatus();\n\n    void setStatus(String status);\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Integration.java",
    "content": "package com.pinecone.hydra.deploy;\n\npublic interface Integration extends Deploy {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Namespace.java",
    "content": "package com.pinecone.hydra.deploy;\n\npublic interface Namespace {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/PhysicalHost.java",
    "content": "package com.pinecone.hydra.deploy;\n\npublic interface PhysicalHost extends Server {\n    String getHardwareSpecs();\n\n    void setHardwareSpecs(String hardwareSpecs);\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Quick.java",
    "content": "package com.pinecone.hydra.deploy;\n\npublic interface Quick extends Integration {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/Server.java",
    "content": "package com.pinecone.hydra.deploy;\n\npublic interface Server extends Deploy {\n    String getName();\n    void setName(String name);\n\n    String getIpAddress();\n    void setIpAddress(String ipAddress);\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/deploy/VirtualMachine.java",
    "content": "package com.pinecone.hydra.deploy;\n\npublic interface VirtualMachine extends Server {\n\n    PhysicalHost getAffiliateHost();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/device/Deployment.java",
    "content": "package com.pinecone.hydra.device;\n\npublic interface Deployment extends Device {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/device/Device.java",
    "content": "package com.pinecone.hydra.device;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Device extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/device/Disk.java",
    "content": "package com.pinecone.hydra.device;\n\npublic interface Disk extends Device {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/express/Deliver.java",
    "content": "package com.pinecone.hydra.express;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Deliver extends Pinenut {\n    String   getName();\n\n    Express  getExpress();\n\n    void toDispatch( Package that ) throws Exception;\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/express/Express.java",
    "content": "package com.pinecone.hydra.express;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Express extends Pinenut {\n    Deliver  recruit ( String szName );\n\n    Express  register( Deliver deliver );\n\n    Express  fired   ( Deliver deliver );\n\n    boolean  hasOwnDeliver( Deliver deliver );\n\n    boolean  hasOwnDeliver( String deliverName );\n\n    int      size    ();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/express/Package.java",
    "content": "package com.pinecone.hydra.express;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Package extends Pinenut {\n    Deliver getDeliver();\n\n    String  getConsignee();\n\n    Package entrust( Deliver deliver );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ArchModularizedSubsystem.java",
    "content": "package com.pinecone.hydra.system;\n\nimport org.slf4j.Logger;\n\nimport com.pinecone.framework.system.ModularizedSubsystem;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\n\npublic abstract class ArchModularizedSubsystem implements ModularizedSubsystem {\n\n    protected Hydrogen             mPrimarySystem;\n\n    protected String               mszName;\n\n    protected Logger               mLogger;\n    protected PatriarchalConfig    mSubsystemConfig;\n\n    public ArchModularizedSubsystem( Hydrogen primarySystem, String name, PatriarchalConfig config ) {\n        this.mPrimarySystem    = primarySystem;\n        this.mszName           = name;\n        this.mLogger           = primarySystem.getTracerScope().newLogger( name );\n        this.mSubsystemConfig  = config;\n    }\n\n    @Override\n    public PatriarchalConfig getSubsystemConfig() {\n        return this.mSubsystemConfig;\n    }\n\n    @Override\n    public Hydrogen parentSystem() {\n        return this.mPrimarySystem;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    protected abstract void traceWelcomeInfo() ;\n\n    @Override\n    public void release() {\n\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ArchSystemAutoAssembleComponent.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.architecture.CascadeComponent;\nimport com.pinecone.framework.unit.affinity.ObjectOverrider;\nimport com.pinecone.framework.unit.affinity.RecursiveUnitOverrider;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport org.slf4j.Logger;\n\npublic abstract class ArchSystemAutoAssembleComponent extends ArchSystemCascadeComponent implements Slf4jTraceable {\n    protected Logger                             mLogger;\n    protected ObjectOverrider<String, Object >   mObjectOverrider        ;\n    protected DynamicFactory                     mUniformFactory         ;\n\n\n    protected ArchSystemAutoAssembleComponent(Namespace name, Hydrogen system, SystemCascadeComponentManager manager, CascadeComponent parent ) {\n        super( name, system, manager, parent );\n\n        this.mLogger             = system.getTracerScope().newLogger( this.className() );\n        this.mObjectOverrider    = new RecursiveUnitOverrider<>();\n        this.mUniformFactory     = new GenericDynamicFactory( system.getTaskManager().getClassLoader() );\n    }\n\n    protected ArchSystemAutoAssembleComponent( Namespace name, SystemCascadeComponentManager manager, CascadeComponent parent ) {\n        this( name, manager.getSystem(), manager, parent );\n    }\n\n    protected ArchSystemAutoAssembleComponent( Namespace name, SystemCascadeComponentManager manager ) {\n        this( name, manager, null );\n    }\n\n    public DynamicFactory getSharedUniformFactory() {\n        return this.mUniformFactory;\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    public ObjectOverrider<String, Object> getObjectOverrider() {\n        return this.mObjectOverrider;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ArchSystemCascadeComponent.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.architecture.ArchCascadeComponent;\nimport com.pinecone.framework.system.architecture.CascadeComponent;\nimport com.pinecone.framework.util.name.Namespace;\n\npublic abstract class ArchSystemCascadeComponent extends ArchCascadeComponent implements HyComponent {\n    private Hydrogen mSystem;\n\n    protected ArchSystemCascadeComponent( Namespace name, Hydrogen system, SystemCascadeComponentManager manager, CascadeComponent parent ) {\n        super( name, manager, parent );\n        this.mSystem = system;\n    }\n\n    protected ArchSystemCascadeComponent( Namespace name, SystemCascadeComponentManager manager, CascadeComponent parent ) {\n        this( name, manager.getSystem(), manager, parent );\n    }\n\n    protected ArchSystemCascadeComponent( Namespace name, SystemCascadeComponentManager manager ) {\n        this( name, manager, null );\n    }\n\n    @Override\n    public SystemCascadeComponentManager getComponentManager() {\n        return (SystemCascadeComponentManager) super.getComponentManager();\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return this.mSystem;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ArchSystemCascadeComponentManager.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.architecture.ArchCascadeComponentManager;\n\npublic abstract class ArchSystemCascadeComponentManager extends ArchCascadeComponentManager implements SystemCascadeComponentManager {\n    protected Hydrogen mSystem;\n\n    protected ArchSystemCascadeComponentManager( Hydrogen system ){\n        super();\n        this.mSystem = system;\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return this.mSystem;\n    }\n\n    @Override\n    public SystemCascadeComponent getRootComponentByFullName(String fullName) {\n        return (SystemCascadeComponent)super.getRootComponentByFullName(fullName);\n    }\n\n    @Override\n    public SystemCascadeComponent getComponentByFullName(String fullName) {\n        return (SystemCascadeComponent)super.getComponentByFullName(fullName);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/BlockSystem.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface BlockSystem extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/DistributedSystem.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface DistributedSystem extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/FederalSystem.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface FederalSystem extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/HierarchySystem.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface HierarchySystem extends Pinenut {\n\n    HyHierarchy getServiceArch();\n\n    boolean isTopmostArchy();\n\n    HyHierarchy getTopmostArchy();\n\n    boolean isBottommostArchy();\n\n    HyHierarchy getBottommostArchy();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/HyComponent.java",
    "content": "package com.pinecone.hydra.system;\n\npublic interface HyComponent extends SystemCascadeComponent {\n    @Override\n    Hydrogen getSystem();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/HyHierarchy.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface HyHierarchy extends Pinenut {\n    String getName();\n\n    boolean isDominantClass();\n\n    boolean isWorkerClass();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/HySkeleton.java",
    "content": "package com.pinecone.hydra.system;\n\npublic interface HySkeleton extends SystemCascadeComponentManager {\n    Hydrogen getSystem();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/Hydrogen.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.Pinecore;\nimport com.pinecone.hydra.system.component.Slf4jTracerScope;\n\nimport java.nio.file.Path;\n\npublic interface Hydrogen extends Pinecore, ScopedSystem, MultiComponentSystem {\n\n    @Override\n    HySkeleton getComponentManager();\n\n    String getServiceID();\n\n    Path getWorkingPath();\n\n    boolean isDebugMode();\n\n    Slf4jTracerScope getTracerScope();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/MultiComponentSystem.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.architecture.SystemComponentManager;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MultiComponentSystem extends Pinenut {\n    SystemComponentManager getComponentManager();\n\n    MultiComponentSystem apply( SystemComponentManager manager );\n}"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ScopedSystem.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.MultiScopeMap;\n\npublic interface ScopedSystem extends Pinenut {\n    MultiScopeMap<String, Object > getGlobalConfigScope();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/SystemCascadeComponent.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.architecture.CascadeComponent;\nimport com.pinecone.framework.system.architecture.SystemComponent;\n\npublic interface SystemCascadeComponent extends CascadeComponent, SystemComponent {\n    @Override\n    SystemCascadeComponentManager getComponentManager();\n\n    default Hydrogen getSystem() {\n        return this.getComponentManager().getSystem();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/SystemCascadeComponentManager.java",
    "content": "package com.pinecone.hydra.system;\n\nimport com.pinecone.framework.system.architecture.CascadeComponentManager;\nimport com.pinecone.framework.system.architecture.SystemComponentManager;\n\npublic interface SystemCascadeComponentManager extends SystemComponentManager, CascadeComponentManager {\n    Hydrogen getSystem();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/SystemSkeleton.java",
    "content": "package com.pinecone.hydra.system;\n\npublic class SystemSkeleton extends ArchSystemCascadeComponentManager implements HySkeleton {\n    public SystemSkeleton( Hydrogen system ){\n        super( system );\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return this.mSystem;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/centrum/CentralControlSubsystem.java",
    "content": "package com.pinecone.hydra.system.centrum;\n\nimport com.pinecone.framework.system.ModularizedSubsystem;\nimport com.pinecone.framework.system.regime.arch.Lord;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\n\npublic interface CentralControlSubsystem extends ModularizedSubsystem, Lord, Slf4jTraceable {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/centrum/Centrum.java",
    "content": "package com.pinecone.hydra.system.centrum;\n\n/**\n *  Pinecone Ursus For Java, Uniformity Centralized Metasystem\n *  Author: Harald.E (Dragon King)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Uniformity Centralized Metasystem\n *  统一中央集权元系统\n *  *****************************************************************************************\n *  1). Top-level abstraction and aggregation of large-scale distributed systems,\n *      enabling centralized global control-ability and simplified manipulation of complex systems.\n *  2). Meta-level aggregation of global resources, tasks, services, data, and intelligence,\n *      aimed at centralized planning and large-scale absolute control.\n *  3). Core architecture: atomic-level design of meta-information, control scheduling, intelligence,\n *      and auditing, centralized aggregation, and highest-level external abstraction.\n *  *****************************************************************************************\n *  1). 大型分布式系统的顶级抽象化、汇总化，面向中央全局可控，大型系统简单操纵。\n *  2). 面向全局资源、任务、服务、数据、情报等元汇总，全局统筹规划、大规模绝对控制。\n *  3). 核心架构：元信息、控制调度、情报、审计原子化，中央汇总，对外最高抽象。\n *  *****************************************************************************************\n */\npublic interface Centrum extends UniformCentralSystem, Metasystem {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/centrum/Metasystem.java",
    "content": "package com.pinecone.hydra.system.centrum;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Metasystem extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/centrum/UniformCentralSystem.java",
    "content": "package com.pinecone.hydra.system.centrum;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.architecture.Component;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.DistributedSystem;\nimport com.pinecone.hydra.system.HierarchySystem;\nimport com.pinecone.hydra.system.imperium.ImperiumPrivy;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.ulf.util.guid.i64.GuidAllocator72;\n\npublic interface UniformCentralSystem extends HierarchySystem, DistributedSystem {\n\n    GuidAllocator getSystemGuidAllocator();\n\n    GuidAllocator72 getSystemGuidAllocator72();\n\n    Component imageLoader();\n\n    KernelObjectConfig fundamentalKernelObjectConfig();\n\n    ImperiumPrivy imperiumPrivy();\n\n\n    static UniformCentralSystem evalCentralSystem( Processum that ) {\n        if ( that instanceof UniformCentralSystem ) {\n            return (UniformCentralSystem) that;\n        }\n\n        RuntimeSystem rs = that.parentSystem();\n        if ( rs instanceof UniformCentralSystem ) {\n            return (UniformCentralSystem) rs;\n        }\n\n        throw new IllegalArgumentException( \"Not in UniformCentralSystem family.\" );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/ComponentInitializationException.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class ComponentInitializationException extends Exception implements Pinenut {\n    public ComponentInitializationException    () {\n        super();\n    }\n\n    public ComponentInitializationException    ( String message ) {\n        super(message);\n    }\n\n    public ComponentInitializationException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ComponentInitializationException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected ComponentInitializationException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/GenericResourceDispenserCenter.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport com.pinecone.framework.system.construction.StructureInstanceDispenser;\nimport com.pinecone.framework.system.construction.UnifyCentralInstanceDispenser;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic class GenericResourceDispenserCenter extends ArchSystemCascadeComponent implements ResourceDispenserCenter {\n    protected StructureInstanceDispenser mInstanceDispenser;\n\n    public GenericResourceDispenserCenter(Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, system.getComponentManager(), parent );\n\n        this.mInstanceDispenser = new UnifyCentralInstanceDispenser();\n    }\n\n    public GenericResourceDispenserCenter(Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public GenericResourceDispenserCenter( Hydrogen system ) {\n        this( system, null );\n    }\n\n    @Override\n    public StructureInstanceDispenser getInstanceDispenser() {\n        return this.mInstanceDispenser;\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return super.getSystem();\n    }\n}"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/GenericTracerScope.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class GenericTracerScope extends ArchSystemCascadeComponent implements Slf4jTracerScope {\n    public GenericTracerScope(Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, system.getComponentManager(), parent );\n    }\n\n    public GenericTracerScope(Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public GenericTracerScope( Hydrogen system ) {\n        this( system, null );\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return super.getSystem();\n    }\n\n    @Override\n    public String getLoggerName( String name ){\n        return String.format( \"%s<%s>\", this.getSystem().className(), name );\n    }\n\n    @Override\n    public Logger newLogger( String name ){\n        return LoggerFactory.getLogger( this.getLoggerName( name ) );\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/LogStatuses.java",
    "content": "package com.pinecone.hydra.system.component;\n\npublic final class LogStatuses {\n    public static final String StatusStart        = \"Start\";\n    public static final String StatusDone         = \"Done\";\n    public static final String StatusVitalization = \"Vitalization\";\n    public static final String StatusVitalized    = \"Vitalized\";\n    public static final String StatusTermination  = \"Termination\";\n    public static final String StatusTerminated   = \"Terminated\";\n    public static final String StatusStandby      = \"Standby\";\n    public static final String StatusReady        = \"Ready\";\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/LoggingConfigurator.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport java.util.Map;\n\nimport ch.qos.logback.classic.Level;\nimport ch.qos.logback.classic.Logger;\nimport ch.qos.logback.classic.LoggerContext;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.system.ConformitySystem;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\n\npublic class LoggingConfigurator implements TracerConfigurator {\n\n    protected org.slf4j.Logger log =  org.slf4j.LoggerFactory.getLogger( this.getClass() );\n\n    protected ConformitySystem mConformitySystem;\n\n    public LoggingConfigurator( ConformitySystem conformitySystem ) {\n        this.mConformitySystem = conformitySystem;\n    }\n\n    @Override\n    public void apply() {\n        PatriarchalConfig tracer = this.mConformitySystem.getSystemConfig().getChild( \"Tracer\" );\n        if ( tracer != null ) {\n            PatriarchalConfig logging = tracer.getChild( \"Logging\" );\n            if ( logging != null ) {\n                PatriarchalConfig levels = logging.getChild( \"Levels\" );\n                if ( levels instanceof JSONConfig ) {\n                    JSONConfig joLevels = (JSONConfig) levels;\n                    apply( joLevels );\n                    log.info( \"[Lifecycle] Set logging levels. 
<Done>\" );\n                }\n            }\n        }\n    }\n\n    public static void apply( Map<String, Object> levelMap ) {\n        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();\n\n        for (Map.Entry<String, Object> entry : levelMap.entrySet()) {\n            String loggerName = entry.getKey();\n            String levelStr   = entry.getValue().toString();\n\n            Level level = Level.toLevel(levelStr, Level.INFO);\n\n            if ( \"root\".equalsIgnoreCase(loggerName) ) {\n                context.getLogger(Logger.ROOT_LOGGER_NAME).setLevel(level);\n            }\n            else {\n                context.getLogger(loggerName).setLevel(level);\n            }\n        }\n    }\n}"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/ResourceDispenserCenter.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport com.pinecone.framework.system.construction.StructureInstanceDispenser;\nimport com.pinecone.hydra.system.HyComponent;\n\npublic interface ResourceDispenserCenter extends HyComponent {\n    StructureInstanceDispenser getInstanceDispenser() ;\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/Slf4jTraceable.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport org.slf4j.Logger;\n\npublic interface Slf4jTraceable extends Pinenut {\n    Logger getLogger();\n\n    default Slf4jTraceable infoLifecycle( String szWhat, String szStateOrExtra ) {\n        this.getLogger().info( \"[Lifecycle] [{}] <{}>\", szWhat, szStateOrExtra );\n        return this;\n    }\n\n    default Slf4jTraceable infoLifecycle( String szStateOrExtra ) {\n        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();\n        return this.infoLifecycle( stackTraceElements[ 2 ].getMethodName(), szStateOrExtra );\n    }\n\n    default Slf4jTraceable infoLifecycleDone( String szWhat ) {\n        return this.infoLifecycle( szWhat, LogStatuses.StatusDone );\n    }\n\n    default Slf4jTraceable infoLifecycleInitializationDone() {\n        return this.infoLifecycle( this.className() + \"::Constructor\", LogStatuses.StatusDone );\n    }\n\n    default Slf4jTraceable infoCriticalOperation( String szWhat, String szStateOrExtra ) {\n        this.getLogger().info( \"[CriticalOperation] [{}] <{}>\", szWhat, szStateOrExtra );\n        return this;\n    }\n\n    default Slf4jTraceable warnSimple( String szStateOrExtra ) {\n        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();\n        this.getLogger().warn( \"[{}] <{}>\", stackTraceElements[ 2 ].getMethodName(), szStateOrExtra );\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/Slf4jTracerScope.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic interface Slf4jTracerScope extends TracerScope {\n    @Override\n    default Logger newLogger( String name ){\n        return LoggerFactory.getLogger( this.getLoggerName( name ) );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/TracerConfigurator.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TracerConfigurator extends Pinenut {\n\n    void apply();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/component/TracerScope.java",
    "content": "package com.pinecone.hydra.system.component;\n\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\n\npublic interface TracerScope extends HyComponent {\n    @Override\n    Hydrogen getSystem();\n\n    String getLoggerName( String name );\n\n    Object newLogger( String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/identifier/KOPathResolver.java",
    "content": "package com.pinecone.hydra.system.identifier;\n\nimport com.pinecone.framework.util.name.path.BasicPathResolver;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic class KOPathResolver extends BasicPathResolver {\n    public KOPathResolver( KernelObjectConfig config ) {\n        super( config.getPathNameSeparator(), config.getPathNameSepRegex() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/imperium/ImperiumPrivy.java",
    "content": "package com.pinecone.hydra.system.imperium;\n\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.ko.runtime.CentralizedRuntimeInstrument;\n\n/**\n *  Pinecone Ursus For Java Imperium Privy Council\n *  Author: Harald.E (Dragon King), Ken\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Imperium Privy Council\n *  Information and control are separated in this architecture, where the Privy serves as a central intelligence marshaling database,\n *  and in principle, does not have direct control privilege over specific objects.\n *  Its core functions are to centralize the collection of information from all objects, facilitating uniformed orchestration, analysis, and control.\n *\n *  Imperium Privy Council (枢密院，掌文书、行咨询)，\n *  信息、控制分离架构，枢密是中央情报编组数据库，原则上没有具体对象的实际控制权。\n *  其核心职能为：中心化收集所有对象的信息，便于统一统筹决策、分析和控制。\n *\n *  e.g. \\Device\\HarddiskVolume3\\Users\\dragonking\\AppData\\Local\\ => {name: xxx, handle: 123, typeId: 37}\n *  e.g. /proc/137/status                                       => {name: scsi_eh_26, State: S, Pid: 137}\n *  *****************************************************************************************\n */\npublic interface ImperiumPrivy extends HyComponent {\n\n    CentralizedRuntimeInstrument getExpressInstrument();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/imperium/KernelObjectRootMountPoint.java",
    "content": "package com.pinecone.hydra.system.imperium;\n\npublic enum KernelObjectRootMountPoint {\n    KernelConfig  ( \"KernelConfig\", KernelRootMountPoint.Config.getConfigSection() + \".Kernel\", KernelRootMountPoint.Config.getMountPoint() + \"/kernel\" ),\n    Registry      ( \"Registry\", KernelRootMountPoint.Config.getConfigSection() + \".Registry\", KernelRootMountPoint.Config.getMountPoint() + \"/registry\" ),\n\n    TaskMeta      ( \"TaskMeta\", KernelRootMountPoint.Meta.getConfigSection() + \".Task\", KernelRootMountPoint.Meta.getMountPoint() + \"/task\" ),\n    ServiceMeta   ( \"ServiceMeta\", KernelRootMountPoint.Meta.getConfigSection() + \".Service\", KernelRootMountPoint.Meta.getMountPoint() + \"/service\" ),\n    DeployMeta    ( \"DeployMeta\", KernelRootMountPoint.Meta.getConfigSection() + \".Deploy\", KernelRootMountPoint.Device.getMountPoint() + \"/deploy\" ),\n\n    SysImages     ( \"SysImages\", KernelRootMountPoint.System.getConfigSection() + \".Images\", KernelRootMountPoint.System.getMountPoint() + \"/public/global/exe/images\" )\n    ;\n\n\n    private final String name;\n\n    private final String configSection;\n\n    private final String mountPoint;\n\n    KernelObjectRootMountPoint( String name, String configSection, String mountPoint ) {\n        this.name          = name;\n        this.configSection = configSection;\n        this.mountPoint    = mountPoint;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public String getConfigSection() {\n        return this.configSection;\n    }\n\n    public String getMountPoint() {\n        return this.mountPoint;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/imperium/KernelPrivyFileSystemConstants.java",
    "content": "package com.pinecone.hydra.system.imperium;\n\npublic final class KernelPrivyFileSystemConstants {\n    public static final String Root = \"$\";\n    public static final String NomenclatureSeparator = \".\";\n    public static final String FileSystemRoot = Root + NomenclatureSeparator + \"KPFS\"; // $.KPFS\n    public static final String PathNameSeparator = \"/\";\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/imperium/KernelRootMountPoint.java",
    "content": "package com.pinecone.hydra.system.imperium;\n\npublic enum KernelRootMountPoint {\n    Config                   ( \"Config\", KernelPrivyFileSystemConstants.FileSystemRoot + \".Config\", \"conf\" ),\n    Device                   ( \"Device\", KernelPrivyFileSystemConstants.FileSystemRoot + \".Device\", \"dev\" ),\n    UserHome                 ( \"UserHome\", KernelPrivyFileSystemConstants.FileSystemRoot + \".UserHome\", \"home\" ),\n    Mount                    ( \"Mount\", KernelPrivyFileSystemConstants.FileSystemRoot + \".Mount\", \"mnt\" ),\n    System                   ( \"System\", KernelPrivyFileSystemConstants.FileSystemRoot + \".System\", \"sys\" ),\n    Process                  ( \"Process\", KernelPrivyFileSystemConstants.FileSystemRoot + \".Process\", \"proc\" ),\n    Variable                 ( \"Variable\", KernelPrivyFileSystemConstants.FileSystemRoot + \".Variable\", \"var\" ),\n    Meta                     ( \"Meta\", KernelPrivyFileSystemConstants.FileSystemRoot + \".Meta\", \"meta\" ),\n    ;\n\n\n    private final String name;\n\n    private final String configSection;\n\n    private final String mountPoint;\n\n    KernelRootMountPoint( String name, String configSection, String mountPoint ) {\n        this.name          = name;\n        this.configSection = configSection;\n        this.mountPoint    = mountPoint;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public String getConfigSection() {\n        return this.configSection;\n    }\n\n    public String getMountPoint() {\n        return this.mountPoint;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/ArchKernelObjectConfig.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic abstract class ArchKernelObjectConfig implements KernelObjectConfig {\n    protected String mszPathNameSeparator = KernelObjectConstants.PathNameSeparator;\n\n    protected String mszFullNameSeparator = KernelObjectConstants.FullNameSeparator;\n\n    protected String mszPathNameSepRegex  = KernelObjectConstants.PathNameSepRegex;\n\n    protected String mszFullNameSepRegex  = KernelObjectConstants.FullNameSepRegex;\n\n    protected int    mnShortPathLength    = KernelObjectConstants.ShortPathLength;\n\n    protected ArchKernelObjectConfig() {\n\n    }\n\n    public ArchKernelObjectConfig( @Nullable Map<String, Object> config ){\n        this();\n        if ( config == null ) {\n            return;\n        }\n        this.mszPathNameSeparator = (String) config.getOrDefault(\"PathNameSeparator\", KernelObjectConstants.PathNameSeparator);\n        this.mszFullNameSeparator = (String) config.getOrDefault(\"FullNameSeparator\", KernelObjectConstants.FullNameSeparator);\n        this.mszPathNameSepRegex  = (String) config.getOrDefault(\"PathNameSepRegex\", KernelObjectConstants.PathNameSepRegex);\n        this.mszFullNameSepRegex  = (String) config.getOrDefault(\"FullNameSepRegex\", KernelObjectConstants.FullNameSepRegex);\n        this.mnShortPathLength    = ( (Number) config.getOrDefault(\"ShortPathLength\", KernelObjectConstants.ShortPathLength) ).intValue();\n    }\n\n    @Override\n    public String getPathNameSeparator() {\n        return this.mszPathNameSeparator;\n    }\n\n    @Override\n    public String getFullNameSeparator() {\n        return this.mszFullNameSeparator;\n    }\n\n    @Override\n    public String getPathNameSepRegex() {\n        return this.mszPathNameSepRegex;\n    }\n\n    @Override\n    public String getFullNameSepRegex() {\n        return this.mszFullNameSepRegex;\n    }\n\n    @Override\n    public int 
getShortPathLength() {\n        return this.mnShortPathLength;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/CascadeInstrument.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.framework.system.regime.Instrument;\nimport com.pinecone.framework.system.regimentation.UniformCascadeNodus;\nimport com.pinecone.framework.util.name.Namespace;\n\npublic interface CascadeInstrument extends UniformCascadeNodus, Instrument {\n    String EmptySuperiorPathScope = \"\";\n\n    @Override\n    CascadeInstrument parent();\n\n    void setParent( CascadeInstrument parent );\n\n    @Override\n    default boolean isRoot() {\n        return this.parent() == null;\n    }\n\n    default CascadeInstrument root() {\n        return (CascadeInstrument) UniformCascadeNodus.super.root();\n    }\n\n    @Override\n    Namespace getTargetingName();\n\n    @Override\n    void setTargetingName( Namespace name );\n\n    @Override\n    default void setTargetingName( String name ) {\n        UniformCascadeNodus.super.setTargetingName( name );\n    }\n\n    @Override\n    default String getSimpleName() {\n        return this.getTargetingName().getSimpleName();\n    }\n\n    @Override\n    default String getFullName() {\n        return this.getTargetingName().getFullName();\n    }\n\n    /**\n     * Superior Path Scope (Specialized namespace)\n     * 上级键空间（专门命名空间）\n     * e.g. `/proc`/pid/name => Scope : `/proc`\n     * @return ParentPathScope\n     */\n    String getSuperiorPathScope();\n\n    void applySuperiorPathScope( String superiorPathScope );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/CascadeKOTreeInstrument.java",
    "content": "package com.pinecone.hydra.system.ko;\n\npublic interface CascadeKOTreeInstrument extends CascadeKernelObjectInstrument, KernelObjectTreeInstrument {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/CascadeKernelObjectInstrument.java",
    "content": "package com.pinecone.hydra.system.ko;\n\npublic interface CascadeKernelObjectInstrument extends KernelObjectInstrument, CascadeInstrument {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/InstrumentException.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class InstrumentException extends Exception implements Pinenut {\n\n    public InstrumentException() {\n        super();\n    }\n\n    public InstrumentException( String message ) {\n        super(message);\n    }\n\n    public InstrumentException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public InstrumentException( Throwable cause ) {\n        super(cause);\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObject.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface KernelObject extends Pinenut {\n\n    GUID getGuid();\n\n    String objectFunctionName();\n\n    String objectCategoryName();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObjectConfig.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface KernelObjectConfig extends Pinenut {\n    String getPathNameSeparator();\n\n    String getFullNameSeparator();\n\n    String getPathNameSepRegex();\n\n    String getFullNameSepRegex();\n\n    int getShortPathLength();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObjectConstants.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.hydra.unit.imperium.ImperialTreeConstants;\n\npublic final class KernelObjectConstants {\n    public static String PathNameSeparator = \"/\";\n\n    public static String FullNameSeparator = \".\";\n\n    public static String PathNameSepRegex  = \"/\";\n\n    public static String FullNameSepRegex  = \"\\\\.\";\n\n    public static int    ShortPathLength   = ImperialTreeConstants.DefaultShortPathLength;\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObjectInstrument.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.framework.system.regime.Instrument;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\npublic interface KernelObjectInstrument extends Instrument {\n    GuidAllocator getGuidAllocator();\n\n    KernelObjectConfig getConfig();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/KernelObjectTreeInstrument.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic interface KernelObjectTreeInstrument extends KernelObjectInstrument {\n\n    ImperialTree getMasterTrieTree();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/MetaPersistenceException.java",
    "content": "package com.pinecone.hydra.system.ko;\n\npublic class MetaPersistenceException extends InstrumentException {\n\n    public MetaPersistenceException() {\n        super();\n    }\n\n    public MetaPersistenceException( String message ) {\n        super(message);\n    }\n\n    public MetaPersistenceException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public MetaPersistenceException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/QueryableInstrument.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\n\npublic interface QueryableInstrument extends KernelObjectInstrument {\n\n    String getPath( GUID objectGuid );\n\n    String querySystemKernelObjectPath( GUID objectGuid ) ;\n\n    GUID queryGUIDByPath( String path );\n\n    EntityNode queryNode( String path );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/UOIUtils.java",
    "content": "package com.pinecone.hydra.system.ko;\n\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.framework.util.uoi.UniformObjectLoaderFactory;\n\npublic final class UOIUtils {\n    public static UOI createJavaClass( String className, String resourceDetail ) {\n        return UOI.create(\n                String.format( \"%s://%s/%s\", UniformObjectLoaderFactory.DefaultJavaClassType, resourceDetail, className )\n        );\n    }\n\n    public static UOI createLocalJavaClass( String className ) {\n        return UOIUtils.createJavaClass( className, \"\" );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/action/ActionObject.java",
    "content": "package com.pinecone.hydra.system.ko.action;\n\nimport com.pinecone.hydra.system.ko.KernelObject;\n\npublic interface ActionObject extends KernelObject {\n\n    String FunctionName = ActionObject.class.getSimpleName().replace( \"Object\", \"\" );\n\n    @Override\n    default String objectFunctionName() {\n        return FunctionName;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/action/EventObject.java",
    "content": "package com.pinecone.hydra.system.ko.action;\n\npublic interface EventObject extends ActionObject {\n\n    String FunctionName = EventObject.class.getSimpleName().replace( \"Object\", \"\" );\n\n    @Override\n    default String objectFunctionName() {\n        return FunctionName;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/control/ControlObject.java",
    "content": "package com.pinecone.hydra.system.ko.control;\n\nimport com.pinecone.hydra.system.ko.KernelObject;\n\npublic interface ControlObject extends KernelObject {\n\n    String FunctionName = ControlObject.class.getSimpleName().replace( \"Object\", \"\" );\n\n    @Override\n    default String objectFunctionName() {\n        return FunctionName;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/dao/GUIDNameManipulator.java",
    "content": "package com.pinecone.hydra.system.ko.dao;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface GUIDNameManipulator extends Pinenut {\n    List<GUID > getGuidsByName( String name );\n\n    List<GUID > getGuidsByNameID( String name, GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/driver/KOIMappingDriver.java",
    "content": "package com.pinecone.hydra.system.ko.driver;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.homotype.StereotypicInjector;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic interface KOIMappingDriver extends Pinenut {\n    String getVersionSignature();\n\n    Hydrogen getSystem();\n\n    Processum getSuperiorProcess();\n\n    KOIMasterManipulator getMasterManipulator();\n\n    // Temp, TODO\n    StereotypicInjector autoConstruct( Class<?> stereotype, Map config, Object instance );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/driver/KOIMappingDriverFactory.java",
    "content": "package com.pinecone.hydra.system.ko.driver;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface KOIMappingDriverFactory extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/driver/KOIMasterManipulator.java",
    "content": "package com.pinecone.hydra.system.ko.driver;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface KOIMasterManipulator extends Pinenut {\n    KOISkeletonMasterManipulator getSkeletonMasterManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/driver/KOISkeletonMasterManipulator.java",
    "content": "package com.pinecone.hydra.system.ko.driver;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface KOISkeletonMasterManipulator extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/entity/ObjectHandle.java",
    "content": "package com.pinecone.hydra.system.ko.entity;\n\nimport com.pinecone.hydra.system.ko.handle.HandleObject;\n\npublic interface ObjectHandle extends HandleObject {\n    String FunctionName = HandleObject.class.getSimpleName();\n\n    @Override\n    default String objectFunctionName() {\n        return FunctionName;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/entity/ObjectTable.java",
    "content": "package com.pinecone.hydra.system.ko.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ObjectTable extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/AppliableKHandle.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface AppliableKHandle extends KHandle {\n\n    KHandle applyTreeNodeName( String szTreeNodeName );\n\n    KHandle applyTreeNodeGuid( GUID treeNodeGuid ) ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/ArchKHandle.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic abstract class ArchKHandle implements KHandle {\n\n    protected String mszTreeNodeName;\n\n    protected GUID   mTreeNodeGuid;\n\n    public ArchKHandle( String treeNodeName, GUID treeNodeGuid ) {\n        this.mszTreeNodeName   = treeNodeName;\n        this.mTreeNodeGuid     = treeNodeGuid;\n    }\n\n    protected ArchKHandle() {\n        this( null, null );\n    }\n\n    public KHandle applyTreeNodeName( String szTreeNodeName ) {\n        this.mszTreeNodeName = szTreeNodeName;\n        return this;\n    }\n\n    public KHandle applyTreeNodeGuid( GUID treeNodeGuid ) {\n        this.mTreeNodeGuid = treeNodeGuid;\n        return this;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszTreeNodeName;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mTreeNodeGuid;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/HandleObject.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\nimport com.pinecone.hydra.system.ko.KernelObject;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface HandleObject extends TreeNode, KernelObject {\n\n    String FunctionName = HandleObject.class.getSimpleName().replace( \"Object\", \"\" );\n\n    @Override\n    default String objectFunctionName() {\n        return FunctionName;\n    }\n\n    @Override\n    default String objectCategoryName() {\n        return \"Handle\";\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/HandleType.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\npublic enum HandleType {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/KHandle.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\npublic interface KHandle extends HandleObject {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/KOMMountPointHandle.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\n\npublic interface KOMMountPointHandle extends ObjectTreeAddressingSectionHandle, KOMInstrument {\n\n    KOMInstrument revealWrapped();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/ObjectTreeAddressingSectionHandle.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\n\npublic interface ObjectTreeAddressingSectionHandle extends KHandle, SectionHandle {\n\n    EntityNode queryNode( String path );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/ObjectTreeGUIDAddressingSectionHandle.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface ObjectTreeGUIDAddressingSectionHandle extends KHandle, SectionHandle {\n\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/handle/SectionHandle.java",
    "content": "package com.pinecone.hydra.system.ko.handle;\n\npublic interface SectionHandle extends HandleObject {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ArchKOMTree.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.centrum.UniformCentralSystem;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.ArchRegimentObjectModel;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Objects;\n\npublic abstract class ArchKOMTree extends ArchRegimentObjectModel implements KOMInstrument {\n    protected Namespace             mThisNamespace;\n    protected KOMInstrument         mParentInstrument;\n\n    protected Hydrogen              hydrogen;\n\n    protected Processum             superiorProcess;\n\n    protected GuidAllocator         guidAllocator;\n    protected OperatorFactory       operatorFactory;\n\n    protected PathResolver          pathResolver;\n\n    protected PathSelector          pathSelector;\n\n    protected 
DynamicFactory        dynamicFactory;\n\n    public ArchKOMTree (\n            Processum superiorProcess, KOIMasterManipulator masterManipulator,\n            OperatorFactory operatorFactory, KernelObjectConfig kernelObjectConfig, PathSelector pathSelector,\n            KOMInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator\n    ){\n        this( superiorProcess, masterManipulator, kernelObjectConfig, parent, name, superiorPathScope, guidAllocator );\n\n        this.pathSelector              =  pathSelector;\n        this.operatorFactory           =  operatorFactory;\n    }\n\n    public ArchKOMTree (\n            Processum superiorProcess, KOIMasterManipulator masterManipulator, KernelObjectConfig kernelObjectConfig,\n            KOMInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator\n    ){\n        super( masterManipulator, kernelObjectConfig, superiorPathScope );\n        this.superiorProcess                 = superiorProcess;\n        if ( this.superiorProcess instanceof Hydrogen) {\n            this.hydrogen = (Hydrogen) this.superiorProcess;\n        }\n        else {\n            this.hydrogen = (Hydrogen) superiorProcess.parentSystem();\n        }\n\n        this.guidAllocator                   = guidAllocator;\n        this.dynamicFactory                  = new GenericDynamicFactory( this.superiorProcess.getTaskManager().getClassLoader() );\n        this.mParentInstrument               = parent;\n        this.setTargetingName( name );\n        this.prepare_uniform_skeleton();\n    }\n\n    protected void prepare_uniform_skeleton() {\n        if ( this.superiorProcess != null ) {\n            if ( this.guidAllocator == null && this.hydrogen instanceof UniformCentralSystem ) {\n                UniformCentralSystem system = (UniformCentralSystem) this.hydrogen;\n                this.guidAllocator = system.getSystemGuidAllocator();\n            }\n        }\n\n        if ( 
this.guidAllocator == null ) {\n            throw new IllegalArgumentException( \"GUIDAllocator is undefined.\" );\n        }\n    }\n\n    //************************************** CascadeInstrument **************************************\n    @Override\n    public KOMInstrument parent() {\n        return this.mParentInstrument;\n    }\n\n    @Override\n    public Processum getSuperiorProcess() {\n        return this.superiorProcess;\n    }\n\n    @Override\n    public void setParent( CascadeInstrument parent ) {\n        this.mParentInstrument = (KOMInstrument) parent;\n    }\n\n    @Override\n    public Namespace getTargetingName() {\n        return this.mThisNamespace;\n    }\n\n    @Override\n    public void setTargetingName( Namespace name ) {\n        this.mThisNamespace = name;\n    }\n\n    //************************************** CascadeInstrument End **************************************\n\n\n    @Override\n    public void applyGuidAllocator( GuidAllocator guidAllocator ) {\n        this.guidAllocator = guidAllocator;\n    }\n\n    @Override\n    public GUID put( TreeNode treeNode ) {\n        TreeNodeOperator operator = this.operatorFactory.getOperator( treeNode.getMetaType() );\n        return operator.insert( treeNode );\n    }\n\n    @Override\n    public boolean contains( GUID nodeGuid ) {\n        return this.imperialTree.contains( nodeGuid );\n    }\n\n    @Override\n    public TreeNode get( GUID guid, int depth ) {\n        return this.getOperatorByGuid( guid ).get( guid, depth );\n    }\n\n    @Override\n    public TreeNode getAsRootDepth( GUID guid ) {\n        return this.getOperatorByGuid( guid ).getAsRootDepth( guid );\n    }\n\n    protected String getNS( GUID guid, String szSeparator ) {\n        String path = this.imperialTree.getCachePath(guid);\n        if ( path != null ) {\n            return path;\n        }\n\n        ImperialTreeNode node = this.imperialTree.getNode(guid);\n        if ( node == null ) {\n            return null;\n   
     }\n\n        GUID owner = this.imperialTree.getOwner(guid);\n        if ( owner == null ){\n            String assemblePath = this.getNodeName(node);\n            while ( !node.getParentGUIDs().isEmpty() && this.allNonNull( node.getParentGUIDs() ) ){\n                List<GUID> parentGuids = node.getParentGUIDs();\n                for( int i = 0; i < parentGuids.size(); ++i ){\n                    if ( parentGuids.get(i) != null ){\n                        node = this.imperialTree.getNode( parentGuids.get(i) );\n                        break;\n                    }\n                }\n                String nodeName = this.getNodeName(node);\n                assemblePath = nodeName + szSeparator + assemblePath;\n            }\n            this.imperialTree.insertCachePath( guid, assemblePath );\n            return assemblePath;\n        }\n        else{\n            String assemblePath = this.getNodeName( node );\n            while ( !node.getParentGUIDs().isEmpty() && this.allNonNull( node.getParentGUIDs() ) ){\n                node = this.imperialTree.getNode( owner );\n                String nodeName = this.getNodeName( node );\n                assemblePath = nodeName + szSeparator + assemblePath;\n                owner = this.imperialTree.getOwner( node.getGuid() );\n            }\n            this.imperialTree.insertCachePath( guid, assemblePath );\n            return assemblePath;\n        }\n    }\n\n    @Override\n    public String getPath( GUID guid ) {\n        return this.getNS( guid, this.kernelObjectConfig.getPathNameSeparator() );\n    }\n\n    @Override\n    public String getFullName( GUID guid ) {\n        return this.getNS( guid, this.kernelObjectConfig.getFullNameSeparator() );\n    }\n\n    protected TreeNodeOperator getOperatorByGuid( GUID guid ) {\n        ImperialTreeNode node = this.imperialTree.getNode( guid );\n        if ( node == null ){\n            return null;\n        }\n        TreeNode newInstance = 
(TreeNode)node.getType().newInstance( new Class<? >[]{this.getClass()}, this );\n        return this.operatorFactory.getOperator( newInstance.getMetaType() );\n    }\n\n    @Override\n    public TreeNode get( GUID guid ) {\n        TreeNodeOperator operator = this.getOperatorByGuid( guid );\n        if( operator == null ) {\n            return null;\n        }\n        return operator.get( guid );\n    }\n\n    /** Final Solution 20240929: 无法获取类型 */\n    @Override\n    public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) {\n        if( szTargetSep != null ) {\n            path = path.replace( szBadSep, szTargetSep );\n        }\n\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        List<String > resolvedParts = this.pathResolver.resolvePath( parts );\n        path = this.pathResolver.assemblePath( resolvedParts );\n\n        GUID guid = this.imperialTree.queryGUIDByPath( path );\n        if ( guid != null ){\n            return guid;\n        }\n\n\n        guid = this.pathSelector.searchGUID( resolvedParts );\n        if( guid != null ){\n            this.imperialTree.insertCachePath( guid, path );\n        }\n        return guid;\n    }\n\n    @Override\n    public GUID queryGUIDByPath( String path ) {\n        return this.queryGUIDByNS( path, null, null );\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        TreeNode newInstance = (TreeNode)node.getType().newInstance();\n        TreeNodeOperator operator = this.operatorFactory.getOperator( newInstance.getMetaType() );\n        operator.purge( guid );\n    }\n\n    @Override\n    public abstract Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) ;\n\n    public Object queryEntityHandle( String path ) {\n        return this.queryEntityHandleByNS( path, null, null );\n    }\n\n    @Override\n    public void remove( String path ) {\n        Object 
handle = this.queryEntityHandle( path );\n        if( handle instanceof GUID ) {\n            this.remove( (GUID) handle );\n        }\n    }\n\n    @Override\n    public List<TreeNode > getChildren( GUID guid ) {\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren( guid );\n        ArrayList<TreeNode > nodes = new ArrayList<>();\n        for( GUIDImperialTrieNode node : childNodes ){\n            TreeNode treeNode =  this.get(node.getGuid());\n            nodes.add( treeNode );\n        }\n        return nodes;\n    }\n\n    @Override\n    public List<GUID > fetchChildrenGuids( GUID guid ) {\n        return this.imperialTree.fetchChildrenGuids( guid );\n    }\n\n    public EntityNode queryNodeByNS( String path, String szBadSep, String szTargetSep ) {\n        Object ret = this.queryEntityHandleByNS( path, szBadSep, szTargetSep );\n        if( ret instanceof EntityNode ) {\n            return (EntityNode) ret;\n        }\n        else if( ret instanceof GUID ) {\n            return this.get( (GUID) ret );\n        }\n\n        return null;\n    }\n\n    public TreeNode queryTreeNodeByNS( String path, String szBadSep, String szTargetSep ) {\n        Object ret = this.queryEntityHandleByNS( path, szBadSep, szTargetSep );\n        if( ret instanceof TreeNode ) {\n            return (TreeNode) ret;\n        }\n        else if( ret instanceof GUID ) {\n            return this.get( (GUID) ret );\n        }\n\n        return null;\n    }\n\n    @Override\n    public List<? 
extends TreeNode > fetchRoot() {\n        List<GUID> guids = this.imperialTree.fetchRoot();\n        ArrayList<TreeNode> treeNodes = new ArrayList<>();\n        for( GUID guid : guids ){\n            TreeNode treeNode = this.get(guid);\n            treeNodes.add(treeNode);\n        }\n        return treeNodes;\n    }\n\n    @Override\n    public void rename( GUID guid, String name ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        TreeNode newInstance = (TreeNode)node.getType().newInstance();\n        TreeNodeOperator operator = this.operatorFactory.getOperator( newInstance.getMetaType() );\n        operator.updateName( guid, name );\n\n        this.imperialTree.removeCachePath( guid );\n    }\n\n    @Override\n    public EntityNode queryNode( String path ) {\n        return this.queryNodeByNS( path, null, null );\n    }\n\n    @Override\n    public TreeNode queryTreeNode( String path ) {\n        return this.queryTreeNodeByNS( path, null, null );\n    }\n\n    @Override\n    public GUID queryGUIDByFN( String fullName ) {\n        return this.queryGUIDByNS(\n                fullName, this.kernelObjectConfig.getFullNameSeparator(), this.kernelObjectConfig.getPathNameSeparator()\n        );\n    }\n\n    private String getNodeName( ImperialTreeNode node ){\n        UOI type = node.getType();\n        TreeNode newInstance = (TreeNode)type.newInstance();\n        TreeNodeOperator operator = this.operatorFactory.getOperator(newInstance.getMetaType());\n        TreeNode treeNode = operator.get(node.getGuid());\n        return treeNode.getName();\n    }\n\n\n    private boolean allNonNull( List<?> list ) {\n        return list.stream().noneMatch( Objects::isNull );\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.guidAllocator;\n    }\n\n    @Override\n    public ImperialTree getMasterTrieTree() {\n        return this.imperialTree;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ArchReparseKOMTree.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\n\npublic abstract class ArchReparseKOMTree extends ArchKOMTree implements ReparseKOMTree {\n    protected ReparseKOMTreeAddition mReparseKOM;\n\n    public ArchReparseKOMTree(\n            Processum superiorProcess, KOIMasterManipulator masterManipulator , OperatorFactory operatorFactory, KernelObjectConfig kernelObjectConfig, PathSelector pathSelector,\n            KOMInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator\n    ){\n        this( superiorProcess, masterManipulator, kernelObjectConfig, parent, name, superiorPathScope, guidAllocator );\n        this.pathResolver                  =  new KOPathResolver( kernelObjectConfig );\n        this.pathSelector                  =  pathSelector;\n        this.operatorFactory               =  operatorFactory;\n        this.mReparseKOM                   =  new GenericReparseKOMTreeAddition( this );\n    }\n\n    public ArchReparseKOMTree (\n            Processum superiorProcess, KOIMasterManipulator masterManipulator ,KernelObjectConfig kernelObjectConfig, KOMInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator\n    ){\n        super( superiorProcess, masterManipulator, kernelObjectConfig, parent, name, superiorPathScope, guidAllocator );\n    }\n\n    public ArchReparseKOMTree (\n            Processum 
superiorProcess, KOIMasterManipulator masterManipulator ,KernelObjectConfig kernelObjectConfig, KOMInstrument parent, String name, @Nullable GuidAllocator guidAllocator\n    ){\n        this( superiorProcess, masterManipulator, kernelObjectConfig, parent, name, CascadeInstrument.EmptySuperiorPathScope, guidAllocator );\n    }\n\n    @Override\n    public ReparseLinkNode queryReparseLinkByNS( String path, String szBadSep, String szTargetSep ) {\n        return this.mReparseKOM.queryReparseLinkByNS( path, szBadSep, szTargetSep );\n    }\n\n    @Override\n    public ReparseLinkNode queryReparseLink( String path ) {\n        return this.queryReparseLinkByNS( path, null, null );\n    }\n\n    @Override\n    public void affirmOwnedNode( GUID parentGuid, GUID childGuid ) {\n        this.mReparseKOM.affirmOwnedNode( parentGuid, childGuid );\n    }\n\n    @Override\n    public void newHardLink( GUID sourceGuid, GUID targetGuid ) {\n        this.mReparseKOM.newHardLink( sourceGuid, targetGuid );\n    }\n\n    @Override\n    public void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ) {\n        this.mReparseKOM.newLinkTag( originalGuid, dirGuid, tagName);\n    }\n\n    @Override\n    public void updateLinkTag( GUID tagGuid, String tagName ) {\n        this.mReparseKOM.updateLinkTag( tagGuid, tagName );\n    }\n\n    @Override\n    public void removeReparseLink( GUID guid ) {\n        this.mReparseKOM.removeReparseLink( guid );\n    }\n\n    @Override\n    public void newLinkTag( String originalPath, String dirPath, String tagName ) {\n        this.mReparseKOM.newLinkTag( originalPath, dirPath, tagName );\n    }\n\n    /** ReparseLinkNode or GUID **/\n    @Override\n    public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) {\n        return this.mReparseKOM.queryEntityHandleByNS( path, szBadSep, szTargetSep );\n    }\n\n    @Override\n    public void remove( String path ) {\n        this.mReparseKOM.remove( path );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ExpressInstrument.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport com.pinecone.hydra.system.ko.runtime.CentralizedRuntimeInstrument;\nimport com.pinecone.hydra.system.ko.runtime.DirectMappingTrieRuntimeInstrument;\n\npublic interface ExpressInstrument extends CentralizedRuntimeInstrument, DirectMappingTrieRuntimeInstrument {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/GenericReparseKOMTreeAddition.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\n\npublic class GenericReparseKOMTreeAddition implements ReparseKOMTreeAddition {\n    protected ArchKOMTree           mKOMTree;\n    protected ImperialTree mImperialTree;\n    protected ReparsePointSelector  mReparsePointSelector;\n\n    public GenericReparseKOMTreeAddition( ArchKOMTree tree, ReparsePointSelector reparsePointSelector ) {\n        this.mKOMTree              = tree;\n        this.mImperialTree = tree.getMasterTrieTree();\n        this.mReparsePointSelector = reparsePointSelector ;\n    }\n\n    public GenericReparseKOMTreeAddition( ArchKOMTree tree ) {\n        this.mKOMTree              = tree;\n        this.mImperialTree = tree.getMasterTrieTree();\n        this.mReparsePointSelector = new ReparseLinkSelector( (MultiFolderPathSelector) this.mKOMTree.pathSelector ) ;\n    }\n\n    @Override\n    public ReparseLinkNode queryReparseLinkByNS(String path, String szBadSep, String szTargetSep ) {\n        if( szTargetSep != null ) {\n            path = path.replace( szBadSep, szTargetSep );\n        }\n\n        String[] parts = this.mKOMTree.pathResolver.segmentPathParts( path );\n        return this.mReparsePointSelector.searchLinkNode( parts );\n    }\n\n    @Override\n    public ReparseLinkNode queryReparseLink(String path) {\n        return this.queryReparseLinkByNS( path, null, null );\n    }\n\n    @Override\n    public void affirmOwnedNode( GUID parentGuid, GUID childGuid ) {\n        this.mImperialTree.affirmOwnedNode( childGuid, parentGuid );\n    }\n\n    @Override\n    public void newHardLink( GUID sourceGuid, GUID targetGuid ) {\n        this.mImperialTree.newHardLink( sourceGuid, targetGuid );\n    }\n\n    @Override\n    public void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ) {\n        
this.mImperialTree.newLinkTag( originalGuid, dirGuid, tagName, this.mKOMTree );\n    }\n\n    @Override\n    public void updateLinkTag( GUID tagGuid, String tagName ) {\n        this.mImperialTree.updateLinkTagName( tagGuid, tagName );\n    }\n\n    @Override\n    public void removeReparseLink( GUID guid ) {\n        this.mImperialTree.removeReparseLink( guid );\n    }\n\n    @Override\n    public void newLinkTag(String originalPath, String dirPath, String tagName) {\n        GUID originalGuid           = this.mKOMTree.queryGUIDByPath( originalPath );\n        GUID dirGuid                = this.mKOMTree.queryGUIDByPath( dirPath );\n\n        if( this.mImperialTree.getOriginalGuid( tagName, dirGuid ) == null ) {\n            this.mImperialTree.newLinkTag( originalGuid, dirGuid, tagName, this.mKOMTree );\n        }\n    }\n\n    /** ReparseLinkNode or GUID **/\n    @Override\n    public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) {\n        if( szTargetSep != null ) {\n            path = path.replace( szBadSep, szTargetSep );\n        }\n\n        String[] parts = this.mKOMTree.pathResolver.segmentPathParts( path );\n        return this.mReparsePointSelector.search( parts );\n    }\n\n    @Override\n    public void remove( String path ) {\n        Object handle = this.mKOMTree.queryEntityHandle( path );\n        if( handle instanceof GUID ) {\n            this.mKOMTree.remove( (GUID) handle );\n        }\n        else if( handle instanceof ReparseLinkNode ) {\n            ReparseLinkNode linkNode = (ReparseLinkNode) handle;\n            this.removeReparseLink( linkNode.getTagGuid() );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/KOMInstrument.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.ko.CascadeKOTreeInstrument;\nimport com.pinecone.hydra.system.ko.QueryableInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.util.Collection;\nimport java.util.List;\n\npublic interface KOMInstrument extends CascadeKOTreeInstrument, QueryableInstrument {\n    @Override\n    KOMInstrument parent();\n\n    void applyGuidAllocator( GuidAllocator guidAllocator );\n\n    @Override\n    default void setTargetingName( String name ) {\n        CascadeKOTreeInstrument.super.setTargetingName( name );\n    }\n\n    @Override\n    String getPath( GUID objectGuid );\n\n    @Override\n    String querySystemKernelObjectPath( GUID objectGuid ) ;\n\n    String getFullName( GUID objectGuid );\n\n    @Override\n    GUID queryGUIDByPath( String path );\n\n    GUID queryGUIDByFN  ( String fullName );\n\n    default GUID assertPath( String path, String pathType ) throws IllegalArgumentException {\n        GUID guid      = this.queryGUIDByPath( path );\n        if( guid == null ) {\n            throw new IllegalArgumentException( \"Undefined \" + pathType + \" '\" + path + \"'\" );\n        }\n\n        return guid;\n    }\n\n    default GUID assertPath( String path ) throws IllegalArgumentException {\n        return this.assertPath( path, \"path\" );\n    }\n\n    boolean contains( GUID nodeGuid );\n\n    GUID put( TreeNode treeNode );\n\n    TreeNode get( GUID objectGuid );\n\n    GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep );\n\n    TreeNode get( GUID guid, int depth );\n\n    TreeNode getAsRootDepth( GUID guid );\n\n    void remove( GUID guid );\n\n    void remove( String path );\n\n    Collection<TreeNode > getChildren( 
GUID guid );\n\n    Collection<GUID > fetchChildrenGuids( GUID guid );\n\n    Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep );\n\n    @Override\n    EntityNode queryNode( String path );\n\n    TreeNode queryTreeNode( String path );\n\n    List<? extends TreeNode > fetchRoot();\n\n    void rename( GUID guid, String name );\n\n    Processum getSuperiorProcess();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/KOMSelector.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n/**\n * Kernel Object Model\n * Same as Document Object Model (DOM)\n */\npublic interface KOMSelector extends Pinenut {\n    // Return with json.\n    Object querySelectorJ                 ( String szSelector );\n\n    Object querySelector                  ( String szSelector );\n\n    List querySelectorAll                 ( String szSelector );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/MultiFolderPathSelector.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Stack;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic class MultiFolderPathSelector implements PathSelector {\n    protected PathResolver                    pathResolver;\n    protected ImperialTree                    imperialTree;\n    protected GUIDNameManipulator[]           dirManipulators;\n    protected GUIDNameManipulator[]           fileManipulators;\n\n    public MultiFolderPathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator[] dirMans, GUIDNameManipulator[] fileMans ) {\n        this.pathResolver         = pathResolver;\n        this.imperialTree = trieTree;\n        this.dirManipulators      = dirMans;\n        this.fileManipulators     = fileMans;\n    }\n\n    @Override\n    public GUID searchGUID( String[] parts ) {\n        return this.searchGUID( parts, null );\n    }\n\n    @Override\n    public GUID searchGUID( String[] parts, @Nullable String[] lpResolvedPath ) {\n        List<String > resolvedParts = this.pathResolver.resolvePath( parts );\n        if( lpResolvedPath != null ) {\n            lpResolvedPath[ 0 ] = this.pathResolver.assemblePath( resolvedParts );\n        }\n\n        return this.searchGUID( resolvedParts );\n    }\n\n    @Override\n    public GUID searchGUID( List<String> resolvedParts ) {\n        //return dfsSearchGUID(fileMan, dirMan, resolvedParts, 0, null);\n        return (GUID) this.dfsSearch( resolvedParts );\n    }\n\n\n    @Override\n    public GUID searchGUID( GUID parentId, String[] parts ) {\n        return this.searchGUID( parentId, parts, null );\n    }\n\n    @Override\n    
public GUID searchGUID( GUID parentId, String[] parts, @Nullable String[] lpResolvedPath ) {\n        List<String > resolvedParts = this.pathResolver.resolvePath( parts );\n        if( lpResolvedPath != null ) {\n            lpResolvedPath[ 0 ] = this.pathResolver.assemblePath( resolvedParts );\n        }\n\n        return this.searchGUID( parentId, resolvedParts );\n    }\n\n    @Override\n    public GUID searchGUID( GUID parentId, List<String> resolvedParts ) {\n        //return dfsSearchGUID(fileMan, dirMan, resolvedParts, 0, null);\n        return (GUID) this.dfsSearch( parentId, resolvedParts );\n    }\n\n\n    @Override\n    public Object querySelector( String szSelector ) {\n        return this.searchGUID( this.pathResolver.resolvePathParts( szSelector ) );\n    }\n\n    @Override\n    public List querySelectorAll( String szSelector ) {\n        return List.of( this.querySelector( szSelector ) )  ;\n    }\n\n    @Override\n    public Object querySelectorJ( String szSelector ) {\n        return JSON.stringify( this.querySelector( szSelector ) );\n    }\n\n    protected Object dfsSearch( List<String > parts ) {\n        return this.dfsSearch( null, parts );\n    }\n\n    /** Iterative DFS, 迭代 DFS 法 **/\n    protected Object dfsSearch( GUID parentId, List<String > parts ) {\n        Stack<StandardPathSelector.SearchArgs> stack = new Stack<>();\n        stack.push( new StandardPathSelector.SearchArgs( parentId, 0 ) );\n\n        while ( !stack.isEmpty() ) {\n            StandardPathSelector.SearchArgs currentArgs = stack.pop();\n            int depth       = currentArgs.depth;\n            GUID parentGuid = currentArgs.parentGuid;\n\n            // If we've reached the last part, try to match the current part with all file manipulators\n            // 如果是第一个部分，判断路径长度，来决定查询器的使用\n            if ( depth == parts.size() ) {\n                continue;\n            }\n\n            String currentPart = parts.get( depth );\n            List<GUID > guids;\n\n            
if ( depth == 0 ) {\n                if ( parts.size() > 1 ) {\n                    // Case1: If more than one part, first part can only be a directory.\n                    guids = this.searchDirAndLinksFirstCase( currentPart );\n                }\n                else {\n                    // Case2: If there's only one part, it could be either file or directory.\n                    // 只有一个部分，可能是文件或文件夹，查询所有操纵器. [且必须是Root]\n                    guids = this.fetchAllGuidsRootCase( currentPart );\n                }\n            }\n            else {\n                // Case3: For middle and last parts, retrieve children GUIDs using distributedTrieTree\n                guids = this.imperialTree.fetchChildrenGuids( parentGuid );\n            }\n\n            if ( guids == null || guids.isEmpty() ) {\n                continue;\n            }\n\n            for ( GUID guid : guids ) {\n                Object blocker = this.tryTerminationBlock( currentPart, guid );\n                if ( blocker != null ) {\n                    return blocker;\n                }\n\n                if ( this.isGuidMatchingPartName( guid, currentPart, depth, parts.size() ) ) {\n                    if ( depth == parts.size() - 1 ) {\n                        return this.beforeDFSTermination( currentPart, guid );\n                    }\n                    stack.push( new StandardPathSelector.SearchArgs( guid, depth + 1 ) );\n                }\n            }\n        }\n\n        return null;\n    }\n\n    /** Recursive DFS, 废弃递归 DFS 法，留着考古**/\n    @Deprecated\n    protected Object dfsSearch( List<String> parts, int depth, GUID parentGuid ) {\n        String currentPart = parts.get(depth);\n        List<GUID> guids;\n\n        if ( depth == 0 ) {\n            if ( parts.size() > 1 ) {\n                // Case1: If more than one part, first part can only be a directory.\n                guids = this.searchDirAndLinksFirstCase( currentPart );\n            }\n            else {\n                
// Case2: If there's only one part, it could be either file or directory.\n                // 只有一个部分，可能是文件或文件夹，查询所有操纵器. [且必须是Root]\n                guids = /*this.*/fetchAllGuidsRootCase( currentPart );\n            }\n        }\n        else {\n            // Case3: For middle and last parts, retrieve children GUIDs using distributedTrieTree\n            guids = this.imperialTree.fetchChildrenGuids( parentGuid );\n        }\n\n        if ( guids == null || guids.isEmpty() ) {\n            return null;\n        }\n\n        // 索引法遍历所有可能的 GUID，并继续向下递归.\n        // Indexing method traverses all possible GUIs and continues to recursively descend.\n        for ( GUID guid : guids ) {\n            // Using index to find.\n            Object blocker = this.tryTerminationBlock( currentPart, guid );\n            if ( blocker != null ) {\n                return blocker;\n            }\n\n            if ( this.isGuidMatchingPartName( guid, currentPart, depth, parts.size() ) ) {\n                if ( depth == parts.size() - 1 ) {\n                    return this.beforeDFSTermination( currentPart, guid );\n                }\n\n                Object result = this.dfsSearch( parts, depth + 1, guid );\n                if ( result != null ) {\n                    return result;\n                }\n            }\n        }\n\n        return null;\n    }\n\n    protected Object beforeDFSTermination( String currentPart, GUID guid ) {\n        return guid;\n    }\n\n    protected Object tryTerminationBlock( String currentPart, GUID guid ) {\n        return null;\n    }\n\n    protected boolean checkPartInAllManipulators( GUID guid, String partName ) {\n        for ( GUIDNameManipulator manipulator : this.fileManipulators ) {\n            List<GUID > guids = manipulator.getGuidsByNameID( partName, guid );\n            if ( guids != null && !guids.isEmpty() ) {\n                return true;\n            }\n        }\n\n        List<GUID > guids = this.searchDirAndLinks( guid, partName 
);\n        return guids != null && !guids.isEmpty();\n    }\n\n    protected boolean isGuidMatchingPartName( GUID guid, String partName, int depth, int nParts ) {\n        // 在中间部分只匹配文件夹，最后一部分匹配文件和文件夹\n        // In the last part, check both files and directories\n\n        if ( depth == nParts - 1 ) {\n            return this.checkPartInAllManipulators( guid, partName );\n        }\n        else {\n            // Middle part: Directory only.\n            //List<GUID > guids = this.dirManipulator.getGuidsByNameID( partName, guid );\n            List<GUID > guids = this.searchDirAndLinks( guid, partName );\n            return guids != null && !guids.isEmpty();\n        }\n    }\n\n    protected List<GUID > searchLinks ( GUID guid, String partName ) {\n        GUID linkGuid = this.imperialTree.getOriginalGuidByNodeGuid( partName, guid );\n        if( linkGuid != null ) {\n            return List.of( linkGuid );\n        }\n        return null;\n    }\n\n    protected List<GUID > searchDirAndLinks ( GUID guid, String partName ) {\n        for( GUIDNameManipulator dirMans : this.dirManipulators ) {\n            List<GUID > guids = dirMans.getGuidsByNameID( partName, guid );\n            if( guids != null && !guids.isEmpty() ) {\n                return guids;\n            }\n        }\n\n        return this.searchLinks( guid, partName );\n    }\n\n    protected List<GUID > searchLinksFirstCase ( String partName ) {\n        return this.imperialTree.fetchOriginalGuidRoot( partName );\n    }\n\n    protected List<GUID > searchDirAndLinksFirstCase ( String partName ) {\n        for( GUIDNameManipulator dirMans : this.dirManipulators ) {\n            List<GUID > guids = dirMans.getGuidsByName( partName );\n            if( guids != null && !guids.isEmpty() ) {\n                return guids;\n            }\n        }\n\n        return this.searchLinksFirstCase( partName );\n    }\n\n    protected List<GUID > fetchDirsAllGuids( String partName ) {\n        if( 
this.dirManipulators.length > 0 ) {\n            List<GUID > guids = this.dirManipulators[ 0 ].getGuidsByName( partName );\n            for ( int i = 1; i < this.dirManipulators.length; ++i ) {\n                guids.addAll( this.dirManipulators[ i ].getGuidsByName( partName ) );\n            }\n            guids.removeIf( guid -> !this.imperialTree.isRoot( guid ) );\n            return guids;\n        }\n\n        return new ArrayList<>();\n    }\n\n    protected void fetchAllOriginalGuidsRootCase( List<GUID > guids, String partName ) {\n        guids.addAll( this.imperialTree.fetchOriginalGuidRoot( partName ) );\n    }\n\n    protected List<GUID > fetchAllGuidsRootCase( String partName ) {\n        List<GUID > guids = this.fetchDirsAllGuids( partName );\n\n        // Notice: Critical error, querying root element should checks if it is the root.\n        for ( GUIDNameManipulator manipulator : this.fileManipulators ) {\n            List<GUID > gs = manipulator.getGuidsByName( partName );\n            for( GUID guid : gs ) {\n                if( this.imperialTree.isRoot( guid ) ) {\n                    guids.add( guid );\n                }\n            }\n        }\n\n\n        this.fetchAllOriginalGuidsRootCase( guids, partName );\n        return guids;\n    }\n\n    static class SearchArgs {\n        GUID parentGuid;\n        int  depth;\n\n        SearchArgs( GUID parentGuid, int depth ) {\n            this.parentGuid = parentGuid;\n            this.depth      = depth;\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/PathSelector.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface PathSelector extends KOMSelector {\n    GUID searchGUID( String[] parts );\n\n    GUID searchGUID( String[] parts, @Nullable String[] lpResolvedPath );\n\n    GUID searchGUID( List<String > resolvedParts );\n\n\n    GUID searchGUID( GUID parentID, String[] parts );\n\n    GUID searchGUID( GUID parentID, String[] parts, @Nullable String[] lpResolvedPath );\n\n    GUID searchGUID( GUID parentID, List<String > resolvedParts );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ProxiedKOMMountPointHandle.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport java.util.Collection;\nimport java.util.List;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.handle.ArchKHandle;\nimport com.pinecone.hydra.system.ko.handle.KOMMountPointHandle;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class ProxiedKOMMountPointHandle extends ArchKHandle implements KOMMountPointHandle {\n\n    protected KOMInstrument mWarpedInstrument;\n\n    public ProxiedKOMMountPointHandle( String treeNodeName, GUID treeNodeGuid, KOMInstrument warpedInstrument ) {\n        super( treeNodeName, treeNodeGuid );\n        this.mWarpedInstrument = warpedInstrument;\n    }\n\n    @Override\n    public void applyGuidAllocator( GuidAllocator guidAllocator ) {\n        this.mWarpedInstrument.applyGuidAllocator( guidAllocator );\n    }\n\n    @Override\n    public KOMInstrument revealWrapped() {\n        return this.mWarpedInstrument;\n    }\n\n    @Override\n    public KOMInstrument parent() {\n        return this.mWarpedInstrument.parent();\n    }\n\n    @Override\n    public void setParent( CascadeInstrument parent ) {\n        this.mWarpedInstrument.setParent( parent );\n    }\n\n    @Override\n    public Namespace getTargetingName() {\n        return this.mWarpedInstrument.getTargetingName();\n    }\n\n    @Override\n    public void setTargetingName( Namespace name ) {\n        this.mWarpedInstrument.setTargetingName( name );\n    }\n\n    @Override\n    public String getSuperiorPathScope() {\n        return 
this.mWarpedInstrument.getSuperiorPathScope();\n    }\n\n    @Override\n    public void applySuperiorPathScope( String superiorPathScope ) {\n        this.mWarpedInstrument.applySuperiorPathScope( superiorPathScope );\n    }\n\n    @Override\n    public String getPath( GUID objectGuid ) {\n        return this.mWarpedInstrument.getPath( objectGuid );\n    }\n\n    @Override\n    public String querySystemKernelObjectPath( GUID objectGuid ) {\n        return this.mWarpedInstrument.querySystemKernelObjectPath( objectGuid );\n    }\n\n    @Override\n    public String getFullName( GUID objectGuid ) {\n        return this.mWarpedInstrument.getFullName( objectGuid );\n    }\n\n    @Override\n    public GUID queryGUIDByPath( String path ) {\n        return this.mWarpedInstrument.queryGUIDByPath( path );\n    }\n\n    @Override\n    public GUID queryGUIDByFN( String fullName ) {\n        return this.mWarpedInstrument.queryGUIDByFN( fullName );\n    }\n\n    @Override\n    public boolean contains( GUID nodeGuid ) {\n        return this.mWarpedInstrument.contains( nodeGuid );\n    }\n\n    @Override\n    public GUID put( TreeNode treeNode ) {\n        return this.mWarpedInstrument.put( treeNode );\n    }\n\n    @Override\n    public TreeNode get( GUID objectGuid ) {\n        return this.mWarpedInstrument.get( objectGuid );\n    }\n\n    @Override\n    public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) {\n        return this.mWarpedInstrument.queryGUIDByNS( path, szBadSep, szTargetSep );\n    }\n\n    @Override\n    public TreeNode get( GUID guid, int depth ) {\n        return this.mWarpedInstrument.get( guid, depth );\n    }\n\n    @Override\n    public TreeNode getAsRootDepth( GUID guid ) {\n        return this.mWarpedInstrument.getAsRootDepth( guid );\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        this.mWarpedInstrument.remove( guid );\n    }\n\n    @Override\n    public void remove( String path ) {\n        
this.mWarpedInstrument.remove( path );\n    }\n\n    @Override\n    public Collection<TreeNode> getChildren( GUID guid ) {\n        return this.mWarpedInstrument.getChildren( guid );\n    }\n\n    @Override\n    public Collection<GUID> fetchChildrenGuids( GUID guid ) {\n        return this.mWarpedInstrument.fetchChildrenGuids( guid );\n    }\n\n    @Override\n    public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) {\n        return this.mWarpedInstrument.queryEntityHandleByNS( path, szBadSep, szTargetSep );\n    }\n\n    @Override\n    public EntityNode queryNode( String path ) {\n        return this.mWarpedInstrument.queryNode( path );\n    }\n\n    @Override\n    public TreeNode queryTreeNode( String path ) {\n        return this.mWarpedInstrument.queryTreeNode( path );\n    }\n\n    @Override\n    public List<? extends TreeNode> fetchRoot() {\n        return this.mWarpedInstrument.fetchRoot();\n    }\n\n    @Override\n    public void rename( GUID guid, String name ) {\n        this.mWarpedInstrument.rename( guid, name );\n    }\n\n    @Override\n    public Processum getSuperiorProcess() {\n        return this.mWarpedInstrument.getSuperiorProcess();\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.mWarpedInstrument.getGuidAllocator();\n    }\n\n    @Override\n    public ImperialTree getMasterTrieTree() {\n        return this.mWarpedInstrument.getMasterTrieTree();\n    }\n\n    @Override\n    public KernelObjectConfig getConfig() {\n        return this.mWarpedInstrument.getConfig();\n    }\n\n    @Override\n    public String getName() {\n        return this.mszTreeNodeName;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mTreeNodeGuid;\n    }\n\n    @Override\n    public String toJSONString() {\n        return this.mWarpedInstrument.toJSONString();\n    }\n\n    @Override\n    public String toString() {\n        return this.mWarpedInstrument.toString();\n    
}\n}"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ReparseKOMTree.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\n\npublic interface ReparseKOMTree extends KOMInstrument {\n    void newLinkTag( String originalPath, String dirPath, String tagName );\n\n    void removeReparseLink( GUID guid );\n\n    void affirmOwnedNode( GUID parentGuid, GUID childGuid );\n\n    void newHardLink( GUID sourceGuid, GUID targetGuid );\n\n    void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName );\n\n    void updateLinkTag( GUID tagGuid, String tagName );\n\n    ReparseLinkNode queryReparseLinkByNS(String path, String szBadSep, String szTargetSep );\n\n    /** ReparseLinkNode or GUID **/\n    Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep );\n\n    ReparseLinkNode queryReparseLink( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ReparseKOMTreeAddition.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\n\npublic interface ReparseKOMTreeAddition extends Pinenut {\n    ReparseLinkNode queryReparseLinkByNS( String path, String szBadSep, String szTargetSep ) ;\n\n    ReparseLinkNode queryReparseLink( String path );\n\n    void affirmOwnedNode( GUID parentGuid, GUID childGuid ) ;\n\n    void newHardLink( GUID sourceGuid, GUID targetGuid ) ;\n\n    void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ) ;\n\n    void updateLinkTag( GUID tagGuid, String tagName ) ;\n\n    void removeReparseLink( GUID guid ) ;\n\n    void newLinkTag( String originalPath, String dirPath, String tagName ) ;\n\n    void remove( String path );\n\n    Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ReparseLinkSelector.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\n\npublic class ReparseLinkSelector extends MultiFolderPathSelector implements ReparsePointSelector {\n    public ReparseLinkSelector( MultiFolderPathSelector pathSelector ) {\n        super( pathSelector.pathResolver, pathSelector.imperialTree, pathSelector.dirManipulators, pathSelector.fileManipulators );\n    }\n\n    public ReparseLinkSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, trieTree, new GUIDNameManipulator[]{ dirMan }, fileMans );\n    }\n\n    public ReparseLinkSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator[] dirMans, GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, trieTree, dirMans, fileMans );\n    }\n\n    @Override\n    public Object search( String[] parts ) {\n        List<String> resolvedParts = this.pathResolver.resolvePath(parts);\n        return this.dfsSearch( resolvedParts );\n    }\n\n    @Override\n    public ReparseLinkNode searchLinkNode( String[] parts ) {\n        Object result = this.search( parts );\n        if( result instanceof ReparseLinkNode ) {\n            return (ReparseLinkNode) result;\n        }\n        return null;\n    }\n\n    @Override\n    protected Object beforeDFSTermination( String currentPart, GUID guid ) {\n        ReparseLinkNode reparseLinkNode = this.imperialTree.getReparseLinkNodeByNodeGuid( currentPart, guid );\n        if ( reparseLinkNode != null ) {\n            return reparseLinkNode;\n        }\n        return guid;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/ReparsePointSelector.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\n\npublic interface ReparsePointSelector extends PathSelector {\n    ReparseLinkNode searchLinkNode( String[] parts );\n\n    Object search( String[] parts );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/SimpleMultiFolderPathSelector.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic class SimpleMultiFolderPathSelector extends MultiFolderPathSelector {\n    public SimpleMultiFolderPathSelector( PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator[] dirMans, GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, trieTree, dirMans, fileMans );\n    }\n\n    @Override\n    protected List<GUID > searchLinks ( GUID guid, String partName ) {\n        return null;\n    }\n\n    @Override\n    protected List<GUID > searchLinksFirstCase ( String partName ) {\n        return null;\n    }\n\n    @Override\n    protected void fetchAllOriginalGuidsRootCase( List<GUID > guids, String partName ) {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/SimplePathSelector.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic class SimplePathSelector extends StandardPathSelector {\n    public SimplePathSelector( PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, trieTree, dirMan, fileMans );\n    }\n\n    @Override\n    protected List<GUID> searchLinks ( GUID guid, String partName ) {\n        return null;\n    }\n\n    @Override\n    protected List<GUID > searchLinksFirstCase ( String partName ) {\n        return null;\n    }\n\n    @Override\n    protected void fetchAllOriginalGuidsRootCase( List<GUID > guids, String partName ) {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/kom/StandardPathSelector.java",
    "content": "package com.pinecone.hydra.system.ko.kom;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic class StandardPathSelector extends MultiFolderPathSelector implements PathSelector {\n    public StandardPathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, trieTree, new GUIDNameManipulator[]{ dirMan }, fileMans );\n    }\n\n    public GUIDNameManipulator getDirManipulator() {\n        return this.dirManipulators[ 0 ];\n    }\n\n    @Override\n    protected List<GUID > searchDirAndLinks ( GUID guid, String partName ) {\n        List<GUID > guids = this.dirManipulators[ 0 ].getGuidsByNameID( partName, guid );\n        if( guids != null && !guids.isEmpty() ) {\n            return guids;\n        }\n\n        GUID linkGuid = this.imperialTree.getOriginalGuidByNodeGuid( partName, guid );\n        if( linkGuid != null ) {\n            return List.of( linkGuid );\n        }\n        return null;\n    }\n\n    @Override\n    protected List<GUID > searchDirAndLinksFirstCase ( String partName ) {\n        List<GUID > guids = this.dirManipulators[ 0 ].getGuidsByName( partName );\n        if( guids != null && !guids.isEmpty() ) {\n            return guids;\n        }\n\n        return this.imperialTree.fetchOriginalGuidRoot( partName );\n    }\n\n    @Override\n    protected List<GUID > fetchDirsAllGuids(String partName ) {\n        List<GUID > guids = this.dirManipulators[ 0 ].getGuidsByName( partName );\n        guids.removeIf( guid -> !this.imperialTree.isRoot( guid ) );\n        return guids;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/meta/ElementObject.java",
    "content": "package com.pinecone.hydra.system.ko.meta;\n\nimport com.pinecone.hydra.system.ko.KernelObject;\nimport com.pinecone.hydra.unit.imperium.entity.ElementumNode;\n\npublic interface ElementObject extends ElementumNode, KernelObject {\n\n    String FunctionName = ElementObject.class.getSimpleName().replace( \"Object\", \"\" );\n\n    @Override\n    default String objectFunctionName() {\n        return FunctionName;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/ArchDirectMappingTrieRuntimeKOMTree.java",
    "content": "package com.pinecone.hydra.system.ko.runtime;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.unit.trie.DirectoryNode;\nimport com.pinecone.framework.unit.trie.TrieNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic abstract class ArchDirectMappingTrieRuntimeKOMTree extends ArchRuntimeKOMTree implements DirectMappingTrieRuntimeInstrument {\n    public ArchDirectMappingTrieRuntimeKOMTree( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorProcess, superiorPathScope, kernelObjectConfig, guidAllocator );\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object elm ) {\n        return this.mNodeIndex.hasOwnProperty( elm );\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.queryGUIDByPath( key.toString() ) != null;\n    }\n\n    @Override\n    public TrieNode<TreeNode> getOwnProperty( String path ) {\n        return this.mNodeIndex.queryNode( path );\n    }\n\n    @Override\n    public DirectoryNode<TreeNode > fetchOwnChildren( String path ) {\n        TrieNode<TreeNode> self = this.getOwnProperty( path );\n        if ( self == null ) {\n            return null;\n        }\n\n        return self.evinceDirectory();\n    }\n\n    @Override\n    public Collection<String > fetchOwnMappingPath() {\n        return this.mNodeIndex.keySet();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/ArchRuntimeKOMTree.java",
    "content": "package com.pinecone.hydra.system.ko.runtime;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.unit.trie.TrieMap;\nimport com.pinecone.framework.unit.trie.UniTrieMaptron;\nimport com.pinecone.framework.util.CollectionUtils;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.centrum.UniformCentralSystem;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.handle.ObjectTreeAddressingSectionHandle;\nimport com.pinecone.hydra.system.ko.handle.ObjectTreeGUIDAddressingSectionHandle;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.unit.imperium.ArchUniformInstitutionalizedInstrument;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\npublic abstract class ArchRuntimeKOMTree extends ArchUniformInstitutionalizedInstrument implements RuntimeInstrument {\n    protected Namespace                          mThisNamespace;\n    protected KOMInstrument                      mParentInstrument;\n\n    protected TrieMap<String, TreeNode  >        mNodeIndex;\n    protected Map<GUID, RuntimeTreeNode >        mNodeTable;\n\n    protected Processum                          superiorProcess;\n\n    protected RuntimeSystem                 
     superiorSystem;\n\n    protected GuidAllocator                      guidAllocator;\n\n    protected DynamicFactory                     dynamicFactory;\n\n    protected KernelObjectConfig                 kernelObjectConfig;\n\n\n    public ArchRuntimeKOMTree( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorPathScope );\n\n        this.kernelObjectConfig  = kernelObjectConfig;\n        this.mNodeIndex          = new UniTrieMaptron<>( ConcurrentHashMap::new );\n        this.mNodeTable          = new ConcurrentHashMap<>();\n        this.superiorProcess     = superiorProcess;\n        this.guidAllocator       = guidAllocator;\n\n        if ( this.superiorProcess != null ) {\n            if ( this.superiorProcess instanceof RuntimeSystem ) {\n                this.superiorSystem = (RuntimeSystem) this.superiorProcess;\n            }\n            else  {\n                this.superiorSystem = this.superiorProcess.parentSystem();\n            }\n            if ( this.guidAllocator == null && this.superiorSystem instanceof UniformCentralSystem ) {\n                UniformCentralSystem system = (UniformCentralSystem) this.superiorSystem;\n                this.guidAllocator = system.getSystemGuidAllocator();\n            }\n        }\n\n        if ( this.guidAllocator == null ) {\n            throw new IllegalArgumentException( \"GUIDAllocator is undefined.\" );\n        }\n    }\n\n    //************************************** CascadeInstrument **************************************\n    @Override\n    public KOMInstrument parent() {\n        return this.mParentInstrument;\n    }\n\n    @Override\n    public Processum getSuperiorProcess() {\n        return this.superiorProcess;\n    }\n\n    @Override\n    public void setParent( CascadeInstrument parent ) {\n        this.mParentInstrument = (KOMInstrument) parent;\n    }\n\n    @Override\n    public 
Namespace getTargetingName() {\n        return this.mThisNamespace;\n    }\n\n    @Override\n    public void setTargetingName( Namespace name ) {\n        this.mThisNamespace = name;\n    }\n\n    //************************************** CascadeInstrument End **************************************\n\n\n    @Override\n    public void applyGuidAllocator( GuidAllocator guidAllocator ) {\n        this.guidAllocator = guidAllocator;\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.guidAllocator;\n    }\n\n    @Override\n    public String getSuperiorPathScope() {\n        return this.superiorPathScope;\n    }\n\n    @Override\n    public void applySuperiorPathScope( String superiorPathScope ) {\n        this.superiorPathScope = superiorPathScope;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Collection<TreeNode> fetchTreeNodes() {\n        return (Collection) this.mNodeTable.values();\n    }\n\n\n    @Override\n    public KOMInstrument implicated( GUID objectGuid ) {\n        RuntimeTreeNode treeNode = this.mNodeTable.get( objectGuid );\n        if ( treeNode == null ) {\n            for( RuntimeTreeNode node : this.mNodeTable.values() ) {\n                if ( node.treeNode instanceof KOMInstrument ) {\n                    KOMInstrument instrument = (KOMInstrument) node.treeNode;\n                    TreeNode sn = instrument.get( objectGuid );\n                    if ( sn != null ) {\n                        return instrument;\n                    }\n                }\n            }\n\n            return null;\n        }\n\n        if ( treeNode instanceof KOMInstrument ) {\n            return (KOMInstrument) treeNode;\n        }\n        return null;\n    }\n\n    @Override\n    public KernelObjectConfig getConfig() {\n        return this.kernelObjectConfig;\n    }\n\n    @Override\n    public String getPath( GUID guid ) {\n        RuntimeTreeNode treeNode = this.mNodeTable.get( guid );\n        
if ( treeNode == null ) {\n            for( RuntimeTreeNode node : this.mNodeTable.values() ) {\n                if ( node.treeNode instanceof KOMInstrument ) {\n                    KOMInstrument instrument = (KOMInstrument) node.treeNode;\n                    String path = instrument.getPath( guid );\n                    if ( StringUtils.isNoneEmpty( path ) ) {\n                        return path;\n                    }\n                }\n            }\n\n            return null;\n        }\n        return treeNode.getPath();\n    }\n\n    @Override\n    public String getFullName( GUID guid ) {\n        return this.getPath( guid );\n    }\n\n    @Override\n    public GUID queryGUIDByPath( String path ) {\n        return this.queryGUIDByPathForward(path);\n    }\n\n    protected GUID queryGUIDByPathBackward( String path ) {\n        TreeNode treeNode = this.mNodeIndex.get( path );\n        if ( treeNode != null ) {\n            return treeNode.getGuid();\n        }\n\n        String[] split = path.split(this.kernelObjectConfig.getPathNameSepRegex());\n        for( int i = split.length - 2; i >= 0; --i ) {\n            TreeNode node = this.mNodeIndex.get( this.concatenateFullPathBySegments(split, 0, i) );\n            if( node instanceof RuntimeTreeNode ) {\n                RuntimeTreeNode rtn = (RuntimeTreeNode)node;\n                if ( rtn.treeNode instanceof ObjectTreeGUIDAddressingSectionHandle ) {\n                    ObjectTreeGUIDAddressingSectionHandle pointHandle = (ObjectTreeGUIDAddressingSectionHandle) rtn.treeNode;\n                    GUID guid = pointHandle.queryGUIDByPath( this.concatenateFullPathBySegments(split, i + 1, split.length - 1) );\n                    //this.mNodeIndex.put( path, pointHandle.get(guid) );\n                    return guid;\n                }\n            }\n        }\n        return null;\n    }\n\n    protected GUID queryGUIDByPathForward( String path ) {\n        TreeNode treeNode = this.mNodeIndex.get( path );\n        
if ( treeNode != null ) {\n            return treeNode.getGuid();\n        }\n\n        String[] split = path.split(this.kernelObjectConfig.getPathNameSeparator());\n        for( int i = 0; i < split.length; ++i ) {\n            TreeNode node = this.mNodeIndex.get( this.concatenateFullPathBySegments(split, 0, i) );\n            if( node instanceof RuntimeTreeNode ) {\n                RuntimeTreeNode rtn = (RuntimeTreeNode)node;\n                if ( rtn.treeNode instanceof ObjectTreeGUIDAddressingSectionHandle ) {\n                    ObjectTreeGUIDAddressingSectionHandle pointHandle = (ObjectTreeGUIDAddressingSectionHandle) rtn.treeNode;\n                    GUID guid = pointHandle.queryGUIDByPath( this.concatenateFullPathBySegments(split, i + 1, split.length - 1) );\n                    //this.mNodeIndex.put( path, pointHandle.get(guid) );\n                    return guid;\n                }\n            }\n        }\n        return null;\n    }\n\n    protected String concatenateFullPathBySegments( String[] segments, int start, int end ) {\n        StringBuilder stringBuilder = new StringBuilder();\n        for( int i = start; i <= end; ++i ) {\n            if (stringBuilder.length() > 0) {\n                stringBuilder.append(this.kernelObjectConfig.getPathNameSeparator());\n            }\n            stringBuilder.append( segments[ i ] );\n        }\n        return stringBuilder.toString();\n    }\n\n    @Override\n    public GUID queryGUIDByFN( String fullName ) {\n        return this.queryGUIDByPath( fullName );\n    }\n\n    @Override\n    public boolean contains( GUID nodeGuid ) {\n        return this.mNodeTable.containsKey( nodeGuid );\n    }\n\n    @Override\n    public GUID put( TreeNode treeNode ) throws IllegalArgumentException {\n        RuntimeTreeNode runtimeTreeNode;\n        if ( treeNode instanceof RuntimeTreeNode ) {\n            runtimeTreeNode = (RuntimeTreeNode) treeNode;\n        }\n        else {\n            throw new 
IllegalArgumentException( \"TreeNode which been putted should be `RuntimeTreeNode`.\" );\n        }\n        this.mNodeTable.put( treeNode.getGuid(), runtimeTreeNode );\n        return treeNode.getGuid();\n    }\n\n    @Override\n    public TreeNode add( String mountPointPath, TreeNode that ) {\n        RuntimeTreeNode runtimeTreeNode;\n        if ( that instanceof RuntimeTreeNode ) {\n            runtimeTreeNode = (RuntimeTreeNode) that;\n        }\n        else {\n            runtimeTreeNode = new RuntimeTreeNode( that, mountPointPath );\n        }\n\n        this.mNodeTable.put( that.getGuid(), runtimeTreeNode );\n        this.mNodeIndex.put( mountPointPath, runtimeTreeNode );\n        return that;\n    }\n\n\n    @Override\n    public TreeNode get( GUID guid ) {\n        return this.mNodeTable.get( guid );\n    }\n\n    @Override\n    public TreeNode get( GUID guid, int depth ) {\n        return this.mNodeTable.get( guid );\n    }\n\n    @Override\n    public TreeNode getAsRootDepth( GUID guid ) {\n        return this.mNodeTable.get( guid );\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        RuntimeTreeNode treeNode = this.mNodeTable.get( guid );\n        if ( treeNode != null ) {\n            this.mNodeIndex.remove( treeNode.getPath() );\n            this.mNodeTable.remove( guid );\n        }\n    }\n\n    @Override\n    public void remove( String path ) {\n        GUID guid = this.queryGUIDByPath( path );\n        if ( guid != null ) {\n            this.remove( guid );\n        }\n    }\n\n    @Override\n    public void rename( GUID guid, String name ) {\n        throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public Collection<TreeNode> getChildren( GUID guid ) {\n        for( RuntimeTreeNode node : this.mNodeTable.values() ) {\n            if ( node.treeNode instanceof KOMInstrument ) {\n                KOMInstrument instrument = (KOMInstrument) node.treeNode;\n                Collection<TreeNode > cs = 
instrument.getChildren( guid );\n                if ( CollectionUtils.isNoneEmpty( cs ) ) {\n                    return cs;\n                }\n            }\n        }\n        return Units.emptyList();\n    }\n\n    @Override\n    public Collection<GUID> fetchChildrenGuids( GUID guid ) {\n        for( RuntimeTreeNode node : this.mNodeTable.values() ) {\n            if ( node.treeNode instanceof KOMInstrument ) {\n                KOMInstrument instrument = (KOMInstrument) node.treeNode;\n                Collection<GUID> cs = instrument.fetchChildrenGuids( guid );\n                if ( CollectionUtils.isNoneEmpty( cs ) ) {\n                    return cs;\n                }\n            }\n        }\n        return Units.emptyList();\n    }\n\n    @Override\n    public List<? extends TreeNode> fetchRoot() {\n        throw new UnsupportedOperationException();\n    }\n\n    @Override\n    public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) {\n        if( szTargetSep != null ) {\n            path = path.replace( szBadSep, szTargetSep );\n        }\n\n        TreeNode treeNode = this.mNodeIndex.get( path );\n        if ( treeNode != null ) {\n            if( treeNode instanceof RuntimeTreeNode ) {\n                return ( (RuntimeTreeNode) treeNode).treeNode;\n            }\n            return treeNode;\n        }\n\n        String[] split = path.split( this.kernelObjectConfig.getPathNameSeparator() );\n        for( int i = 0; i < split.length; ++i ) {\n            TreeNode node = this.mNodeIndex.get( this.concatenateFullPathBySegments(split, 0, i) );\n            if( node instanceof RuntimeTreeNode ) {\n                RuntimeTreeNode rtn = (RuntimeTreeNode)node;\n                if ( rtn.treeNode instanceof ObjectTreeAddressingSectionHandle ) {\n                    ObjectTreeAddressingSectionHandle pointHandle = (ObjectTreeAddressingSectionHandle) rtn.treeNode;\n                    EntityNode entityNode = pointHandle.queryNode( 
this.concatenateFullPathBySegments(split, i + 1, split.length - 1) );\n                    return entityNode;\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public EntityNode queryNode( String path ) {\n        Object o = this.queryEntityHandleByNS( path, null, null );\n        if( o instanceof EntityNode ) {\n            return (EntityNode) o;\n        }\n        return null;\n    }\n\n    @Override\n    public TreeNode queryTreeNode( String path ) {\n        Object o = this.queryEntityHandleByNS( path, null, null );\n        if( o instanceof TreeNode ) {\n            return (TreeNode) o;\n        }\n        // Runtime KOM shouldn`t be GUID.\n//        else if( o instanceof GUID ) {\n//            return this.get( (GUID) o );\n//        }\n        return null;\n    }\n\n    @Override\n    public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) {\n        if( szTargetSep != null ) {\n            path = path.replace( szBadSep, szTargetSep );\n        }\n        return this.queryGUIDByPath( path );\n    }\n\n    @Override\n    public String querySystemKernelObjectPath( GUID objectGuid ) {\n        String thisScopePath = this.getPath( objectGuid );\n        if ( thisScopePath == null ) {\n            return null;\n        }\n\n        KOMInstrument imp = this.implicated( objectGuid );\n        if ( imp != null ) {\n            thisScopePath = imp.querySystemKernelObjectPath( objectGuid );\n        }\n\n        return this.getSuperiorPathScope() + this.getConfig().getPathNameSeparator() + thisScopePath;\n    }\n\n    @Override\n    public ImperialTree getMasterTrieTree() {\n        return null;\n    }\n\n    static class RuntimeTreeNode implements TreeNode {\n        private TreeNode treeNode;\n\n        private String   path;\n\n        public RuntimeTreeNode( TreeNode treeNode, String path ) {\n            this.treeNode = treeNode;\n            this.path     = path;\n        }\n\n        
@Override\n        public String getName() {\n            return this.treeNode.getName();\n        }\n\n        @Override\n        public GUID getGuid() {\n            return this.treeNode.getGuid();\n        }\n\n        public TreeNode getTreeNode() {\n            return this.treeNode;\n        }\n\n        public String getPath() {\n            return this.path;\n        }\n\n        @Override\n        public String toJSONString() {\n            return this.treeNode.toJSONString();\n        }\n\n        @Override\n        public String toString() {\n            return this.treeNode.toString();\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/CentralizedRuntimeInstrument.java",
    "content": "package com.pinecone.hydra.system.ko.runtime;\n\nimport com.pinecone.hydra.system.ko.handle.ObjectTreeAddressingSectionHandle;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\n\npublic interface CentralizedRuntimeInstrument extends RuntimeInstrument {\n\n    KOMInstrument mount( String mountPointPath, KOMInstrument that );\n\n    KOMInstrument mount( String mountPointPath, String treeNodeName, KOMInstrument that );\n\n    ObjectTreeAddressingSectionHandle directMount( String mountPointPath, ObjectTreeAddressingSectionHandle that );\n\n    ObjectTreeAddressingSectionHandle directMount( String mountPointPath, String treeNodeName, ObjectTreeAddressingSectionHandle that );\n\n    KOMInstrument getMountedInstrument ( String mountPointPath );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/DirectMappingTrieRuntimeInstrument.java",
    "content": "package com.pinecone.hydra.system.ko.runtime;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.unit.trie.DirectoryNode;\nimport com.pinecone.framework.unit.trie.TrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface DirectMappingTrieRuntimeInstrument extends RuntimeInstrument, PineUnit {\n\n    TrieNode<TreeNode> getOwnProperty( String path );\n\n    DirectoryNode<TreeNode > fetchOwnChildren( String path );\n\n    Collection<String > fetchOwnMappingPath();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/GenericRuntimeInstrumentConfig.java",
    "content": "package com.pinecone.hydra.system.ko.runtime;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic class GenericRuntimeInstrumentConfig extends ArchKernelObjectConfig implements KernelObjectConfig {\n    public GenericRuntimeInstrumentConfig() {\n        super();\n    }\n\n    public GenericRuntimeInstrumentConfig( @Nullable Map<String, Object> config ){\n        super( config );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/KernelExpressInstrument.java",
    "content": "package com.pinecone.hydra.system.ko.runtime;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.handle.AppliableKHandle;\nimport com.pinecone.hydra.system.ko.handle.KOMMountPointHandle;\nimport com.pinecone.hydra.system.ko.handle.ObjectTreeAddressingSectionHandle;\nimport com.pinecone.hydra.system.ko.kom.ExpressInstrument;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.system.ko.kom.ProxiedKOMMountPointHandle;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class KernelExpressInstrument extends ArchDirectMappingTrieRuntimeKOMTree implements ExpressInstrument {\n\n    public KernelExpressInstrument( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorProcess, superiorPathScope, kernelObjectConfig, guidAllocator );\n    }\n\n    public KernelExpressInstrument( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig ) {\n        this( superiorProcess, superiorPathScope, kernelObjectConfig, null );\n    }\n\n    @Override\n    public KOMInstrument mount( String mountPointPath, KOMInstrument that ) {\n        String[] debris = mountPointPath.split( this.getConfig().getPathNameSepRegex() );\n        if ( debris.length < 1 ) {\n            throw new IllegalArgumentException( \"Path given should not be empty.\" );\n        }\n        this.mount( mountPointPath, debris[ debris.length - 1 ], that );\n        that.setParent( this );\n        that.applySuperiorPathScope( mountPointPath );\n        return that;\n    }\n\n    @Override\n    public KOMInstrument mount( String mountPointPath, String treeNodeName, KOMInstrument that ) {\n        
KOMMountPointHandle handle = new ProxiedKOMMountPointHandle(\n                treeNodeName, this.guidAllocator.nextGUID(), that\n        );\n        this.add( mountPointPath, handle );\n        return that;\n    }\n\n    @Override\n    public ObjectTreeAddressingSectionHandle directMount( String mountPointPath, ObjectTreeAddressingSectionHandle that ) {\n        if ( that instanceof AppliableKHandle ) {\n            String[] debris = mountPointPath.split( this.getConfig().getPathNameSepRegex() );\n            if ( debris.length < 1 ) {\n                throw new IllegalArgumentException( \"Path given should not be empty.\" );\n            }\n            this.directMount( mountPointPath, debris[ debris.length - 1 ], that );\n        }\n        this.add( mountPointPath, that );\n        return that;\n    }\n\n    @Override\n    public ObjectTreeAddressingSectionHandle directMount( String mountPointPath, String treeNodeName, ObjectTreeAddressingSectionHandle that ) {\n        if ( that instanceof AppliableKHandle ) {\n            AppliableKHandle handle = (AppliableKHandle) that;\n            if ( that.getGuid() == null ) {\n                handle.applyTreeNodeGuid( this.guidAllocator.nextGUID() );\n            }\n            handle.applyTreeNodeName( treeNodeName );\n        }\n        this.add( mountPointPath, that );\n        return that;\n    }\n\n    @Override\n    public KOMInstrument getMountedInstrument( String mountPointPath ) {\n        TreeNode tn = this.mNodeIndex.get( mountPointPath );\n        if ( tn instanceof RuntimeTreeNode ) {\n            tn = ((RuntimeTreeNode) tn).getTreeNode();\n        }\n\n        if ( tn instanceof KOMMountPointHandle ) {\n            return ((KOMMountPointHandle) tn).revealWrapped();\n        }\n        else if ( tn instanceof KOMInstrument ) {\n            return (KOMInstrument) tn;\n        }\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/ko/runtime/RuntimeInstrument.java",
    "content": "package com.pinecone.hydra.system.ko.runtime;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface RuntimeInstrument extends KOMInstrument {\n\n    Collection<TreeNode> fetchTreeNodes();\n\n    TreeNode add( String mountPointPath, TreeNode that );\n\n    KOMInstrument implicated( GUID objectGuid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/polity/RepublicSystem.java",
    "content": "package com.pinecone.hydra.system.polity;\n\nimport com.pinecone.hydra.system.FederalSystem;\nimport com.pinecone.hydra.system.HierarchySystem;\n\npublic interface RepublicSystem extends HierarchySystem, FederalSystem {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/ArchMicroSystem.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic abstract class ArchMicroSystem implements MicroSystem {\n    protected String            mszName;\n    protected Hydrogen          mSystem;\n\n    protected PatriarchalConfig mSubsystemConfig;\n\n    public ArchMicroSystem( String name, Hydrogen system, PatriarchalConfig config ) {\n        this.mszName           = name;\n        this.mSystem           = system;\n        this.mSubsystemConfig  = config;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public Hydrogen getMasterSystem(){\n        return this.mSystem;\n    }\n\n    @Override\n    public PatriarchalConfig getSubsystemConfig() {\n        return this.mSubsystemConfig;\n    }\n\n    protected abstract void traceWelcomeInfo();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/ArchSubsystemDirector.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.ClassUtils;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic abstract class ArchSubsystemDirector extends ArchSystemCascadeComponent implements SubsystemDirector {\n    protected DynamicFactory                          mDynamicFactory;\n    protected PatriarchalConfig                       mSubsystemConfig;\n    protected PatriarchalConfig                       mSegmentConfig;\n    protected boolean                                 mSegmentEnabled;\n\n    public ArchSubsystemDirector( Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, system.getComponentManager(), parent );\n\n        this.mDynamicFactory    = new GenericDynamicFactory( this.getSystem().getTaskManager().getClassLoader() );\n        this.mSubsystemConfig   = this.getSystem().getSystemConfig().getChild( \"Subsystem\" );\n        this.prepare_segment();\n        this.mSegmentEnabled    = (boolean) this.mSegmentConfig.get( \"Enable\" );\n    }\n\n    public ArchSubsystemDirector( Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public ArchSubsystemDirector( Hydrogen system ) {\n        this( system, null );\n    }\n\n    protected abstract void prepare_segment();\n\n    protected abstract void prepare_each_sub( String key, Object dy );\n\n    protected abstract Object instantiate( Map config, String name ) throws ClassNotFoundException 
;\n\n    @SuppressWarnings( \"unchecked\" )\n    protected void prepare_init_subsystem_config( PatriarchalConfig seg ) {\n        if ( seg instanceof Map ) {\n            Map<String, Object> cms = (Map<String, Object>) seg;\n            for ( Map.Entry<String, Object> kv : cms.entrySet() ) {\n                Object dy = kv.getValue();\n                if( dy instanceof String ) {\n                    try {\n                        PatriarchalConfig sysConfig = seg.getChildFromPath( Path.of((String) dy) );\n                        cms.put( kv.getKey(), sysConfig );\n                        dy = sysConfig;\n                    }\n                    catch ( IOException e ) {\n                        throw new ProxyProvokeHandleException( e );\n                    }\n                }\n                else if( dy.getClass().isPrimitive() || ClassUtils.isPrimitiveWrapper( dy.getClass() ) ) {\n                    continue;\n                }\n\n                if ( dy instanceof Map ) {\n                    Map tm = (Map) dy;\n                    Boolean lifecycleWithPrimarySystem = (Boolean) tm.get( \"LifecycleWithPrimarySystem\" );\n                    if ( lifecycleWithPrimarySystem != null && !lifecycleWithPrimarySystem) {\n                        continue;\n                    }\n                }\n\n                this.prepare_each_sub( kv.getKey(), dy );\n            }\n        }\n    }\n\n    @Override\n    public PatriarchalConfig getSubsystemConfig() {\n        return this.mSubsystemConfig;\n    }\n\n    @Override\n    public PatriarchalConfig getSegmentConfig() {\n        return this.mSegmentConfig;\n    }\n\n    @Override\n    public Object instantiate( String fullName ) {\n        try {\n            Object c = this.mSegmentConfig.get( fullName );\n            if ( c instanceof Map ) {\n                Map tm = (Map) c;\n                return this.instantiate( tm, fullName );\n            }\n        }\n        catch ( ClassNotFoundException e ) {\n         
   return null;\n        }\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/Cabinet.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport com.pinecone.framework.system.regime.arch.Director;\n\npublic interface Cabinet extends Director {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/CentralKernelLordFederation.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.regime.arch.Lord;\nimport com.pinecone.framework.util.CollectionUtils;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic class CentralKernelLordFederation extends ArchSubsystemDirector implements KernelLordFederation {\n    protected Logger log = LoggerFactory.getLogger( \"CentralKernelLordFederation\" );\n\n    protected Map<String, Lord> mEmpireLords;   // Domain subsystem.\n\n    public CentralKernelLordFederation( Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, parent );\n\n        this.mEmpireLords = new ConcurrentHashMap<>();\n        this.prepare_init_subsystem_config( this.mSegmentConfig );\n\n        this.log.info( \"[Lifecycle] LordFederation prepared, ready to start. 
<Done>\" );\n    }\n\n    public CentralKernelLordFederation( Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public CentralKernelLordFederation( Hydrogen system ) {\n        this( system, null );\n    }\n\n    @Override\n    protected void prepare_segment() {\n        this.mSegmentConfig     = this.mSubsystemConfig.getChild( \"SystemFederation\" );\n    }\n\n    @Override\n    protected void prepare_each_sub( String key, Object dy ) {\n        if ( !this.mSegmentEnabled ) {\n            return;\n        }\n\n        if( dy instanceof Map ) {\n            try {\n                Map tm = (Map) dy;\n                String name = (String) tm.get( \"Name\" );\n                if( name == null ) {\n                    name = key;\n                }\n\n                Lord lord = this.instantiate( tm, name );\n                this.register( name, lord );\n\n                if( lord == null ) {\n                    throw new IllegalArgumentException( \"Instancing Lord compromised with illegal arguments.\" );\n                }\n            }\n            catch ( ClassNotFoundException e ) {\n                throw new ProxyProvokeHandleException( e );\n            }\n        }\n        else {\n            throw new IllegalArgumentException( \"Lord config should be map or json format.\" );\n        }\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    protected Lord instantiate( Map config, String name ) throws ClassNotFoundException {\n        Class<? 
> clazz = this.mDynamicFactory.getClassLoader().loadClass( (String)config.get( KernelLordFederation.KeyMainClass ) );\n\n        JSONConfig p = null;\n        if ( this.mSegmentConfig instanceof JSONConfig ) {\n            p = (JSONConfig) this.mSegmentConfig;\n        }\n        Object      ins = this.mDynamicFactory.optNewInstance( clazz, new Object[] {\n                this.getSystem(), name, new JSONConfig( (Map<String, Object>) config, p )\n        } );\n        return (Lord) ins;\n    }\n\n    @Override\n    public Lord instantiate( String fullName ) {\n        Lord ms = (Lord) super.instantiate( fullName );\n        this.register( fullName, ms );\n        return ms;\n    }\n\n\n    @Override\n    public void register( String name, Lord system ) {\n        this.mEmpireLords.put( name, system );\n    }\n\n    @Override\n    public void deregister( String name ) {\n        this.mEmpireLords.remove( name );\n    }\n\n    @Override\n    public Lord get( String name ) {\n        return this.mEmpireLords.get( name );\n    }\n\n    @Override\n    public void clearLords() {\n        for ( Lord system : this.mEmpireLords.values() ) {\n            system.release();\n        }\n\n        this.mEmpireLords.clear();\n    }\n\n    @Override\n    public Set<Map.Entry<String, Lord > > entrySet() {\n        return this.mEmpireLords.entrySet();\n    }\n\n    @Override\n    public int size() {\n        return this.mEmpireLords.size();\n    }\n\n    @Override\n    public Map<String, Object> addConfig( String key, Object dyPathOrObject ) {\n        Map<String, Object> cms = CollectionUtils.genericConvert( (Map) this.mSegmentConfig );\n        if( dyPathOrObject instanceof String ) {\n            try {\n                PatriarchalConfig sysConfig = this.mSegmentConfig.getChildFromPath( Path.of((String) dyPathOrObject) );\n                cms.put( key, sysConfig );\n                return CollectionUtils.genericConvert( (Map) sysConfig );\n            }\n            catch ( 
IOException e ) {\n                return null;\n            }\n        }\n        else {\n            cms.put( key, dyPathOrObject );\n        }\n        return CollectionUtils.genericConvert( (Map) dyPathOrObject );\n    }\n\n    @Override\n    public Lord instantiate( String fullName, Object confPathOrObject ) {\n        if ( !this.mSegmentConfig.containsKey( fullName ) ) {\n            Map<String, Object> m = this.addConfig( fullName, confPathOrObject );\n            if ( m == null ) {\n                return null;\n            }\n        }\n        return this.instantiate( fullName );\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return super.getSystem();\n    }\n}"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/CentralMicroSystemCabinet.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic class CentralMicroSystemCabinet extends ArchSubsystemDirector implements KernelMicroSystemCabinet {\n    protected ConcurrentHashMap<String, MicroSystem > mRegistry;\n    protected PatriarchalConfig                       mConfMicroSystems;\n\n\n    public CentralMicroSystemCabinet( Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, parent );\n\n        this.mRegistry          = new ConcurrentHashMap<>();\n        this.prepare_init_subsystem_config( this.mConfMicroSystems );\n    }\n\n    public CentralMicroSystemCabinet( Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public CentralMicroSystemCabinet( Hydrogen system ) {\n        this( system, null );\n    }\n\n    @Override\n    protected void prepare_segment() {\n        this.mSegmentConfig     = this.mSubsystemConfig.getChild( \"SystemCabinet\" );\n        this.mConfMicroSystems  = this.mSegmentConfig.getChild( \"MicroSystems\" );\n    }\n\n    @Override\n    protected void prepare_each_sub( String key, Object dy ) {\n        if ( !this.mSegmentEnabled ) {\n            return;\n        }\n\n        if( dy instanceof Map ) {\n            try {\n                Map tm = (Map) dy;\n                String name = (String) tm.get( \"Name\" );\n                if( name == null ) {\n                    name = key;\n                }\n\n                MicroSystem is = this.instantiate( tm, name );\n                this.register( name, is );\n\n            
    if( is == null ) {\n                    throw new IllegalArgumentException( \"Instancing MicroSystem compromised with illegal arguments.\" );\n                }\n            }\n            catch ( ClassNotFoundException e ) {\n                throw new ProxyProvokeHandleException( e );\n            }\n        }\n        else {\n            throw new IllegalArgumentException( \"MicroSystem config should be map or json format.\" );\n        }\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    protected MicroSystem instantiate( Map config, String name ) throws ClassNotFoundException {\n        Class<? > clazz = this.mDynamicFactory.getClassLoader().loadClass( (String)config.get( KernelMicroSystemCabinet.KeyMainClass ) );\n\n        JSONConfig p = null;\n        if ( this.mSegmentConfig instanceof JSONConfig ) {\n            p = (JSONConfig) this.mSegmentConfig;\n        }\n        Object      ins = this.mDynamicFactory.optNewInstance( clazz, new Object[] {\n                name, this.getSystem(), new JSONConfig( (Map<String, Object>) config, p )\n        } );\n        return (MicroSystem) ins;\n    }\n\n    @Override\n    public MicroSystem instantiate( String fullName ) {\n        MicroSystem ms = (MicroSystem) super.instantiate( fullName );\n        this.register( fullName, ms );\n        return ms;\n    }\n\n    @Override\n    public void register( String name, MicroSystem system ) {\n        this.mRegistry.put( name, system );\n    }\n\n    @Override\n    public void deregister( String name ) {\n        this.mRegistry.remove( name );\n    }\n\n    @Override\n    public MicroSystem get( String name ) {\n        return this.mRegistry.get( name );\n    }\n\n    @Override\n    public void clearCabinet() {\n        for( MicroSystem system : this.mRegistry.values() ) {\n            system.release();\n        }\n\n        this.mRegistry.clear();\n    }\n\n    @Override\n    public Set<Map.Entry<String, MicroSystem > > entrySet() {\n        return 
this.mRegistry.entrySet();\n    }\n\n    @Override\n    public int size() {\n        return this.mRegistry.size();\n    }\n\n\n    @Override\n    public Hydrogen getSystem() {\n        return super.getSystem();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/Federation.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport com.pinecone.framework.system.regime.arch.Director;\n\npublic interface Federation extends Director {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/KernelLordFederation.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.regime.arch.Lord;\n\npublic interface KernelLordFederation extends SubsystemDirector, Federation {\n\n    String KeyMainClass = \"MainClass\";\n\n    void register( String name, Lord system );\n\n    void deregister( String name );\n\n    Lord get( String name );\n\n    void clearLords() ;\n\n    Set<Map.Entry<String, Lord > > entrySet();\n\n    int size();\n\n    Map<String, Object> addConfig(String key, Object dyPathOrObject );\n\n    Lord instantiate( String fullName, Object confPathOrObject );\n\n    Lord instantiate( String fullName );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/KernelMicroSystemCabinet.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport java.util.Map;\nimport java.util.Set;\n\npublic interface KernelMicroSystemCabinet extends SubsystemDirector, Cabinet {\n    String KeyMainClass = \"MainClass\";\n\n    void register( String name, MicroSystem system );\n\n    void deregister( String name );\n\n    MicroSystem get( String name );\n\n    void clearCabinet() ;\n\n    Set<Map.Entry<String, MicroSystem > > entrySet();\n\n    int size();\n\n    MicroSystem instantiate( String fullName );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/MicroSystem.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.executum.Systema;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\n\npublic interface MicroSystem extends Systema {\n    void release();\n\n    RuntimeSystem getMasterSystem();\n\n    PatriarchalConfig getSubsystemConfig();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/subsystem/SubsystemDirector.java",
    "content": "package com.pinecone.hydra.system.subsystem;\n\nimport com.pinecone.framework.system.regime.arch.Director;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.hydra.system.HyComponent;\n\npublic interface SubsystemDirector extends Director, HyComponent {\n\n    PatriarchalConfig getSubsystemConfig();\n\n    PatriarchalConfig getSegmentConfig();\n\n    Object instantiate( String fullName );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/system/types/HydraKingdom.java",
    "content": "package com.pinecone.hydra.system.types;\n\nimport com.pinecone.hydra.system.HierarchySystem;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic interface HydraKingdom extends Hydrogen, HierarchySystem {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ArchRegimentObjectModel.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\n\npublic abstract class ArchRegimentObjectModel extends ArchUniformInstitutionalizedInstrument implements KOMInstrument {\n    protected ImperialTree          imperialTree;\n\n    protected TreeMasterManipulator treeMasterManipulator;\n\n    protected KernelObjectConfig    kernelObjectConfig;\n\n    public ArchRegimentObjectModel( TreeMasterManipulator masterManipulator, KernelObjectConfig kernelObjectConfig, String superiorPathScope ) {\n        super( superiorPathScope );\n        this.treeMasterManipulator = masterManipulator;  // [1st]\n        this.kernelObjectConfig    = kernelObjectConfig; // [2st]\n        this.imperialTree          = new RegimentedImperialTree( this );\n    }\n\n    public ArchRegimentObjectModel( KOIMasterManipulator masterManipulator, KernelObjectConfig kernelObjectConfig, String superiorPathScope ) {\n        this( (TreeMasterManipulator) masterManipulator.getSkeletonMasterManipulator(), kernelObjectConfig, superiorPathScope );\n    }\n\n    public ImperialTree getMasterTrieTree() {\n        return this.imperialTree;\n    }\n\n    TreeMasterManipulator getTreeMasterManipulator() {\n        return this.treeMasterManipulator;\n    }\n\n    @Override\n    public KernelObjectConfig getConfig() {\n        return this.kernelObjectConfig;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ArchUniformInstitutionalizedInstrument.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\n\n/**\n *  Pinecone Ursus For Java Uniform Institutionalized Instrument\n *  Author: Harald.E (Dragon King)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Uniform Institutionalized Instrument\n *  统一编制化元信息中级模型\n *  *****************************************************************************************\n */\npublic abstract class ArchUniformInstitutionalizedInstrument implements KOMInstrument {\n\n    protected String                superiorPathScope;\n\n    public ArchUniformInstitutionalizedInstrument( String superiorPathScope ) {\n        this.superiorPathScope = superiorPathScope;\n    }\n\n\n    @Override\n    public String querySystemKernelObjectPath( GUID objectGuid ) {\n        String thisScopePath = this.getPath( objectGuid );\n        if ( thisScopePath == null ) {\n            return null;\n        }\n\n        return this.getSuperiorPathScope() + this.getConfig().getPathNameSeparator() + thisScopePath;\n    }\n\n    @Override\n    public String getSuperiorPathScope() {\n        return this.superiorPathScope;\n    }\n\n    @Override\n    public void applySuperiorPathScope( String superiorPathScope ) {\n        this.superiorPathScope = superiorPathScope;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/GUIDImperialTrieNode.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.uoi.UOI;\n\nimport java.util.List;\n\n/**\n *  Pinecone Ursus For Java GUIDDistributedTrieNode\n *  Author:\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Thanks for genius`s contribution.\n *  **********************************************************\n */\npublic class GUIDImperialTrieNode implements ImperialTreeNode {\n    //节点id\n    protected long enumId;\n    //节点uuid\n    protected GUID guid;\n    //父节点uuid\n    protected List<GUID > parentGUID;\n    //基础信息uuid\n    protected GUID attributesGUID;\n    //元信息uuid\n    protected GUID nodeMetadataGUID;\n    //节点的类型方便获取数据\n    protected UOI type;\n\n\n    public GUIDImperialTrieNode() {\n    }\n\n    public GUIDImperialTrieNode(\n            long enumId, GUID guid, List<GUID> parentGUID, GUID baseDataGUID, GUID nodeMetadataGUID, UOI type\n    ) {\n        this.enumId = enumId;\n        this.guid = guid;\n        this.parentGUID = parentGUID;\n        this.attributesGUID = baseDataGUID;\n        this.nodeMetadataGUID = nodeMetadataGUID;\n        this.type = type;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId( long enumId ) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid( GUID guid ) {\n        this.guid = guid;\n    }\n\n    @Override\n    public List<GUID > getParentGUIDs() {\n        return this.parentGUID;\n    }\n\n    @Override\n    public void setParentGUID( List<GUID> parentGUID ) {\n        this.parentGUID = parentGUID;\n    }\n\n    @Override\n    public GUID getAttributesGUID() 
{\n        return this.attributesGUID;\n    }\n\n    @Override\n    public void setBaseDataGUID( GUID baseDataGUID ) {\n        this.attributesGUID = baseDataGUID;\n    }\n\n    @Override\n    public GUID getNodeMetadataGUID() {\n        return this.nodeMetadataGUID;\n    }\n\n    @Override\n    public void setNodeMetadataGUID( GUID nodeMetadataGUID ) {\n        this.nodeMetadataGUID = nodeMetadataGUID;\n    }\n\n    @Override\n    public UOI getType() {\n        return this.type;\n    }\n\n    @Override\n    public void setType(UOI type) {\n        this.type = type;\n    }\n\n    @Override\n    public String toString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"class\", this.className() ),\n                new KeyValue<>( \"guid\", this.getGuid() ),\n                new KeyValue<>( \"type\", this.getType() )\n        } );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ImperialTree.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.KernelObjectInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\n\nimport java.util.List;\n\n/**\n *  Pinecone Ursus For Java Imperial Tree\n *  Author: Harald.E (Dragon King), Ken\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Imperium (Imperial Tree)\n *  It is a distributed uniformed institutionalization system tree used for uniformed and systematic institutionalization of controlled distributed objects.\n *  Similar to kernel object management in other OS, it ensures that kernel objects and target-controlled objects in the system are marshaled\n *  and accessed in a uniformed URL-style.\n *  This data structure is based on a prefix tree and a GUID system, which can also be utilized for other marshaling purposes.\n *\n *  Imperium (统治树)，\n *  是一种分布式统一编制体系树，用于对受控分布式对象进行统一系统性编制。\n *  与其他操作系统内核对象管理类似，使得系统中的内核对象和欲控对象，整整齐齐的被编制和统一URL式访问。\n *  该数据结构基于前缀树和GUID身份证体系，是一类通用数据结构，也可用于其他编制或编组目的的实现。\n *\n *  e.g. \\Device\\HarddiskVolume3\\Users\\dragonking\\AppData\\Local\\\n *  e.g. 
/proc/137/task\n *  *****************************************************************************************\n */\npublic interface ImperialTree extends PineUnit {\n\n    void insert( ImperialTreeNode distributedConfTreeNode );\n\n    void affirmOwnedNode( GUID nodeGUID, GUID parentGUID );\n\n    GUIDImperialTrieNode getNode(GUID guid );\n\n    void purge( GUID guid );\n\n    void removeTreeNodeOnly( GUID guid );\n\n    void put( GUID guid, GUIDImperialTrieNode distributedTreeNode );\n\n    boolean contains( GUID key );\n\n    boolean containsChild( GUID parentGuid, GUID childGuid );\n\n    GUID queryGUIDByPath( String path );\n\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    List<GUID > fetchChildrenGuids( GUID parentGuid );\n\n    List<GUID > fetchParentGuids( GUID guid );\n\n    void removeInheritance( GUID childGuid,GUID parentGuid );\n\n\n    String getCachePath( GUID guid );\n\n    void removeCachePath( GUID guid );\n\n    GUID getOwner( GUID guid );\n\n    void setOwner( GUID sourceGuid, GUID targetGuid );\n\n    void setGuidLineage( GUID sourceGuid, GUID targetGuid );\n\n    List<GUID > getSubordinates( GUID guid );\n\n    void insertCachePath( GUID guid,String path );\n\n\n    List<GUID > fetchRoot();\n\n    boolean isRoot( GUID guid );\n\n\n\n\n    /** Link / Reference */\n    long queryLinkedCount( GUID guid, LinkedType linkedType );\n\n    long queryAllLinkedCount( GUID guid );\n\n    default long queryStrongLinkedCount( GUID guid ) {\n        return this.queryLinkedCount( guid, LinkedType.Owned );\n    }\n\n    default long queryWeakLinkedCount( GUID guid ) {\n        return this.queryLinkedCount( guid, LinkedType.Hard );\n    }\n\n    void newHardLink( GUID sourceGuid, GUID targetGuid );\n\n    void moveTo( GUID sourceGuid, GUID destinationGuid );\n\n    void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, KernelObjectInstrument instrument );\n\n    void updateLinkTagName( GUID tagGuid, String tagName );\n\n\n    /** Link 
Tag */\n    GUID getOriginalGuid( String tagName, GUID parentDirGUID );\n\n    GUID getOriginalGuidByNodeGuid( String tagName, GUID nodeGUID );\n\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    ReparseLinkNode getReparseLinkNode( String tagName, GUID parentDirGuid );\n\n    ReparseLinkNode getReparseLinkNodeByNodeGuid( String tagName, GUID nodeGUID );\n\n    GUID getOriginalGuid( GUID tagGuid );\n\n    void removeReparseLink( GUID guid );\n\n    boolean isTagGuid( GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ImperialTreeConstants.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\npublic final class ImperialTreeConstants {\n    public static int DefaultShortPathLength  =  330;\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/ImperialTreeNode.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\n\nimport java.util.List;\n\n/**\n *  Pinecone Ursus For Java\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Uniform Distributed Trie Tree (UDTT)\n *  *****************************************************************************************\n */\npublic interface ImperialTreeNode extends Pinenut {\n    long getEnumId();\n    void setEnumId(long enumId);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    List<GUID> getParentGUIDs();\n    void setParentGUID(List<GUID> parentGUID);\n\n    GUID getAttributesGUID();\n    void setBaseDataGUID(GUID baseDataGUID);\n\n    GUID getNodeMetadataGUID();\n    void setNodeMetadataGUID(GUID nodeMetadataGUID);\n\n    UOI getType();\n    void setType( UOI type );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/LinkedType.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\npublic enum LinkedType {\n    Owned           ( \"Owned\" ),\n\n    // HardLink\n    Hard            ( \"Hard\" ),\n    WeaK            (\"Weak\");\n\n    private final String value;\n\n    LinkedType( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n\n    public static String queryName( LinkedType type ) {\n        return type.getName();\n    }\n\n    public static LinkedType queryLinkedType( String sz ) {\n        return LinkedType.valueOf( sz );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/RegimentedImperialTree.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.KernelObjectInstrument;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.ulf.util.guid.i64.GUID72;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.util.List;\n\npublic class RegimentedImperialTree implements UniImperialTree {\n    static TreeMasterManipulator evalTreeMasterManipulator( KOMInstrument komInstrument ) {\n        return ((ArchRegimentObjectModel) komInstrument).getTreeMasterManipulator();\n    }\n\n    static KernelObjectConfig evalKernelObjectConfig( KOMInstrument komInstrument ) {\n        return komInstrument.getConfig();\n    }\n\n    protected TrieTreeManipulator      trieTreeManipulator;\n\n    protected TireOwnerManipulator     tireOwnerManipulator;\n\n    protected TriePathCacheManipulator triePathCacheManipulator;\n\n    protected KernelObjectConfig       kernelObjectConfig;\n\n    protected int                      shortPathLength;\n\n    public RegimentedImperialTree( TreeMasterManipulator masterManipulator ) {\n        this.trieTreeManipulator      =  masterManipulator.getTrieTreeManipulator();\n        this.tireOwnerManipulator     =  masterManipulator.getTireOwnerManipulator();\n        this.triePathCacheManipulator =  masterManipulator.getTriePathCacheManipulator();\n        this.shortPathLength          =  ImperialTreeConstants.DefaultShortPathLength;\n    }\n\n    public RegimentedImperialTree( TreeMasterManipulator masterManipulator, 
KernelObjectConfig config ) {\n        this( masterManipulator );\n\n        this.kernelObjectConfig = config;\n        this.shortPathLength    = config.getShortPathLength();\n    }\n\n    public RegimentedImperialTree( KOMInstrument komInstrument ) {\n        this(\n                RegimentedImperialTree.evalTreeMasterManipulator(komInstrument),\n                RegimentedImperialTree.evalKernelObjectConfig(komInstrument)\n        );\n    }\n\n\n\n    @Override\n    public void insert( ImperialTreeNode node ) {\n        this.trieTreeManipulator.insert( this.tireOwnerManipulator, (GUIDImperialTrieNode) node );\n    }\n\n    @Override\n    public void affirmOwnedNode( GUID nodeGUID, GUID parentGUID ){\n        GUID owner = this.tireOwnerManipulator.getOwner( nodeGUID );\n        if ( owner == null ){\n            this.tireOwnerManipulator.remove( nodeGUID, owner );\n            this.tireOwnerManipulator.insertOwnedNode( nodeGUID, parentGUID );\n        }\n        else {\n            this.tireOwnerManipulator.insertOwnedNode( nodeGUID, parentGUID );\n        }\n    }\n\n    @Override\n    public GUIDImperialTrieNode getNode( GUID guid ){\n        return this.trieTreeManipulator.getNode( guid );\n    }\n\n\n    @Override\n    public void purge( GUID guid ){\n        this.trieTreeManipulator.purge( guid );\n    }\n\n    @Override\n    public void removeTreeNodeOnly( GUID guid ) {\n        this.trieTreeManipulator.removeTreeNode( guid );\n    }\n\n    @Override\n    public void put( GUID guid, GUIDImperialTrieNode distributedTreeNode ){\n        this.trieTreeManipulator.insertNode( guid, distributedTreeNode );\n    }\n\n    @Override\n    public boolean contains( GUID key ) {\n        return this.trieTreeManipulator.contains( key );\n    }\n\n    @Override\n    public boolean containsChild( GUID parentGuid, GUID childGuid ) {\n        return this.trieTreeManipulator.countNode( parentGuid, childGuid ) > 0;\n    }\n\n    @Override\n    public GUID queryGUIDByPath( String 
path ) {\n        return this.triePathCacheManipulator.queryGUIDByPath( path );\n    }\n\n    @Override\n    public List<GUIDImperialTrieNode> getChildren(GUID guid ) {\n        return this.trieTreeManipulator.getChildren(guid);\n    }\n\n    @Override\n    public List<GUID > fetchChildrenGuids( GUID parentGuid ) {\n        return this.trieTreeManipulator.fetchChildrenGuids( parentGuid );\n    }\n\n    @Override\n    public List<GUID > fetchParentGuids( GUID guid ) {\n        return this.trieTreeManipulator.fetchParentGuids(guid);\n    }\n\n    @Override\n    public void removeInheritance( GUID childGuid, GUID parentGuid ) {\n        this.trieTreeManipulator.removeInheritance(childGuid,parentGuid);\n    }\n\n    @Override\n    public void setOwner( GUID sourceGuid, GUID targetGuid ) {\n        GUID owner = this.tireOwnerManipulator.getOwner(sourceGuid);\n        if ( owner == null ){\n            long exist = this.trieTreeManipulator.countNode( sourceGuid, targetGuid );\n            if ( exist <= 0 ){\n                this.tireOwnerManipulator.insertOwnedNode( sourceGuid, targetGuid );\n            }\n            else {\n                this.tireOwnerManipulator.setOwned(sourceGuid, targetGuid);\n            }\n        }\n        else {\n            this.tireOwnerManipulator.remove( sourceGuid, owner );\n            this.tireOwnerManipulator.insertOwnedNode( sourceGuid, targetGuid );\n        }\n    }\n\n    @Override\n    public void setGuidLineage( GUID sourceGuid, GUID targetGuid ) {\n        this.tireOwnerManipulator.updateParentGuid( sourceGuid, targetGuid );\n    }\n\n    @Override\n    public String getCachePath( GUID guid ){\n        return this.triePathCacheManipulator.getPath(guid);\n    }\n\n    @Override\n    public void removeCachePath( GUID guid ) {\n        this.triePathCacheManipulator.remove( guid );\n    }\n\n    @Override\n    public GUID getOwner( GUID guid ) {\n        return this.tireOwnerManipulator.getOwner(guid);\n    }\n\n\n\n    
@Override\n    public List<GUID > getSubordinates( GUID guid) {\n        return this.tireOwnerManipulator.getSubordinates(guid);\n    }\n\n    @Override\n    public void insertCachePath( GUID guid, String path ) {\n        if ( path.length() > this.shortPathLength ){\n            String part1 = path.substring( 0, this.shortPathLength );\n            String part2 = path.substring( this.shortPathLength    );\n            this.triePathCacheManipulator.insertLongPath( guid, part1, part2 );\n        }\n        else {\n            GUID node = this.triePathCacheManipulator.getNode(path);\n            if( node == null ){\n                this.triePathCacheManipulator.insert( guid, path );\n            }\n\n        }\n    }\n\n\n\n    @Override\n    public List<GUID > fetchRoot() {\n        return this.trieTreeManipulator.fetchRoot();\n    }\n\n\n    @Override\n    public boolean isRoot( GUID guid ) {\n        return this.trieTreeManipulator.isRoot( guid );\n    }\n\n    @Override\n    public long queryLinkedCount( GUID guid, LinkedType linkedType ) {\n        return this.trieTreeManipulator.queryLinkedCount( guid, linkedType );\n    }\n\n    @Override\n    public long queryAllLinkedCount( GUID guid ) {\n        return this.trieTreeManipulator.queryAllLinkedCount( guid );\n    }\n\n    @Override\n    public long queryStrongLinkedCount( GUID guid ) {\n        return this.trieTreeManipulator.queryStrongLinkedCount( guid );\n    }\n\n    @Override\n    public long queryWeakLinkedCount( GUID guid ) {\n        return this.trieTreeManipulator.queryWeakLinkedCount( guid );\n    }\n\n    @Override\n    public void newHardLink( GUID sourceGuid, GUID targetGuid ) {\n        long count = this.trieTreeManipulator.countNode( sourceGuid, targetGuid );\n        if ( count <= 0 ){\n            this.tireOwnerManipulator.insertHardLinkedNode( sourceGuid,targetGuid );\n        }\n    }\n\n    @Override\n    public void moveTo( GUID sourceGuid, GUID destinationGuid ) {\n        
this.removeCachePath( sourceGuid );\n        this.tireOwnerManipulator.updateParentGuid( sourceGuid, destinationGuid );\n    }\n\n    @Override\n    public void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, KernelObjectInstrument instrument ) {\n        GuidAllocator guidAllocator = instrument.getGuidAllocator();\n        GUID tagGuid = guidAllocator.nextGUID();\n        this.trieTreeManipulator.newLinkTag( originalGuid, dirGuid, tagName, tagGuid );\n    }\n\n    @Override\n    public void updateLinkTagName( GUID tagGuid, String tagName ) {\n        this.trieTreeManipulator.updateLinkTagName( tagGuid,tagName );\n    }\n\n    @Override\n    public boolean isTagGuid(GUID guid) {\n        return this.trieTreeManipulator.isTagGuid( guid );\n    }\n\n    @Override\n    public GUID getOriginalGuid( String tagName, GUID parentDirGUID ) {\n        return this.trieTreeManipulator.getOriginalGuid( tagName, parentDirGUID );\n    }\n\n    @Override\n    public GUID getOriginalGuidByNodeGuid( String tagName, GUID nodeGUID ) {\n        return this.trieTreeManipulator.getOriginalGuidByNodeGuid( tagName, nodeGUID );\n    }\n\n    @Override\n    public List<GUID > fetchOriginalGuid( String tagName ) {\n        return this.trieTreeManipulator.fetchOriginalGuid( tagName );\n    }\n\n    @Override\n    public List<GUID > fetchOriginalGuidRoot( String tagName ) {\n        return this.trieTreeManipulator.fetchOriginalGuidRoot( tagName );\n    }\n\n    @Override\n    public ReparseLinkNode getReparseLinkNodeByNodeGuid( String tagName, GUID nodeGUID ) {\n        return this.trieTreeManipulator.getReparseLinkNodeByNodeGuid( tagName, nodeGUID );\n    }\n\n    @Override\n    public ReparseLinkNode getReparseLinkNode( String tagName, GUID parentDirGuid ) {\n        return this.trieTreeManipulator.getReparseLinkNode( tagName, parentDirGuid );\n    }\n\n    @Override\n    public GUID getOriginalGuid( GUID tagGuid ) {\n        return 
this.trieTreeManipulator.getOriginalGuidByTagGuid( tagGuid );\n    }\n\n    @Override\n    public void removeReparseLink( GUID guid ) {\n        this.trieTreeManipulator.removeReparseLink( guid );\n    }\n\n    @Override\n    public boolean hasOwnProperty(Object key) {\n        return this.containsKey( key );\n    }\n\n    @Override\n    public boolean containsKey(Object key) {\n        if( key instanceof GUID ) {\n            return this.containsKey((GUID) key );\n        }\n        else if( key instanceof String ) {\n            return this.containsKey( (new GUID72((String)key)) );\n        }\n        return false;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/UniImperialTree.java",
    "content": "package com.pinecone.hydra.unit.imperium;\n\npublic interface UniImperialTree extends ImperialTree {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/BranchNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface BranchNode extends EntityNode {\n\n    String getName();\n\n    GUID getGuid();\n\n    default String getMetaType() {\n        return this.className().replace(\"Generic\",\"\");\n    }\n\n    default BranchNode evinceBranchNode(){\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/ElementumNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\npublic interface ElementumNode extends TreeNode, MetadataNode {\n\n    default ElementumNode evinceElementNode() {\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/EntityNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface EntityNode extends Pinenut {\n    String getName();\n\n    GUID getGuid();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/MetaEntryNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\npublic interface MetaEntryNode extends EntityNode, MetadataNode {\n\n    default MetaEntryNode evinceEntryNode() {\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/MetadataNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\npublic interface MetadataNode extends EntityNode {\n\n    default MetadataNode evinceMetadataNode() {\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/ReparseLinkNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\n\npublic interface ReparseLinkNode extends EntityNode {\n    @Override\n    default String getName() {\n        return this.getTagName();\n    }\n\n    @Override\n    default GUID getGuid() {\n        return this.getTagGuid();\n    }\n\n    String getTagName();\n\n    GUID getTagGuid();\n\n    LinkedType getLinkedType();\n\n    GUID getTargetNodeGuid();\n\n    GUID getParentNodeGuid();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/SkeletonNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\npublic interface SkeletonNode extends EntityNode {\n\n    default SkeletonNode evinceSkeletonNode(){\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/TreeNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface TreeNode extends SkeletonNode {\n    String getName();\n\n    GUID getGuid();\n\n    default String getMetaType() {\n        return this.className().replace(\"Generic\",\"\");\n    }\n\n    default TreeNode evinceTreeNode(){\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/entity/TreeReparseLinkNode.java",
    "content": "package com.pinecone.hydra.unit.imperium.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\n\npublic class TreeReparseLinkNode implements ReparseLinkNode {\n    protected String     tagName;\n    protected GUID       tagGuid;\n    protected LinkedType linkedType;\n    protected GUID       targetNodeGuid;\n    protected GUID       parentNodeGuid;\n\n    public TreeReparseLinkNode(){\n\n    }\n\n    @Override\n    public String getTagName() {\n        return this.tagName;\n    }\n\n    public void setTagName( String tagName ) {\n        this.tagName = tagName;\n    }\n\n    @Override\n    public GUID getTagGuid() {\n        return this.tagGuid;\n    }\n\n    public void setTagGuid( GUID tagGuid ) {\n        this.tagGuid = tagGuid;\n    }\n\n    @Override\n    public LinkedType getLinkedType() {\n        return this.linkedType;\n    }\n\n    public void setLinkedType( LinkedType linkedType ) {\n        this.linkedType = linkedType;\n    }\n\n    @Override\n    public GUID getTargetNodeGuid() {\n        return this.targetNodeGuid;\n    }\n\n    public void setTargetNodeGuid( GUID targetNodeGuid ) {\n        this.targetNodeGuid = targetNodeGuid;\n    }\n\n    @Override\n    public GUID getParentNodeGuid() {\n        return this.parentNodeGuid;\n    }\n\n    public void setParentNodeGuid( GUID parentNodeGuid ) {\n        this.parentNodeGuid = parentNodeGuid;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/operator/OperatorFactory.java",
    "content": "package com.pinecone.hydra.unit.imperium.operator;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface OperatorFactory extends Pinenut {\n    void register( String typeName, TreeNodeOperator functionalNodeOperation );\n    TreeNodeOperator getOperator(String typeName);\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/operator/TreeNodeOperator.java",
    "content": "package com.pinecone.hydra.unit.imperium.operator;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface TreeNodeOperator extends Pinenut {\n    GUID insert( TreeNode treeNode );\n\n    void purge( GUID guid );\n\n    TreeNode get( GUID guid ) ;\n\n    TreeNode get( GUID guid, int depth );\n\n    TreeNode getAsRootDepth( GUID guid );\n\n    void update( TreeNode treeNode );\n\n    void updateName( GUID guid ,String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/source/TireOwnerManipulator.java",
    "content": "package com.pinecone.hydra.unit.imperium.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\n\nimport java.util.List;\n\npublic interface TireOwnerManipulator extends Pinenut {\n    void insertRootNode ( GUID guid, LinkedType linkedType );\n\n    default void insertRootNode ( GUID guid ) {\n        this.insertRootNode( guid, LinkedType.Owned );\n    }\n\n    void insert( GUID targetGuid, GUID parentGUID, LinkedType linkedType );\n\n    default void insertOwnedNode( GUID targetGuid, GUID parentGUID ) {\n        this.insert( targetGuid, parentGUID, LinkedType.Owned );\n    }\n\n    default void insertHardLinkedNode( GUID targetGuid, GUID parentGUID ) {\n        this.insert( targetGuid, parentGUID, LinkedType.Hard );\n    }\n\n\n\n    void update( GUID targetGuid, GUID parentGUID, LinkedType linkedType );\n\n    void updateParentGuid( GUID targetGuid, GUID parentGUID );\n\n    void updateLinkedType( GUID targetGuid, LinkedType linkedType );\n\n\n\n    void remove( GUID subordinateGuid, GUID ownerGuid );\n\n    void removeBySubordinate( GUID subordinateGuid );\n\n    void removeByOwner( GUID OwnerGuid );\n\n    GUID getOwner( GUID subordinateGuid );\n\n    List<GUID > getSubordinates( GUID guid );\n\n\n\n    void setLinkedType             ( GUID sourceGuid, GUID targetGuid, LinkedType linkedType );\n\n    default void setOwned          ( GUID sourceGuid, GUID targetGuid ) {\n        this.setLinkedType( sourceGuid, targetGuid, LinkedType.Owned );\n    }\n\n    default void setHardLink       ( GUID sourceGuid, GUID targetGuid ) {\n        this.setLinkedType( sourceGuid, targetGuid, LinkedType.Hard );\n    }\n\n    LinkedType getLinkedType       ( GUID childGuid,GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/source/TreeMasterManipulator.java",
    "content": "package com.pinecone.hydra.unit.imperium.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\n\npublic interface TreeMasterManipulator extends KOISkeletonMasterManipulator {\n    TireOwnerManipulator getTireOwnerManipulator();\n\n    TrieTreeManipulator getTrieTreeManipulator();\n\n    TriePathCacheManipulator getTriePathCacheManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/source/TriePathCacheManipulator.java",
    "content": "package com.pinecone.hydra.unit.imperium.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface TriePathCacheManipulator extends Pinenut {\n    void insert ( GUID guid, String path );\n\n    void insertLongPath( GUID guid, String path, String longPath );\n\n    void remove ( GUID guid );\n\n    String getPath ( GUID guid );\n\n    GUID getNode ( String path );\n\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/imperium/source/TrieTreeManipulator.java",
    "content": "package com.pinecone.hydra.unit.imperium.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\n\nimport java.util.List;\n\npublic interface TrieTreeManipulator extends Pinenut {\n    void insert( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node );\n\n    /** With detail meta data node information. */\n    GUIDImperialTrieNode getNode(GUID guid );\n\n    boolean contains( GUID key ) ;\n\n    /** Only with tree node index information. */\n    GUIDImperialTrieNode getTreeNodeOnly(GUID guid, GUID parentGuid );\n\n    long countNode( GUID guid, GUID parentGuid );\n\n    // TODO\n    void insertNode( GUID guid, GUIDImperialTrieNode distributedTreeNode );\n\n    // TODO\n    void updateNode( GUID guid, GUIDImperialTrieNode distributedTreeNode );\n\n\n\n    /** Purge / Deletion */\n    void purge         ( GUID guid );\n\n    void removeTreeNode( GUID guid );\n\n    void removeTreeNodeByParentGuid( GUID parentGuid );\n\n    void removeTreeNodeYoke( GUID guid, GUID parentGuid );\n\n    void removeTreeNodeWithLinkedType( GUID guid, LinkedType linkedType );\n\n    void removeNodeMeta( GUID guid );\n\n    default void removeOwnedTreeNode ( GUID guid ) {\n        this.removeTreeNodeWithLinkedType( guid, LinkedType.Owned );\n    }\n\n    default void removeHardLinkedTreeNode ( GUID guid ) {\n        this.removeTreeNodeWithLinkedType( guid, LinkedType.Hard );\n    }\n\n\n\n    /** Lineage / Affinity */\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    List<GUID > fetchChildrenGuids( GUID parentGuid );\n\n    List<GUID > fetchParentGuids( GUID guid );\n\n    void removeInheritance( GUID childNode, GUID parentGUID );\n\n    void addChild( GUID childGuid, 
GUID parentGuid );\n\n    void updateType       ( UOI type, GUID guid );\n\n    List<GUID > fetchRoot();\n\n    boolean isRoot( GUID guid );\n\n\n\n    /** Link / Reference */\n    /**\n     * Querying link-count, that the node be linked by its owner. [Strong/Weak]\n     * 获取节点引用计数。 [根据强弱引用条件]\n     * @return the link-count, which its has been linked.\n     */\n    long queryLinkedCount( GUID guid, LinkedType linkedType );\n\n    long queryAllLinkedCount( GUID guid );\n\n    default long queryStrongLinkedCount( GUID guid ) {\n        return this.queryLinkedCount( guid, LinkedType.Owned );\n    }\n\n    default long queryWeakLinkedCount( GUID guid ) {\n        return this.queryLinkedCount( guid, LinkedType.Hard );\n    }\n\n    void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, GUID tagGuid, LinkedType linkedType );\n\n    default void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, GUID tagGuid ) {\n        this.newLinkTag( originalGuid, dirGuid, tagName, tagGuid, LinkedType.Hard );\n    }\n\n    void updateLinkTagName( GUID tagGuid, String tagName );\n\n    GUID getOriginalGuid( String tagName,GUID parentDirGuid );\n\n    GUID getOriginalGuidByNodeGuid( String tagName, GUID nodeGUID );\n\n    ReparseLinkNode getReparseLinkNode( String tagName, GUID parentDirGuid );\n\n    ReparseLinkNode getReparseLinkNodeByNodeGuid( String tagName, GUID nodeGUID );\n\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    boolean isTagGuid( GUID guid );\n\n    GUID getOriginalGuidByTagGuid( GUID tagGuid );\n\n    void removeReparseLink( GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/ArchQueueTableMeta.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\npublic abstract class ArchQueueTableMeta implements QueueMeta {\n    protected String  mszQueueTableName;\n\n    public ArchQueueTableMeta(String queueTableName ) {\n        this.mszQueueTableName = queueTableName;\n    }\n\n    public ArchQueueTableMeta(){}\n\n    public void setQueueTableName( String queueTableName ) {\n        this.mszQueueTableName = queueTableName;\n    }\n\n    @Override\n    public String getQueueTable() {\n        return this.mszQueueTableName;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/ConfigurableMegaDeflectPriorityQueueMeta.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\npublic class ConfigurableMegaDeflectPriorityQueueMeta extends ArchQueueTableMeta implements MegaDeflectPriorityQueueMeta {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/ConfigurableMegaStratumQueueMeta.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\npublic class ConfigurableMegaStratumQueueMeta extends ArchQueueTableMeta implements MegaStratumQueueMeta {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/DPQueueManipulator.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\n\nimport java.util.List;\n\npublic interface DPQueueManipulator extends Pinenut {\n    void pushBack(QueueElement queueElement, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta);\n\n    void pushFront( QueueElement queueElement, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    void incrementLinkedPriorities( QueueElement queueElement, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    QueueElement popFront( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    List<QueueElement> batchPopFront( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset );\n\n    QueueElement popBack( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    List<QueueElement> batchPopBack( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset );\n\n    long queryQueueSize( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    QueueElement remove( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    QueueElement query( long enumId, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    List<QueueElement> fetchElementByPriority( long priority, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset );\n\n    List<QueueElement> fetchElement( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long offset );\n\n    List<GUID> fetchElementGuid( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta, long limit, long 
offset );\n\n    QueueElement getByIndex( long index, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    Long nextPos( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta  );\n\n    Long getIndexPriority( long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/DPStratumQueueManipulator.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement;\n\npublic interface DPStratumQueueManipulator extends Pinenut {\n    void pushBack(QueueStratumElement queueElement, String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta);\n\n    QueueStratumElement popFront( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    void removeFront( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n\n    long isEmpty( String sharedSegmentField, String sharedSegmentName, QueueMeta queueMeta );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/DeflectPriorityQueue.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\n\nimport java.util.List;\n\npublic interface DeflectPriorityQueue extends MegaPriorityQueue, SharedSegmentIQueue {\n    void pushBack( QueueElement queueElement );\n\n    void pushFront( QueueElement queueElement );\n\n    QueueElement getByIndex( long index );\n\n    QueueElement popFront();\n\n    List<QueueElement> fetchElements( long offset, long limit );\n\n    List<GUID> fetchElementGuids( long offset, long limit );\n\n    QueueElement popBack();\n\n    long size();\n\n    boolean isEmpty();\n\n    boolean contains( QueueElement queueElement );\n\n    void setCurrentPos( long mnCurrentPos );\n\n    void reset();\n\n    QueueElement remove( long enumId );\n\n    List<QueueElement> fetchElementByPriority( long priority, long offset, long limit );\n\n    String getSegmentName();\n\n    QueueMasterManipulator getMasterManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MagnitudeDPQueue.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\n\nimport java.util.List;\n\npublic class MagnitudeDPQueue implements DeflectPriorityQueue, Cloneable {\n    private QueueMasterManipulator      mQueueMasterManipulator;\n\n    private DPQueueManipulator          mDPQueueManipulator;\n\n    private long                        mnCurrentPos;\n\n    private String                      mszSharedSegmentField;\n\n    private String                      mszSharedSegmentName;\n\n    private QueueMeta                   mQueueMeta;\n\n    public MagnitudeDPQueue( KOIMappingDriver driver, long currentPos,\n                            String shareSegmentField, String sharedSegmentName, QueueMeta queueMeta ) {\n        this.mQueueMasterManipulator    = (QueueMasterManipulator) driver.getMasterManipulator();\n        this.mDPQueueManipulator        = this.mQueueMasterManipulator.getDPQueueManipulator();\n        this.mnCurrentPos               = currentPos;\n        this.mszSharedSegmentField      = shareSegmentField;\n        this.mszSharedSegmentName       = sharedSegmentName;\n        this.mQueueMeta                 = queueMeta;\n    }\n\n    @Override\n    public String getSharedSegmentField() {\n        return this.mszSharedSegmentField;\n    }\n\n    @Override\n    public String getSharedSegmentName() {\n        return this.mszSharedSegmentName;\n    }\n\n    @Override\n    public void setCurrentPos( long currentPos ) {\n        this.mnCurrentPos = currentPos;\n    }\n\n    @Override\n    public void reset() {\n        this.setCurrentPos( 0 );\n    }\n\n    @Override\n    public MagnitudeDPQueue clone() {\n        try {\n            Object o = super.clone();\n            MagnitudeDPQueue neo = (MagnitudeDPQueue) o;\n            return neo;\n        }\n        catch ( CloneNotSupportedException ignore ) 
{\n            return null;\n        }\n    }\n\n    @Override\n    public void pushBack( QueueElement queueElement ) {\n        this.mDPQueueManipulator.pushBack( queueElement, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n    }\n\n    @Override\n    public void pushFront( QueueElement queueElement ) {\n        this.mDPQueueManipulator.pushFront( queueElement, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n        this.mDPQueueManipulator.incrementLinkedPriorities( queueElement, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n    }\n\n    @Override\n    public QueueElement getByIndex( long index ) {\n        return this.mDPQueueManipulator.getByIndex( index, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n    }\n\n    @Override\n    public QueueElement popFront() {\n        QueueElement peek = this.mDPQueueManipulator.popFront( this.mnCurrentPos, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n        Long indexPriority = this.mDPQueueManipulator.getIndexPriority( this.mnCurrentPos, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n        peek.setIndexPriority( indexPriority );\n        Long l = this.mDPQueueManipulator.nextPos( this.mnCurrentPos, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n        if( l == null ) {\n            this.mnCurrentPos = -1;\n        }\n        else {\n            this.mnCurrentPos = l;\n        }\n        return peek;\n    }\n\n    @Override\n    public List<QueueElement> fetchElements( long offset, long limit ) {\n        return this.mDPQueueManipulator.fetchElement( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta, limit, offset );\n    }\n\n    @Override\n    public List<GUID> fetchElementGuids( long offset, long limit ) {\n        return this.mDPQueueManipulator.fetchElementGuid( this.mszSharedSegmentField, 
this.mszSharedSegmentName, this.mQueueMeta, limit, offset );\n    }\n\n    @Override\n    public QueueElement popBack() {\n        return this.mDPQueueManipulator.popBack( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n    }\n\n    @Override\n    public long size() {\n        return this.mDPQueueManipulator.queryQueueSize( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mDPQueueManipulator.queryQueueSize( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta ) == 0;\n    }\n\n    @Override\n    public boolean contains( QueueElement queueElement ) {\n        QueueElement query = this.mDPQueueManipulator.query( queueElement.getEnumId(), this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n        return !(query == null);\n    }\n\n    @Override\n    public QueueElement remove( long enumId ) {\n        return this.mDPQueueManipulator.remove( this.mnCurrentPos, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n    }\n\n    @Override\n    public List<QueueElement> fetchElementByPriority( long priority, long offset, long limit ) {\n        return this.mDPQueueManipulator.fetchElementByPriority( priority, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta, limit, offset );\n    }\n\n    @Override\n    public String getSegmentName() {\n        return this.mszSharedSegmentName;\n    }\n\n    public long currentPosition() {\n        return this.mnCurrentPos;\n    }\n\n    @Override\n    public QueueMasterManipulator getMasterManipulator() {\n        return this.mQueueMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaDPStratumQueue.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement;\n\npublic class MegaDPStratumQueue implements MegaStratumQueue {\n    private QueueMasterManipulator          mQueueMasterManipulator;\n\n    private DPStratumQueueManipulator       mDPStratumQueueManipulator;\n\n    private String                          mszSharedSegmentField;\n\n    private String                          mszSharedSegmentName;\n\n    private QueueMeta                       mQueueMeta;\n\n    public MegaDPStratumQueue(\n            KOIMappingDriver driver, String shareSegmentField,\n            String sharedSegmentName, QueueMeta queueMeta\n    ) {\n        this.mQueueMasterManipulator = (QueueMasterManipulator) driver.getMasterManipulator();\n        this.mDPStratumQueueManipulator = this.mQueueMasterManipulator.getDPStratumQueueManipulator();\n        this.mszSharedSegmentName = sharedSegmentName;\n        this.mszSharedSegmentField = shareSegmentField;\n        this.mQueueMeta = queueMeta;\n    }\n\n    @Override\n    public void pushBack(QueueStratumElement queueElement) {\n        this.mDPStratumQueueManipulator.pushBack( queueElement, this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n    }\n\n    @Override\n    public QueueStratumElement popFront() {\n        QueueStratumElement queueStratumElement = this.mDPStratumQueueManipulator.popFront(this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta);\n        this.mDPStratumQueueManipulator.removeFront( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta );\n        return queueStratumElement;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mDPStratumQueueManipulator.isEmpty( this.mszSharedSegmentField, this.mszSharedSegmentName, this.mQueueMeta ) == 0;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaDeflectPriorityQueueMeta.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\npublic interface MegaDeflectPriorityQueueMeta extends QueueMeta {\n\n    void setQueueTableName( String queueTableName );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaPriorityQueue.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MegaPriorityQueue extends Pinenut {\n\n    //List<QueueElement> fetchElements( long offset, long pageSize );\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaStratumQueue.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement;\n\npublic interface MegaStratumQueue extends Pinenut {\n    void pushBack( QueueStratumElement queueElement);\n\n    QueueStratumElement popFront();\n\n    boolean isEmpty();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/MegaStratumQueueMeta.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\npublic interface MegaStratumQueueMeta extends QueueMeta {\n\n    void setQueueTableName( String queueTableName );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/QueueExistManipulator.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface QueueExistManipulator extends Pinenut {\n    void insertQueueExist(GUID layerGuid );\n\n    void insertQueueNotExist(GUID layerGuid );\n\n    void setQueueExist(GUID layerGuid);\n\n    void setQueueNotExist(GUID layerGuid);\n\n    boolean isExist( GUID layer_guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/QueueMasterManipulator.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface QueueMasterManipulator extends KOIMasterManipulator {\n    DPQueueManipulator              getDPQueueManipulator();\n\n    DPStratumQueueManipulator       getDPStratumQueueManipulator();\n\n    QueueExistManipulator           getQueueExistManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/QueueMeta.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface QueueMeta extends Pinenut {\n\n    String getQueueTable();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/SharedSegmentIQueue.java",
    "content": "package com.pinecone.hydra.unit.iqueue;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface SharedSegmentIQueue extends Pinenut {\n\n    String getSharedSegmentField();\n\n    String getSharedSegmentName();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/entity/GenericQueueElement.java",
    "content": "package com.pinecone.hydra.unit.iqueue.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic class GenericQueueElement implements QueueElement {\n\n    protected long        mnEnumId;\n\n    protected GUID        mObjectGuid;\n\n    protected long        mnPriority;\n\n    protected long        mnLinkedPriority;\n\n    protected long        mnIndexPriority;\n\n    protected double      mBias;\n\n    @Override\n    public long getEnumId() {\n        return 0;\n    }\n\n    public void setEnumId( long enumId ) {\n        this.mnEnumId = enumId;\n    }\n\n    @Override\n    public GUID getObjectGuid() {\n        return this.mObjectGuid;\n    }\n\n    public void setObjectGuid( GUID objectGuid ) {\n        this.mObjectGuid = objectGuid;\n    }\n\n\n    @Override\n    public long getPriority() {\n        return this.mnPriority;\n    }\n\n    public void setPriority( long priority ) {\n        this.mnPriority = priority;\n    }\n\n    @Override\n    public long getLinkedPriority() {\n        return this.mnLinkedPriority;\n    }\n\n    public void setLinkedPriority( long linkedPriority ) {\n        this.mnLinkedPriority = linkedPriority;\n    }\n\n    @Override\n    public long getIndexPriority() {\n        return this.mnIndexPriority;\n    }\n\n    public void setIndexPriority( long indexPriority ) {\n        this.mnIndexPriority = indexPriority;\n    }\n\n    @Override\n    public long getActualPriority() {\n        return 0;\n    }\n\n    @Override\n    public double getBias() {\n        return this.mBias;\n    }\n\n    public void  setBias( double bias ) {\n        this.mBias = bias;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/entity/GenericStratumQueueElement.java",
    "content": "package com.pinecone.hydra.unit.iqueue.entity;\n\npublic class GenericStratumQueueElement extends GenericQueueElement implements QueueStratumElement{\n    protected short mStratum;\n\n    @Override\n    public void setStratum(short stratum) {\n        this.mStratum = stratum;\n    }\n\n    @Override\n    public short getStratum() {\n        return this.mStratum;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/entity/QueueElement.java",
    "content": "package com.pinecone.hydra.unit.iqueue.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface QueueElement extends Pinenut {\n\n    long getEnumId();\n\n    GUID getObjectGuid();\n\n    long getPriority();\n\n    long getLinkedPriority();\n\n    long getIndexPriority();\n\n    void setIndexPriority( long indexPriority );\n\n    long getActualPriority();\n\n    double getBias();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/iqueue/entity/QueueStratumElement.java",
    "content": "package com.pinecone.hydra.unit.iqueue.entity;\n\npublic interface QueueStratumElement extends QueueElement {\n    void setStratum( short stratum  );\n\n    short getStratum();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/ArchAtlasInstrument.java",
    "content": "package com.pinecone.hydra.unit.vgraph;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.vgraph.algo.BasicDAGPathResolver;\nimport com.pinecone.hydra.unit.vgraph.algo.BasicDAGPathSelector;\nimport com.pinecone.hydra.unit.vgraph.algo.DAGPathResolver;\nimport com.pinecone.hydra.unit.vgraph.algo.DAGPathSelector;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMasterManipulator;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphPathCacheManipulator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Objects;\nimport java.util.Queue;\n\npublic abstract class ArchAtlasInstrument implements AtlasInstrument {\n\n    protected AtlasInstrument                   mParentInstrument;\n    protected LayerInstrument                   mLayerInstrument;\n\n    protected Hydrogen                          mHydrogen;\n    protected Processum                         mSuperiorProcess;\n\n    protected GuidAllocator                     mGuidAllocator;\n    protected DAGPathResolver                   mPathResolver;\n    protected DAGPathSelector                   mPathSelector;\n\n    protected AtlasMasterManipulator            mAtlasMasterManipulator;\n    protected VectorGraphMasterManipulator      mVectorGraphMasterManipulator;\n    protected VectorGraphPathCacheManipulator  
 mVectorGraphPathCacheManipulator;\n    protected VectorGraphManipulator            mVectorGraphManipulator;\n\n    protected VectorGraphConfig                 mVectorGraphConfig;\n\n    public ArchAtlasInstrument(\n            AtlasMappingDriver atlasMappingDriver,\n            VectorGraphConfig vectorGraphConfig,\n            LayerInstrument layerInstrument\n    ) {\n        this.mLayerInstrument                   = layerInstrument;\n        this.mVectorGraphConfig                 = vectorGraphConfig;\n        this.mSuperiorProcess                   = atlasMappingDriver.getSuperiorProcess();\n        this.mAtlasMasterManipulator            = atlasMappingDriver.getMasterManipulator();\n        this.mVectorGraphMasterManipulator      = this.mAtlasMasterManipulator.getVectorGraphMasterManipulator();\n        this.mVectorGraphManipulator            = this.mVectorGraphMasterManipulator.getVectorGraphManipulator();\n        this.mVectorGraphPathCacheManipulator   = this.mVectorGraphMasterManipulator.getVectorGraphPathCacheManipulator();\n\n        if ( this.mSuperiorProcess instanceof Hydrogen) {\n            this.mHydrogen = (Hydrogen) this.mSuperiorProcess;\n        }\n        else {\n            this.mHydrogen = (Hydrogen) this.mSuperiorProcess.parentSystem();\n        }\n        this.mGuidAllocator = GUIDs.newGuidAllocator();\n        this.mPathResolver = new BasicDAGPathResolver();//后续要使用配置类指定\n        this.mPathSelector = new BasicDAGPathSelector( this.mPathResolver, this.mVectorGraphManipulator );\n    }\n\n    public ArchAtlasInstrument( AtlasMappingDriver driver, LayerInstrument layerInstrument ) {\n        this( driver, null, layerInstrument );\n    }\n\n    @Override\n    public LayerInstrument layerInstrument() {\n        return this.mLayerInstrument;\n    }\n\n    @Override\n    public AtlasInstrument parent() {\n        return this.mParentInstrument;\n    }\n\n    @Override\n    public Processum getSuperiorProcess() {\n        return 
this.mSuperiorProcess;\n    }\n\n    @Override\n    public void setParent(AtlasInstrument atlasInstrument) {\n        this.mParentInstrument = atlasInstrument;\n    }\n\n    @Override\n    public AtlasMasterManipulator getMasterManipulator() {\n        return this.mAtlasMasterManipulator;\n    }\n\n    @Override\n    public VectorGraphConfig getConfig() {\n        return this.mVectorGraphConfig;\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.mGuidAllocator;\n    }\n\n    @Override\n    public List<String> getPath(GUID guid) {\n        return this.getNS( guid, \"/\" );\n    }\n\n    @Override\n    public GUID queryGUIDByPath(String path) {\n        return this.queryGUIDByNS( path, null, null );\n    }\n\n    @Override\n    public GUID queryParentID(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public boolean contains( GUID handleNode, GUID nodeGuid) {\n        return this.mPathSelector.contains( handleNode, nodeGuid );\n    }\n\n\n    @Override\n    public GUID put( GraphNode graphNode ) {\n        GUID guid = this.mGuidAllocator.nextGUID();\n        graphNode.setId( guid );\n        this.mVectorGraphManipulator.insertGraphNode( graphNode );\n\n        return guid;\n    }\n\n    @Override\n    public GUID put( GUID parentGuid, GraphNode graphNode ) {\n        GUID guid = this.mGuidAllocator.nextGUID();\n        graphNode.setId( guid );\n        this.mVectorGraphManipulator.insertNodeByEdge( parentGuid, graphNode );\n        return guid;\n    }\n\n    @Override\n    public GraphNode get( GUID guid ) {\n        return this.mVectorGraphManipulator.queryNode( guid );\n    }\n\n    @Override\n    public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) {\n        if( szTargetSep != null ) {\n            path = path.replace( szBadSep, szTargetSep );\n        }\n\n        String[] parts = this.mPathResolver.segmentPathParts( path );\n        List<String > resolvedParts = 
this.mPathResolver.resolvePath( parts );\n        path = this.mPathResolver.assemblePath( resolvedParts );\n\n        GUID guid = this.mVectorGraphPathCacheManipulator.queryGUIDByPath( path );\n        if ( guid != null ){\n            return guid;\n        }\n\n\n        guid = this.mPathSelector.searchId( resolvedParts );\n        if( guid != null ){\n            this.mVectorGraphPathCacheManipulator.insert( path, guid );\n        }\n        return guid;\n    }\n\n    @Override\n    public TreeNode get( GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        this.mVectorGraphManipulator.removeNode( guid );\n        this.mVectorGraphPathCacheManipulator.remove( guid );\n    }\n\n    @Override\n    public void remove( String path ) {\n        GUID guid = this.queryGUIDByPath(path);\n        if( guid != null ) {\n            this.remove( guid );\n        }\n    }\n\n    @Override\n    public List<GraphNode> getChildren( GUID guid ) {\n        return this.mVectorGraphManipulator.fetchChildNodes( guid );\n    }\n\n    @Override\n    public List<GUID> fetchChildrenIds( GUID guid ) {\n        return this.mVectorGraphManipulator.fetchChildNodeIds( guid );\n    }\n\n    @Override\n    public void rename( GUID guid, String name ) {\n\n    }\n\n    /**使用bfs找到所有可达路径**/\n    protected List<String> getNS( GUID guid, String szSeparator ){\n        // 先检查缓存\n        List<String> path = this.mVectorGraphPathCacheManipulator.getPath( guid );\n        if (path != null && !path.isEmpty()) {\n            return path;\n        }\n\n        GraphNode startNode = this.get(guid);\n        if (startNode == null) {\n            return Collections.emptyList();\n        }\n\n        List<String> allPaths = new ArrayList<>();\n        Queue<GraphNodePair> queue = new LinkedList<>();\n        queue.offer( new GraphNodePair(startNode, startNode.getName()) );\n\n        while ( !queue.isEmpty() ) {\n            GraphNodePair current = 
queue.poll();\n            GraphNode currentNode = current.getGraphNode();\n            String currentPath = current.getCurrentPath();\n\n            List<GUID> parentIds = this.mVectorGraphManipulator.fetchParentIds( currentNode.getId() );\n            if ( parentIds.isEmpty() || !this.allNonNull(parentIds) ) {\n                allPaths.add(currentPath);\n                continue;\n            }\n\n            // 遍历所有非空的父节点\n            for (GUID parentId : parentIds) {\n                if (parentId != null) {\n                    GraphNode parentNode = this.get(parentId);\n                    if (parentNode != null) {\n                        String newPath = parentNode.getName() + szSeparator + currentPath;\n                        queue.offer(new GraphNodePair(parentNode, newPath));\n                    }\n                }\n            }\n        }\n\n        if (!allPaths.isEmpty()) {\n            for ( String s : allPaths ) {\n                this.mVectorGraphPathCacheManipulator.insert( s, guid );\n            }\n\n        }\n\n        return allPaths;\n    }\n\n    private boolean allNonNull( List<?> list ) {\n        return list.stream().noneMatch( Objects::isNull );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/ArchVectorDAG.java",
    "content": "package com.pinecone.hydra.unit.vgraph;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic abstract class ArchVectorDAG implements VectorDAG {\n\n    protected GUID                                      mGraphGuid;\n    protected VectorGraphConfig                         mVectorGraphConfig;\n\n    // Temporary Graph\n    public ArchVectorDAG( GUID graphGuid, VectorGraphConfig vectorGraphConfig ) {\n        this.mVectorGraphConfig = vectorGraphConfig;\n        this.mGraphGuid = graphGuid;\n    }\n\n    @Override\n    public VectorGraphConfig getConfig() {\n        return this.mVectorGraphConfig;\n    }\n\n    @Override\n    public GUID getGraphGuid() {\n        return this.mGraphGuid;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/AtlasInstrument.java",
    "content": "package com.pinecone.hydra.unit.vgraph;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.regime.Instrument;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMasterManipulator;\n\nimport java.util.List;\n\npublic interface AtlasInstrument extends Instrument {\n    AtlasInstrument parent();\n\n    LayerInstrument layerInstrument();\n\n\n    Processum getSuperiorProcess();\n\n    AtlasMasterManipulator getMasterManipulator();\n\n    VectorGraphConfig getConfig();\n\n    void setParent( AtlasInstrument atlasInstrument );\n\n    GuidAllocator getGuidAllocator();\n\n    List<String> getPath( GUID guid );\n\n    GUID queryGUIDByPath( String path );\n\n    GUID queryParentID( GUID guid );\n\n    default GUID assertPath( String path, String pathType ) throws IllegalArgumentException {\n        GUID guid      = this.queryGUIDByPath( path );\n        if( guid == null ) {\n            throw new IllegalArgumentException( \"Undefined \" + pathType + \" '\" + path + \"'\" );\n        }\n\n        return guid;\n    }\n\n    default GUID assertPath( String path ) throws IllegalArgumentException {\n        return this.assertPath( path, \"path\" );\n    }\n\n    boolean contains( GUID handleNode, GUID nodeGuid );\n\n    GUID put( GraphNode graphNode );\n\n    GUID put( GUID parentGuid, GraphNode graphNode );\n\n    GraphNode get( GUID guid );\n\n    GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep );\n\n    TreeNode get(GUID guid, int depth );\n\n    void remove( GUID guid );\n\n    void remove( String path );\n\n    List<GraphNode> getChildren( GUID guid );\n\n    List<GUID > 
fetchChildrenIds(GUID guid );\n\n    void rename( GUID guid, String name );\n\n    VectorDAG toVectorDAG( Layer layer );\n\n    void addChild( GUID parentGuid, GUID childGuid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/GraphNodePair.java",
    "content": "package com.pinecone.hydra.unit.vgraph;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\n\npublic class GraphNodePair implements Pinenut {\n    private GraphNode mGraphNode;\n\n    private String mszCurrentPath;\n\n    public GraphNodePair( GraphNode graphNode, String currentPath){\n        this.mGraphNode = graphNode;\n        this.mszCurrentPath = currentPath;\n    }\n\n    public GraphNodePair(){}\n\n    public GraphNode getGraphNode() {\n        return mGraphNode;\n    }\n\n    public void setGraphNode(GraphNode graphNode) {\n        this.mGraphNode = graphNode;\n    }\n\n    public String getCurrentPath() {\n        return mszCurrentPath;\n    }\n\n    public void setCurrentPath(String sCurrentPath) {\n        this.mszCurrentPath = sCurrentPath;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/MagnitudeVectorDAG.java",
    "content": "package com.pinecone.hydra.unit.vgraph;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphPathCacheManipulator;\n\npublic class MagnitudeVectorDAG extends ArchVectorDAG implements VectorDAG {\n    protected Layer                                     mGraphLayer;\n\n    protected VectorGraphMasterManipulator              mMasterManipulator;\n    protected VectorGraphManipulator                    mVectorGraphManipulator;\n    protected VectorGraphPathCacheManipulator           mVectorGraphPathCacheManipulator;\n\n    public MagnitudeVectorDAG( Layer affliatedLayer, VectorGraphMasterManipulator masterManipulator, VectorGraphConfig vectorGraphConfig ) {\n        super( affliatedLayer.getGuid(), vectorGraphConfig );\n\n        this.mGraphLayer                            = affliatedLayer;\n        this.mMasterManipulator                     = masterManipulator;\n        this.mVectorGraphManipulator                = this.mMasterManipulator.getVectorGraphManipulator();\n        this.mVectorGraphPathCacheManipulator       = this.mMasterManipulator.getVectorGraphPathCacheManipulator();\n    }\n\n    @Override\n    public GUID getAffiliateLayerGuid() {\n        return this.mGraphLayer.getGuid();\n    }\n\n    @Override\n    public Layer getAffiliateLayer() {\n        return this.mGraphLayer;\n    }\n\n    @Override\n    public boolean isPersistenceGraph() {\n        return true;\n    }\n\n    @Override\n    public List<GUID> fetchSourceGuids( long offset, long limit ) {\n        return this.mVectorGraphManipulator.fetchHandleGuids( offset, limit );\n    }\n\n    @Override\n    public 
List<GUID> fetchSourceGuidsByTaskPriority( long offset, long limit ) {\n        return this.mVectorGraphManipulator.fetchHandleGuidsByTaskPriority(offset, limit);\n    }\n\n    @Override\n    public long countSourceNodes() {\n        return this.mVectorGraphManipulator.countSourceNodes();\n    }\n\n    @Override\n    public List<GUID> fetchDownstreamNodeGuid( GUID nodeGuid, long offset, long limit ) {\n        return this.mVectorGraphManipulator.fetchDownstreamNodeGuid(nodeGuid,offset,limit);\n    }\n\n    @Override\n    public List<GUID> fetchUpstreamNodeGuid( GUID nodeGuid, long offset, long limit ) {\n        return this.mVectorGraphManipulator.fetchUpstreamNodeGuid(nodeGuid,offset,limit);\n    }\n\n    @Override\n    public long queryInDegree( GUID nodeGuid ) {\n        return this.mVectorGraphManipulator.queryInDegree(nodeGuid);\n    }\n\n    @Override\n    public long queryOutDegree( GUID nodeGuid ) {\n        return this.mVectorGraphManipulator.queryOutDegree(nodeGuid);\n    }\n\n    @Override\n    public GraphNode get( GUID guid ) {\n        return this.mVectorGraphManipulator.queryNode( guid );\n    }\n\n    @Override\n    public void removeNode( GUID guid ) {\n        this.mVectorGraphManipulator.removeNode( guid );\n        this.mVectorGraphPathCacheManipulator.remove( guid );\n    }\n\n    @Override\n    public List<GraphNode> fetchChildNodes( GUID guid ) {\n        return this.mVectorGraphManipulator.fetchChildNodes( guid );\n    }\n\n    @Override\n    public List<GUID> fetchChildNodeGuids( GUID guid ) {\n        return this.mVectorGraphManipulator.fetchChildNodeGuids( guid );\n    }\n\n    @Override\n    public List<GUID> fetchChildNodeGuids( long offset, long limit, GUID guid ) {\n        return this.mVectorGraphManipulator.limitFetchChildNodeGuids( offset, limit, guid );\n    }\n\n    @Override\n    public long countChildNodeNum( GUID guid ) {\n        return this.mVectorGraphManipulator.countChildNodeNums( guid );\n    }\n\n    @Override\n    
public long getPriorityByInDegree( GUID guid ) {\n        return this.mVectorGraphManipulator.getPriorityByInDegree( guid );\n    }\n\n    @Override\n    public void addChild( GUID parentGuid, GUID childGuid ) {\n        this.mVectorGraphManipulator.addChild( parentGuid,childGuid );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/VectorDAG.java",
    "content": "package com.pinecone.hydra.unit.vgraph;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\n\nimport java.util.List;\n\npublic interface VectorDAG extends Pinenut {\n    GUID getGraphGuid();\n\n    GUID getAffiliateLayerGuid();\n\n    Layer getAffiliateLayer();\n\n    boolean isPersistenceGraph();\n\n    List<GUID> fetchSourceGuids( long offset, long limit );\n\n    List<GUID> fetchSourceGuidsByTaskPriority( long offset, long limit );\n\n    long countSourceNodes();\n\n    List<GUID> fetchDownstreamNodeGuid( GUID nodeGuid, long offset, long limit );\n\n    List<GUID> fetchUpstreamNodeGuid( GUID nodeGuid, long offset, long limit );\n\n    long queryInDegree( GUID nodeGuid );\n\n    long queryOutDegree( GUID nodeGuid );\n\n    VectorGraphConfig getConfig();\n\n    List<GraphNode> fetchChildNodes( GUID guid );\n\n    List<GUID> fetchChildNodeGuids( GUID guid );\n\n    List<GUID> fetchChildNodeGuids( long offset, long limit, GUID guid );\n\n    long countChildNodeNum( GUID guid );\n\n    GraphNode get( GUID guid );\n\n    void removeNode( GUID guid );\n\n    long getPriorityByInDegree( GUID guid );\n\n    void addChild( GUID parentGuid, GUID childGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/VectorGraphConfig.java",
    "content": "package com.pinecone.hydra.unit.vgraph;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface VectorGraphConfig extends Pinenut {\n    String getPathNameSeparator();\n\n    String getFullNameSeparator();\n\n    String getPathNameSepRegex();\n\n    String getFullNameSepRegex();\n\n    int getShortPathLength();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/VectorGraphConstants.java",
    "content": "package com.pinecone.hydra.unit.vgraph;\n\npublic class VectorGraphConstants {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/algo/BasicDAGPathResolver.java",
    "content": "package com.pinecone.hydra.unit.vgraph.algo;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class BasicDAGPathResolver implements DAGPathResolver {\n    protected String mszSepRegex;\n\n    protected String mszSeparator;\n\n    public BasicDAGPathResolver(String szSeparator, String szSepRegex ){\n        this.mszSeparator = szSeparator;\n        this.mszSepRegex  = szSepRegex;\n    }\n\n    public BasicDAGPathResolver(){\n        this( \"/\", \"/\" );\n    }\n\n    @Override\n    public List<String > resolvePath( String[] parts ) {\n        ArrayList<String> resolvedParts = new ArrayList<>();\n        for (String part : parts) {\n            if ( part.equals(\".\") || part.isEmpty() ) {\n                continue;\n            }\n\n            if ( part.equals(\"..\") ) {\n                if ( !resolvedParts.isEmpty() ) {\n                    resolvedParts.remove( resolvedParts.size() - 1 );\n                }\n            }\n            else {\n                resolvedParts.add( part );\n            }\n        }\n        return resolvedParts;\n    }\n\n    @Override\n    public String resolvePath( String path ) {\n        String[] parts = this.processPath( path ).split( this.mszSepRegex );\n        return this.assemblePath( this.resolvePath( parts ) );\n    }\n\n    @Override\n    public List<String > resolvePathParts( String path ) {\n        return this.resolvePath( this.segmentPathParts( path ) );\n    }\n\n    @Override\n    public String[] segmentPathParts( String path ) {\n        return this.processPath( path ).split( this.mszSepRegex );\n    }\n\n    @Override\n    public String assemblePath( List<String > parts ) {\n        if ( parts == null || parts.size() == 0 ) {\n            return \"\";\n        }\n\n        StringBuilder path = new StringBuilder();\n\n        for ( int i = 0; i < parts.size(); ++i ) {\n            if ( i > 0 ) {\n                path.append( this.mszSeparator );\n            }\n            
path.append( parts.get( i ) );\n        }\n        return path.toString();\n    }\n\n    protected String processPath( String path ) {\n        return path;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/algo/BasicDAGPathSelector.java",
    "content": "package com.pinecone.hydra.unit.vgraph.algo;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.AtlasInstrument;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;\n\nimport java.util.List;\nimport java.util.Stack;\n\npublic class BasicDAGPathSelector implements DAGPathSelector {\n    protected DAGPathResolver mPathResolver;\n\n    protected VectorGraphManipulator    mVectorGraphManipulator;\n\n    public BasicDAGPathSelector(DAGPathResolver pathResolver, VectorGraphManipulator vectorGraphManipulator  ){\n        this.mPathResolver = pathResolver;\n        this.mVectorGraphManipulator = vectorGraphManipulator;\n    }\n    @Override\n    public GUID searchId(String[] parts) {\n        return this.searchId( parts, null );\n    }\n\n    @Override\n    public GUID searchId(String[] parts, @Nullable String[] lpResolvedPath) {\n        List<String > resolvedParts = this.mPathResolver.resolvePath( parts );\n        if( lpResolvedPath != null ) {\n            lpResolvedPath[ 0 ] = this.mPathResolver.assemblePath( resolvedParts );\n        }\n\n        return this.searchId( resolvedParts );\n    }\n\n    @Override\n    public GUID searchId(List<String> resolvedParts) {\n        return (GUID) this.dfsSearch( resolvedParts );\n    }\n\n    @Override\n    public GUID searchId(GUID parentId, String[] parts) {\n        return this.searchId(parentId, parts, null );\n    }\n\n    @Override\n    public GUID searchId(GUID parentId, String[] parts, @Nullable String[] lpResolvedPath) {\n        List<String > resolvedParts = this.mPathResolver.resolvePath( parts );\n        if( lpResolvedPath != null ) {\n            lpResolvedPath[ 0 ] = this.mPathResolver.assemblePath( resolvedParts );\n        }\n\n        return this.searchId( parentId, resolvedParts );\n    }\n\n    
@Override\n    public GUID searchId(GUID parentId, List<String> resolvedParts) {\n        return (GUID) this.dfsSearch( parentId, resolvedParts );\n    }\n\n    @Override\n    public boolean contains(GUID handleNode, GUID nodeGuid) {\n        if( handleNode.equals(nodeGuid) ){\n            return true;\n        }\n\n        List<GraphNode> nodes = this.mVectorGraphManipulator.fetchChildNodes(handleNode);\n        for( GraphNode node : nodes ){\n            if( contains( node.getId(), nodeGuid ) ){\n                return true;\n            }\n        }\n        return false;\n    }\n\n    protected GUID dfsSearch(List<String > parts ) {\n        return this.dfsSearch( null, parts );\n    }\n\n\n    /** 使用递归实现图的DFS遍历（考古专用） **/\n    protected Object dfsSearch(GUID parentID, List<String> parts, int depth) {\n        if (depth == parts.size() - 1) {\n            List<GraphNode> nodes = this.mVectorGraphManipulator.fetchNodesByName(parts.get(depth));\n            for (GraphNode graphNode : nodes) {\n                if (parentID == null || this.mVectorGraphManipulator.fetchParentIds(graphNode.getId()).contains(parentID)) {\n                    return graphNode;\n                }\n            }\n            return null;\n        }\n\n        List<GraphNode> nodes = this.mVectorGraphManipulator.fetchNodesByName(parts.get(depth));\n        for (GraphNode graphNode : nodes) {\n            if (parentID == null || this.mVectorGraphManipulator.fetchParentIds(graphNode.getId()).contains(parentID)) {\n                Object result = this.dfsSearch(graphNode.getId(), parts, depth + 1);\n                if (result != null) {\n                    return result;\n                }\n            }\n        }\n        return null;\n    }\n\n    /** 非递归形式DFS遍历 **/\n    protected GUID dfsSearch(GUID parentID, List<String> parts) {\n        if (parts.isEmpty()) {\n            return null; // 边界条件：路径为空\n        }\n\n        // 用栈保存当前状态：节点、父ID、当前深度\n        Stack<Object[]> stack = new Stack<>();\n        stack.push(new Object[]{parentID, 0}); 
// 初始状态：parentID, depth=0\n\n        while (!stack.isEmpty()) {\n            Object[] state = stack.pop();\n            GUID currentParentID = (GUID) state[0];\n            int currentDepth = (int) state[1];\n\n            // 终止条件：到达路径末尾\n            if (currentDepth == parts.size() - 1) {\n                List<GraphNode> nodes = mVectorGraphManipulator.fetchNodesByName(parts.get(currentDepth));\n                for (GraphNode node : nodes) {\n                    if (currentParentID == null ||\n                            mVectorGraphManipulator.fetchParentIds(node.getId()).contains(currentParentID)) {\n                        return node.getId(); // 找到目标节点\n                    }\n                }\n                continue; // 当前深度未找到，继续回溯\n            }\n\n            // 非终止条件：继续向下搜索\n            List<GraphNode> nodes = mVectorGraphManipulator.fetchNodesByName(parts.get(currentDepth));\n            // 注意：栈是后进先出，为了保证顺序，需要反向遍历节点（或直接按顺序压栈）\n            for (int i = nodes.size() - 1; i >= 0; i--) {\n                GraphNode node = nodes.get(i);\n                if (currentParentID == null ||\n                        mVectorGraphManipulator.fetchParentIds(node.getId()).contains(currentParentID)) {\n                    // 压栈：子节点ID + 下一深度\n                    stack.push(new Object[]{node.getId(), currentDepth + 1});\n                }\n            }\n        }\n\n        return null; // 栈空仍未找到\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/algo/DAGPathResolver.java",
    "content": "package com.pinecone.hydra.unit.vgraph.algo;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.List;\n\npublic interface DAGPathResolver extends Pinenut {\n    List<String > resolvePath      (String[] parts ) ;\n\n    String        resolvePath      ( String path );\n\n    List<String > resolvePathParts ( String path ) ;\n\n    String[] segmentPathParts      ( String path ) ;\n\n    String assemblePath            ( List<String > parts ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/algo/DAGPathSelector.java",
    "content": "package com.pinecone.hydra.unit.vgraph.algo;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.List;\n\npublic interface DAGPathSelector extends Pinenut {\n    GUID searchId(String[] parts );\n\n    GUID searchId(String[] parts, @Nullable String[] lpResolvedPath );\n\n    GUID searchId(List<String > resolvedParts );\n\n\n    GUID searchId(GUID parentId, String[] parts );\n\n    GUID searchId(GUID parentId, String[] parts, @Nullable String[] lpResolvedPath );\n\n    GUID searchId(GUID parentId, List<String > resolvedParts );\n\n    boolean contains( GUID handleNode, GUID nodeGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/entity/GraphNode.java",
    "content": "package com.pinecone.hydra.unit.vgraph.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.List;\n\npublic interface GraphNode extends Pinenut {\n    long getEnumId();\n\n    void setEnumId(long enumId);\n\n    String getName();\n\n    GUID getId();\n\n    void setId( GUID guid );\n\n    List<GUID> getParentIds();\n\n    void setParentIds( List<GUID> parentIds );\n\n    String getDescription();\n\n    void setDescription( String description );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/AtlasLayer.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class AtlasLayer implements Layer {\n    private String                  mszName;\n\n    private GUID                    parentGuid;\n\n    private GUID                    mGuid;\n\n    private List<GUID>              mLstHandleGuids;\n\n    private List<GUID>              mLstEndGuids;\n\n    private LocalDateTime           mUpdateTime;\n\n    private LocalDateTime           mCreateTime;\n\n    public AtlasLayer() {\n        this.mLstHandleGuids = new ArrayList<>();\n        this.mUpdateTime = LocalDateTime.now();\n        this.mCreateTime = LocalDateTime.now();\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mGuid;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.mszName = name;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.mGuid = guid;\n    }\n\n    @Override\n    public void setParentGuid( GUID parentGuid ) {\n        this.parentGuid = parentGuid;\n    }\n\n    @Override\n    public GUID getParentGuid() {\n        return this.parentGuid;\n    }\n\n    @Override\n    public List<GUID> getSourceGuids() {\n        return this.mLstHandleGuids;\n    }\n\n    @Override\n    public void setSourceGuids( List<GUID> handleGuids ) {\n        this.mLstHandleGuids = handleGuids;\n    }\n\n    @Override\n    public List<GUID> getSinkGuids() {\n        return this.mLstEndGuids;\n    }\n\n    @Override\n    public void setSinkGuids(List<GUID> endGuids) {\n        this.mLstEndGuids = endGuids;\n    }\n\n    @Override\n    public GUID addSourceeGuid(GUID handleGuid) {\n        this.mLstHandleGuids.add(handleGuid);\n        return handleGuid;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() 
{\n        return this.mUpdateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.mUpdateTime = updateTime;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.mCreateTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.mCreateTime = createTime;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/AtlasLayerNamespace.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic class AtlasLayerNamespace implements LayerNamespace {\n    protected GUID              mGuid;\n\n    protected String            mszName;\n\n    protected LocalDateTime     mCreateTime;\n\n    protected LocalDateTime     mUpdateTime;\n\n    public AtlasLayerNamespace() {\n        this.mCreateTime = LocalDateTime.now();\n        this.mUpdateTime = LocalDateTime.now();\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mGuid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.mGuid = guid;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.mszName = name;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.mUpdateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.mUpdateTime = updateTime;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.mCreateTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.mCreateTime = createTime;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/Layer.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\nimport java.util.List;\n\npublic interface Layer extends LayerTreeNode {\n    void setName( String name );\n\n    void setGuid( GUID guid );\n\n    void setParentGuid( GUID parentGuid );\n\n    GUID getParentGuid();\n\n    List<GUID> getSourceGuids();\n\n    void setSourceGuids( List<GUID> handleGuids );\n\n    List<GUID> getSinkGuids();\n\n    void setSinkGuids( List<GUID> endGuids );\n\n    GUID addSourceeGuid( GUID handleGuid );\n\n    LocalDateTime getUpdateTime();\n\n    void setUpdateTime( LocalDateTime updateTime );\n\n    LocalDateTime getCreateTime();\n\n    void setCreateTime( LocalDateTime startTime );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerConfig.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface LayerConfig extends KernelObjectConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerGraphHandle.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic class LayerGraphHandle implements Pinenut {\n    private String                  mszName;\n\n    private GUID                    mGraphGuid;\n\n    private GUID                    mGuid;\n\n    private GUID                    mHandleNodeGuid;\n\n    private GUID                    mEndNodeGuid;\n\n    private LocalDateTime           mUpdateTime;\n\n    private LocalDateTime           mCreateTime;\n\n\n    public String getName() {\n        return this.mszName;\n    }\n\n    public GUID getGuid() {\n        return this.mGuid;\n    }\n\n    public void setName( String name ) {\n        this.mszName = name;\n    }\n\n    public void setGuid( GUID guid ) {\n        this.mGuid = guid;\n    }\n\n    public void setGraphGuid( GUID graphGuid ) {\n        this.mGraphGuid = graphGuid;\n    }\n\n    public GUID getGraphGuid() {\n        return this.mGraphGuid;\n    }\n\n    public GUID getHandleNodeGuid() {\n        return this.mHandleNodeGuid;\n    }\n\n    public void setHandleNodeGuid(GUID handleNode) {\n        this.mHandleNodeGuid = handleNode;\n    }\n\n    public GUID getEndNodeGuid() {\n        return this.mEndNodeGuid;\n    }\n\n    public void setEndNodeGuid(GUID endNodeGuid) {\n        this.mEndNodeGuid = endNodeGuid;\n    }\n\n    public LocalDateTime getUpdateTime() {\n        return this.mUpdateTime;\n    }\n\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.mUpdateTime = updateTime;\n    }\n\n    public LocalDateTime getCreateTime() {\n        return this.mCreateTime;\n    }\n\n    public void setCreateTime(LocalDateTime createTime) {\n        this.mCreateTime = createTime;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerInstrument.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\nimport com.pinecone.slime.meta.TableIndexMeta;\n\nimport java.util.List;\n\npublic interface LayerInstrument extends KOMInstrument {\n    LayerConfig LayerConfig = new VLayerConfig();\n\n    LayerConfig getConfig();\n\n    Hydrogen getSystem();\n\n    void addChild( GUID parentGuid, GUID childGuid );\n\n    void update( TreeNode treeNode );\n\n    List<Layer> splitGraphLayer( VectorDAG vectorDAG );\n\n    long countSourceNode( GUID layerGuid );\n\n    List<GUID> fetchSourceGuidsByTaskPriority( GUID layerGuid,long offset, long limit );\n\n\n\n\n    @Unsafe( \"TestOnly\" )\n    List<Layer> fetchLayersAll();\n\n    List<Layer> fetchLayers( long offset, long limit, boolean anyNode, @Nullable GUID parentGuid );\n\n    default List<Layer> fetchLayers( long offset, long limit ) {\n        return this.fetchLayers( offset, limit, true, null );\n    }\n\n    List<Layer> fetchLayersById( long idStart, long idEnd, boolean anyNode, @Nullable GUID parentGuid );\n\n    default  List<Layer> fetchLayersById( long idStart, long idEnd ) {\n        return this.fetchLayersById( idStart, idEnd, true, null );\n    }\n\n    TableIndexMeta getLayerIndexMeta( boolean anyNode, @Nullable GUID parentGuid );\n\n    default TableIndexMeta getLayerIndexMeta() {\n        return this.getLayerIndexMeta( true, null );\n    }\n\n    long queryMaxLayerPage( long limit, boolean anyNode, @Nullable GUID parentGuid );\n\n    default long queryMaxLayerPage( long limit ) {\n        return this.queryMaxLayerPage( limit, true, null );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerNamespace.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface LayerNamespace extends LayerTreeNode {\n    GUID getGuid();\n\n    void setGuid( GUID guid );\n\n    String getName();\n\n    void setName( String name );\n\n    LocalDateTime getUpdateTime();\n\n    void setUpdateTime( LocalDateTime updateTime );\n\n    LocalDateTime getCreateTime();\n\n    void setCreateTime( LocalDateTime createTime );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/LayerTreeNode.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.time.LocalDateTime;\n\npublic interface LayerTreeNode extends TreeNode {\n    GUID getGuid();\n\n    void setUpdateTime( LocalDateTime updateTime );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/VLayerConfig.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\npublic class VLayerConfig extends ArchKernelObjectConfig implements LayerConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/VLayerInstrument.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.ArchKOMTree;\nimport com.pinecone.hydra.system.ko.kom.SimpleMultiFolderPathSelector;\nimport com.pinecone.hydra.system.ko.kom.SimplePathSelector;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\nimport com.pinecone.hydra.unit.vgraph.layer.operator.AtlasLayerComponentOperatorFactory;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.NamespaceManipulator;\nimport com.pinecone.slime.meta.TableIndexMeta;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V7;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.stream.Collectors;\n\npublic class VLayerInstrument extends ArchKOMTree implements LayerInstrument {\n    protected LayerMasterManipulator    mLayerMasterManipulator;\n\n    protected LayerManipulator          mLayerManipulator;\n\n    protected 
NamespaceManipulator      mNamespaceManipulator;\n\n    protected LayerHandleManipulator    mLayerHandleManipulator;\n\n    public VLayerInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, LayerInstrument parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorProcess, masterManipulator, LayerInstrument.LayerConfig, parent, name, superiorPathScope, guidAllocator );\n        this.mLayerMasterManipulator    = (LayerMasterManipulator) masterManipulator;\n        this.pathResolver               = new KOPathResolver( this.kernelObjectConfig );\n\n        this.operatorFactory            = new AtlasLayerComponentOperatorFactory( this, (LayerMasterManipulator) masterManipulator);\n        this.mLayerManipulator          = this.mLayerMasterManipulator.getLayerManipulator();\n        this.mNamespaceManipulator      = this.mLayerMasterManipulator.getNamespaceManipulator();\n        this.mLayerHandleManipulator    = this.mLayerMasterManipulator.getLayerHandleManipulator();\n\n        this.pathSelector = new SimpleMultiFolderPathSelector(\n                this.pathResolver, this.imperialTree,\n                new GUIDNameManipulator[]{ this.mNamespaceManipulator, this.mLayerManipulator },\n                new GUIDNameManipulator[]{ this.mLayerManipulator }\n        );\n    }\n\n    public VLayerInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, LayerInstrument parent, String name ) {\n        this( superiorProcess, masterManipulator, parent, name, CascadeInstrument.EmptySuperiorPathScope, new GuidAllocator128V7());\n    }\n\n    public VLayerInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator ) {\n        this( superiorProcess, masterManipulator, null, LayerConfig.class.getSimpleName() );\n    }\n\n    public VLayerInstrument( KOIMappingDriver driver ) {\n        this(driver.getSuperiorProcess(), driver.getMasterManipulator());\n    }\n\n    
@Override\n    public String getFullName() {\n        return super.getFullName();\n    }\n\n    @Override\n    public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) {\n        if( szTargetSep != null ) {\n            path = path.replace( szBadSep, szTargetSep );\n        }\n\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        return this.pathSelector.searchGUID( parts );\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return this.hydrogen;\n    }\n\n    @Override\n    public LayerConfig getConfig() {\n        return (LayerConfig) this.kernelObjectConfig;\n    }\n\n    @Override\n    public GUID put( TreeNode treeNode ) {\n        TreeNodeOperator operator = this.operatorFactory.getOperator( this.getLayerMetaType( treeNode ) );\n        return operator.insert( treeNode );\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        TreeNode newInstance = (TreeNode)node.getType().newInstance();\n        TreeNodeOperator operator = this.operatorFactory.getOperator( this.getLayerMetaType( newInstance ) );\n        operator.purge( guid );\n    }\n\n    protected TreeNodeOperator getOperatorByGuid( GUID guid ) {\n        ImperialTreeNode node = this.imperialTree.getNode( guid );\n        if ( node == null ){\n            return null;\n        }\n        TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? 
>[]{this.getClass()}, null );\n        return this.operatorFactory.getOperator( this.getLayerMetaType( newInstance ) );\n    }\n\n    @Override\n    public Layer get( GUID guid ) {\n        TreeNodeOperator operator = this.getOperatorByGuid( guid );\n        if( operator == null ) {\n            return null;\n        }\n        return (Layer) operator.get( guid );\n    }\n\n    @Override\n    public void addChild( GUID parentGuid, GUID childGuid ) {\n        this.imperialTree.affirmOwnedNode(childGuid, parentGuid);\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n\n    }\n\n    @Override\n    public List<Layer> splitGraphLayer( VectorDAG vectorDAG ) {\n        Layer layer = this.mLayerManipulator.queryLayer( vectorDAG.getAffiliateLayerGuid() );\n        List<TreeNode> children = this.getChildren( layer.getGuid() );\n        List<GUID> collect = children.stream().map(TreeNode::getGuid).collect(Collectors.toList());\n        List<Layer> layers = this.mLayerManipulator.fetchLayer(collect);\n        return layers;\n    }\n\n    @Override\n    public long countSourceNode( GUID layerGuid ) {\n        return this.mLayerHandleManipulator.countSourceNode( layerGuid );\n    }\n\n    @Override\n    public List<GUID> fetchSourceGuidsByTaskPriority( GUID layerGuid, long offset, long limit ) {\n        return this.mLayerHandleManipulator.fetchSourceGuidsByTaskPriority( layerGuid, offset, limit );\n    }\n\n\n\n\n\n    @Override\n    public List<Layer> fetchLayers( long offset, long limit, boolean anyNode, @Nullable GUID parentGuid ) {\n        return this.mLayerManipulator.fetchLayerPage( offset, limit, anyNode, parentGuid );\n    }\n\n    @Unsafe( \"TestOnly\" )\n    @Override\n    public List<Layer> fetchLayersAll() {\n        TableIndexMeta meta = this.getLayerIndexMeta();\n        return this.fetchLayersById( meta.getMinId(), meta.getMaxId() );\n    }\n\n    @Override\n    public List<Layer> fetchLayersById( long idStart, long idEnd, boolean anyNode, 
@Nullable GUID parentGuid ) {\n        return this.mLayerManipulator.fetchLayerPageById( idStart, idEnd, anyNode, parentGuid );\n    }\n\n    @Override\n    public TableIndexMeta getLayerIndexMeta( boolean anyNode, @Nullable GUID parentGuid ) {\n        return this.mLayerManipulator.selectLayerIndexMeta( anyNode, parentGuid );\n    }\n\n    @Override\n    public long queryMaxLayerPage( long limit, boolean anyNode, @Nullable GUID parentGuid ) {\n        if ( limit <= 0 ) {\n            throw new IllegalArgumentException( \"Limit must be greater than zero.\" );\n        }\n\n        long nTotal = this.mLayerManipulator.countLayer( anyNode, parentGuid );\n        if ( nTotal == 0 ) {\n            return 0;\n        }\n\n        long nPage = nTotal / limit;\n        if ( nTotal % limit != 0 ) {\n            ++nPage;\n        }\n\n        return nPage;\n    }\n\n\n\n\n    protected ImperialTreeNode affirmPreinsertionInitialize(AtlasLayer atlasLayer) {\n        GUID guid = atlasLayer.getGuid();\n        atlasLayer.setUpdateTime(LocalDateTime.now());\n        GUIDImperialTrieNode imperialTrieNode = new GUIDImperialTrieNode();\n        imperialTrieNode.setGuid(guid);\n        return imperialTrieNode;\n    }\n\n    private String getLayerMetaType( TreeNode treeNode ) {\n        return treeNode.className().replace( \"Atlas\", \"\" );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/ArchLayerComponentOperator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerTreeNode;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator;\n\nimport java.time.LocalDateTime;\n\npublic abstract class ArchLayerComponentOperator implements LayerComponentOperator {\n    protected LayerInstrument mLayerInstrument;\n\n    protected LayerComponentOperatorFactory     mFactory;\n\n    protected ImperialTree                      mImperialTree;\n\n    protected LayerMasterManipulator            mLayerMasterManipulator;\n\n    protected GuidAllocator                     mGuidAllocator;\n\n\n    public ArchLayerComponentOperator( LayerMasterManipulator layerMasterManipulator, LayerInstrument layerInstrument) {\n        this.mImperialTree = layerInstrument.getMasterTrieTree();\n        this.mLayerInstrument = layerInstrument;\n        this.mLayerMasterManipulator = layerMasterManipulator;\n        this.mGuidAllocator  = layerInstrument.getGuidAllocator();\n    }\n\n    protected ImperialTreeNode affirmPreinsertionInitialize( LayerTreeNode treeNode ) {\n        GUID guid = treeNode.getGuid();\n        treeNode.setUpdateTime( LocalDateTime.now() );\n        GUIDImperialTrieNode imperialTrieNode = new GUIDImperialTrieNode();\n        imperialTrieNode.setGuid( guid );\n        imperialTrieNode.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n\n        return imperialTrieNode;\n    }\n\n    public LayerComponentOperatorFactory 
getLayerComponentOperatorFactory() {\n        return this.mFactory;\n    }\n\n    protected String getLayerNodeMetaType( TreeNode treeNode ) {\n        return treeNode.className().replace(\"Atlas\",\"\");\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/AtlasLayerComponentOperatorFactory.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.operator;\n\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.unit.vgraph.layer.AtlasLayer;\nimport com.pinecone.hydra.unit.vgraph.layer.AtlasLayerNamespace;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.TreeMap;\n\npublic class AtlasLayerComponentOperatorFactory implements LayerComponentOperatorFactory {\n    protected LayerMasterManipulator                mLayerMasterManipulator;\n\n    protected LayerInstrument mLayerInstrument;\n\n    protected Map<String, TreeNodeOperator>   registerer = new HashMap<>();\n\n    protected Map<String, String>                   metaTypeMap = new TreeMap<>();\n\n    protected void registerDefaultMetaType( Class<?> genericType ) {\n        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace(\n                \"Atlas\",\"\"\n        ));\n    }\n\n    protected void registerDefaultMetaTypes() {\n        this.registerDefaultMetaType( AtlasLayer.class );\n        this.registerDefaultMetaType( AtlasLayerNamespace.class );\n    }\n\n    public AtlasLayerComponentOperatorFactory(LayerInstrument layerInstrument, LayerMasterManipulator layerMasterManipulator ) {\n        this.mLayerInstrument = layerInstrument;\n        this.mLayerMasterManipulator = layerMasterManipulator;\n\n        this.registerer.put(\n                DefaultLayer,\n                new LayerOperator( this )\n        );\n        this.registerer.put(\n                DefaultNamespace,\n                new LayerNamespaceOperator( this )\n        );\n    }\n\n\n\n    @Override\n    public void register(String typeName, TreeNodeOperator functionalNodeOperation) {\n        this.registerer.put( typeName, functionalNodeOperation );\n    }\n\n    @Override\n    public void 
registerMetaType(Class<?> clazz, String metaType) {\n        this.registerMetaType( clazz.getName(), metaType );\n    }\n\n    @Override\n    public void registerMetaType(String classFullName, String metaType) {\n        this.metaTypeMap.put( classFullName, metaType );\n    }\n\n    @Override\n    public String getMetaType(String classFullName) {\n        return this.metaTypeMap.get( classFullName );\n    }\n\n    @Override\n    public LayerComponentOperator getOperator(String typeName) {\n        return (LayerComponentOperator) this.registerer.get( typeName );\n    }\n\n    @Override\n    public LayerInstrument getLayerManager() {\n        return this.mLayerInstrument;\n    }\n\n    @Override\n    public LayerMasterManipulator getMasterManipulator() {\n        return this.mLayerMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/LayerComponentOperator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.operator;\n\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface LayerComponentOperator extends TreeNodeOperator {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/LayerComponentOperatorFactory.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.operator;\n\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerNamespace;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator;\n\npublic interface LayerComponentOperatorFactory extends OperatorFactory {\n    String DefaultLayer         = Layer.class.getSimpleName();\n\n    String DefaultNamespace     = LayerNamespace.class.getSimpleName();\n\n    void registerMetaType( Class<?> clazz, String metaType );\n\n    void registerMetaType( String classFullName, String metaType );\n\n    String getMetaType( String classFullName );\n\n    LayerComponentOperator getOperator(String typeName );\n\n    LayerInstrument getLayerManager();\n\n    LayerMasterManipulator getMasterManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/LayerNamespaceOperator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerNamespace;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.NamespaceManipulator;\n\nimport java.util.List;\n\npublic class LayerNamespaceOperator extends ArchLayerComponentOperator implements LayerComponentOperator {\n    protected NamespaceManipulator      mNamespaceManipulator;\n\n    public LayerNamespaceOperator( LayerComponentOperatorFactory factory ) {\n        this( factory.getMasterManipulator(), factory.getLayerManager() );\n        this.mFactory = factory;\n    }\n\n    public LayerNamespaceOperator(LayerMasterManipulator layerMasterManipulator, LayerInstrument layerInstrument) {\n        super(layerMasterManipulator, layerInstrument);\n        this.mNamespaceManipulator = layerMasterManipulator.getNamespaceManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        GUID guid = this.mGuidAllocator.nextGUID();\n        LayerNamespace layerNamespace = (LayerNamespace) treeNode;\n        layerNamespace.setGuid( guid );\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(layerNamespace);\n\n        this.mImperialTree.insert(imperialTreeNode);\n        this.mNamespaceManipulator.insert(layerNamespace);\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        List<GUIDImperialTrieNode> children = this.mImperialTree.getChildren(guid);\n        for( GUIDImperialTrieNode node : children ) {\n            TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? 
>[]{this.getClass()}, this );\n            LayerComponentOperator operator = this.mFactory.getOperator(this.getLayerNodeMetaType(newInstance));\n            operator.purge( node.getGuid() );\n        }\n        this.removeNode( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid) {\n\n        return this.mNamespaceManipulator.query(guid);\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    private void removeNode( GUID guid ) {\n        this.mImperialTree.purge( guid );\n        this.mImperialTree.removeCachePath( guid );\n        this.mNamespaceManipulator.remove( guid );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/operator/LayerOperator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerGraphHandle;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.layer.AtlasLayer;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class LayerOperator extends ArchLayerComponentOperator implements LayerComponentOperator{\n    protected LayerManipulator          mLayerManipulator;\n\n    protected LayerHandleManipulator    mLayerHandleManipulator;\n\n    public LayerOperator( LayerComponentOperatorFactory factory ) {\n        this( factory.getMasterManipulator(), factory.getLayerManager() );\n        this.mFactory = factory;\n    }\n\n    public LayerOperator(LayerMasterManipulator layerMasterManipulator, LayerInstrument layerInstrument) {\n        super(layerMasterManipulator, layerInstrument);\n        this.mLayerManipulator = this.mLayerMasterManipulator.getLayerManipulator();\n        this.mLayerHandleManipulator = this.mLayerMasterManipulator.getLayerHandleManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        GUID guid = this.mGuidAllocator.nextGUID();\n        AtlasLayer atlasLayer = (AtlasLayer) treeNode;\n        atlasLayer.setGuid( guid );\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(atlasLayer);\n\n        this.mImperialTree.insert(imperialTreeNode);\n\n        LayerGraphHandle layerGraphHandle = new 
LayerGraphHandle();\n        layerGraphHandle.setGuid(atlasLayer.getGuid());\n        layerGraphHandle.setName(atlasLayer.getName());\n        layerGraphHandle.setUpdateTime(atlasLayer.getUpdateTime());\n        layerGraphHandle.setCreateTime(atlasLayer.getCreateTime());\n\n        this.mLayerManipulator.insertLayer( layerGraphHandle );\n\n        if( atlasLayer.getSourceGuids() != null ) {\n            this.mLayerHandleManipulator.batchInsertSourceNodes( layerGraphHandle.getGuid(), atlasLayer.getSourceGuids() );\n        }\n\n        if( atlasLayer.getSinkGuids() != null ) {\n            this.mLayerHandleManipulator.batchInsertSinkNodes( layerGraphHandle.getGuid(), atlasLayer.getSinkGuids() );\n        }\n\n\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        List<GUIDImperialTrieNode> children = this.mImperialTree.getChildren(guid);\n        for( GUIDImperialTrieNode node : children ) {\n            TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? 
>[]{this.getClass()}, this );\n            LayerComponentOperator operator = this.mFactory.getOperator(this.getLayerNodeMetaType(newInstance));\n            operator.purge( node.getGuid() );\n        }\n        this.removeNode( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid) {\n        Layer layer = this.mLayerManipulator.queryLayer(guid);\n        List<GUID> sourceNodeGuids = this.mLayerHandleManipulator.fetchSourceNodes(layer.getGuid());\n        List<GUID> sinkNodeGuids = this.mLayerHandleManipulator.fetchSinkNodes(layer.getGuid());\n        layer.setSourceGuids( sourceNodeGuids );\n        layer.setSinkGuids( sinkNodeGuids );\n        return layer;\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    private void removeNode( GUID guid ) {\n        this.mImperialTree.purge( guid );\n        this.mImperialTree.removeCachePath( guid );\n        this.mLayerManipulator.remove( guid );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerHandleManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.List;\n\npublic interface LayerHandleManipulator extends Pinenut {\n\n    void insertSourceNode( GUID layerGuid, GUID handleGuid );\n\n    void insertSinkNode( GUID layerGuid, GUID handleGuid );\n\n    void batchInsertSourceNodes( GUID layerGuid, List<GUID> handleGuids );\n\n    void batchInsertSinkNodes( GUID layerGuid, List<GUID> handleGuids );\n\n    List<GUID> fetchSourceNodes( GUID layerGuid );\n\n    List<GUID> fetchSinkNodes( GUID layerGuid );\n\n    long countSourceNode( GUID layerGuid );\n\n    List<GUID> fetchSourceGuidsByTaskPriority( GUID layerGuid, long offset, long limit );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.source;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerGraphHandle;\nimport com.pinecone.slime.meta.TableIndexMeta;\n\nimport java.util.List;\n\npublic interface LayerManipulator extends GUIDNameManipulator {\n\n    void insertLayer(LayerGraphHandle layer );\n\n    void remove( GUID guid );\n\n    Layer queryLayer( GUID guid );\n\n    List<Layer> fetchLayer( List<GUID> guids );\n\n    List<GUID > getGuidsByName( String name );\n\n    List<GUID > getGuidsByNameID( String name, GUID guid );\n\n    void batchInsertLayer( List<LayerGraphHandle> list );\n\n\n    // anyNode: true: ignore parentGuid, false: filter by parentGuid\n    List<Layer> fetchLayerPage( long offset, long limit, boolean anyNode, @Nullable GUID parentGuid );\n\n    List<Layer> fetchLayerPageById( long idStart, long idEnd, boolean anyNode, @Nullable GUID parentGuid );\n\n    TableIndexMeta selectLayerIndexMeta( boolean anyNode, @Nullable GUID parentGuid );\n\n    long countLayer( boolean anyNode, @Nullable GUID parentGuid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerMasterManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface LayerMasterManipulator extends KOIMasterManipulator {\n    LayerManipulator            getLayerManipulator();\n\n    NamespaceManipulator        getNamespaceManipulator();\n\n    LayerHandleManipulator      getLayerHandleManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerMasterTreeManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.source;\n\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\n\npublic interface LayerMasterTreeManipulator extends TreeMasterManipulator {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerOwnerManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\n\nimport java.util.List;\n\npublic interface LayerOwnerManipulator extends TireOwnerManipulator {\n    void insertRootNode (GUID guid, LinkedType linkedType );\n\n    default void insertRootNode ( GUID guid ) {\n        this.insertRootNode( guid, LinkedType.Owned );\n    }\n\n    void insert( GUID targetGuid, GUID parentGUID, LinkedType linkedType );\n\n    default void insertOwnedNode( GUID targetGuid, GUID parentGUID ) {\n        this.insert( targetGuid, parentGUID, LinkedType.Owned );\n    }\n\n    default void insertHardLinkedNode( GUID targetGuid, GUID parentGUID ) {\n        this.insert( targetGuid, parentGUID, LinkedType.Hard );\n    }\n\n\n\n    void update( GUID targetGuid, GUID parentGUID, LinkedType linkedType );\n\n    void updateParentGuid( GUID targetGuid, GUID parentGUID );\n\n    void updateLinkedType( GUID targetGuid, LinkedType linkedType );\n\n\n\n    void remove( GUID subordinateGuid, GUID ownerGuid );\n\n    void removeBySubordinate( GUID subordinateGuid );\n\n    void removeByOwner( GUID OwnerGuid );\n\n    GUID getOwner( GUID subordinateGuid );\n\n    List<GUID > getSubordinates(GUID guid );\n\n\n\n    void setLinkedType             ( GUID sourceGuid, GUID targetGuid, LinkedType linkedType );\n\n    default void setOwned          ( GUID sourceGuid, GUID targetGuid ) {\n        this.setLinkedType( sourceGuid, targetGuid, LinkedType.Owned );\n    }\n\n    default void setHardLink       ( GUID sourceGuid, GUID targetGuid ) {\n        this.setLinkedType( sourceGuid, targetGuid, LinkedType.Hard );\n    }\n\n    LinkedType getLinkedType       ( GUID childGuid,GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerPathCacheManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\n\npublic interface LayerPathCacheManipulator extends TriePathCacheManipulator {\n    void insert (GUID guid, String path );\n\n    void insertLongPath( GUID guid, String path, String longPath );\n\n    void remove ( GUID guid );\n\n    String getPath ( GUID guid );\n\n    GUID getNode ( String path );\n\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/LayerTreeManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\n\nimport java.util.List;\n\npublic interface LayerTreeManipulator extends TrieTreeManipulator {\n    void insert(TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node );\n\n    /** With detail meta data node information. */\n    GUIDImperialTrieNode getNode(GUID guid );\n\n    boolean contains( GUID key ) ;\n\n    /** Only with tree node index information. */\n    GUIDImperialTrieNode getTreeNodeOnly(GUID guid, GUID parentGuid );\n\n    long countNode( GUID guid, GUID parentGuid );\n\n    // TODO\n    void insertNode( GUID guid, GUIDImperialTrieNode distributedTreeNode );\n\n    // TODO\n    void updateNode( GUID guid, GUIDImperialTrieNode distributedTreeNode );\n\n\n\n    /** Purge / Deletion */\n    void purge         ( GUID guid );\n\n    void removeTreeNode( GUID guid );\n\n    void removeTreeNodeByParentGuid( GUID parentGuid );\n\n    void removeTreeNodeYoke( GUID guid, GUID parentGuid );\n\n    void removeTreeNodeWithLinkedType( GUID guid, LinkedType linkedType );\n\n    void removeNodeMeta( GUID guid );\n\n    default void removeOwnedTreeNode ( GUID guid ) {\n        this.removeTreeNodeWithLinkedType( guid, LinkedType.Owned );\n    }\n\n    default void removeHardLinkedTreeNode ( GUID guid ) {\n        this.removeTreeNodeWithLinkedType( guid, LinkedType.Hard );\n    }\n\n\n\n    /** Lineage / Affinity */\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    List<GUID > fetchChildrenGuids( GUID parentGuid );\n\n    List<GUID > fetchParentGuids( GUID guid );\n\n    
void removeInheritance( GUID childNode, GUID parentGUID );\n\n    void addChild( GUID childGuid, GUID parentGuid );\n\n    void updateType       (UOI type, GUID guid );\n\n    List<GUID > fetchRoot();\n\n    boolean isRoot( GUID guid );\n\n\n\n    /** Link / Reference */\n    /**\n     * Querying link-count, that the node be linked by its owner. [Strong/Weak]\n     * 获取节点引用计数。 [根据强弱引用条件]\n     * @return the link-count, which its has been linked.\n     */\n    long queryLinkedCount( GUID guid, LinkedType linkedType );\n\n    long queryAllLinkedCount( GUID guid );\n\n    default long queryStrongLinkedCount( GUID guid ) {\n        return this.queryLinkedCount( guid, LinkedType.Owned );\n    }\n\n    default long queryWeakLinkedCount( GUID guid ) {\n        return this.queryLinkedCount( guid, LinkedType.Hard );\n    }\n\n    void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, GUID tagGuid, LinkedType linkedType );\n\n    default void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName, GUID tagGuid ) {\n        this.newLinkTag( originalGuid, dirGuid, tagName, tagGuid, LinkedType.Hard );\n    }\n\n    void updateLinkTagName( GUID tagGuid, String tagName );\n\n    GUID getOriginalGuid( String tagName,GUID parentDirGuid );\n\n    GUID getOriginalGuidByNodeGuid( String tagName, GUID nodeGUID );\n\n    ReparseLinkNode getReparseLinkNode(String tagName, GUID parentDirGuid );\n\n    ReparseLinkNode getReparseLinkNodeByNodeGuid( String tagName, GUID nodeGUID );\n\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    boolean isTagGuid( GUID guid );\n\n    GUID getOriginalGuidByTagGuid( GUID tagGuid );\n\n    void removeReparseLink( GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/layer/source/NamespaceManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.layer.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerNamespace;\n\npublic interface NamespaceManipulator extends GUIDNameManipulator {\n    void insert( LayerNamespace layerNamespace );\n\n    void remove( GUID guid );\n\n    LayerNamespace query( GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/AtlasMappingDriver.java",
    "content": "package com.pinecone.hydra.unit.vgraph.source;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.homotype.StereotypicInjector;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.system.Hydrogen;\n\nimport java.util.Map;\n\npublic interface AtlasMappingDriver extends Pinenut {\n    String getVersionSignature();\n\n    Hydrogen getSystem();\n\n    Processum getSuperiorProcess();\n\n    AtlasMasterManipulator getMasterManipulator();\n\n    // Temp, TODO\n    StereotypicInjector autoConstruct(Class<?> stereotype, Map config, Object instance );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/AtlasMasterManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface AtlasMasterManipulator extends Pinenut {\n\n    VectorGraphMasterManipulator getVectorGraphMasterManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/VectorGraphManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.slime.meta.TableIndex64Meta;\n\nimport java.util.List;\n\npublic interface VectorGraphManipulator extends Pinenut {\n    void insertHandleNode( GraphNode graphNode );\n\n    void insertGraphNode( GraphNode graphNode );\n\n    void insertNodeByEdge( GUID parentGuid, GraphNode graphNode );\n\n\n    void removeNode( GUID guid );\n\n    GraphNode queryNode( GUID guid );\n\n    List<GUID> fetchParentIds(GUID guid );\n\n    List<GraphNode> fetchChildNodes( GUID guid );\n\n    List<GUID> fetchChildNodeGuids( GUID guid );\n\n    List<GUID> limitFetchChildNodeGuids( long offset, long limit, GUID guid );\n\n    List<GraphNode> fetchRootNodes();\n\n    long countChildNodeNums( GUID guid );\n\n\n    List<GUID> fetchChildNodeIds( GUID guid );\n\n    List<GraphNode> fetchNodesByName( String name );\n\n    void updateNode( GraphNode graphNode );\n\n    List<GUID> fetchHandleGuids(long offset, long limit);\n\n    List<GUID> fetchHandleGuidsByTaskPriority( long offset, long limit );\n\n    long countSourceNodes();\n\n    List<GUID> fetchDownstreamNodeGuid( GUID nodeGuid, long offset, long limit );\n\n    List<GUID> fetchUpstreamNodeGuid( GUID nodeGuid, long offset, long limit );\n\n    long queryInDegree( GUID nodeGuid );\n\n    long queryOutDegree( GUID nodeGuid );\n\n    long getPriorityByInDegree( GUID guid );\n\n    void addChild( GUID parentGuid, GUID childGuid );\n\n\n    List<GraphNode> fetchIsolatedNodes( long offset, long limit );\n\n    List<GraphNode> fetchIsolatedNodesById( long idStart, long idEnd );\n\n    long countIsolatedNodes();\n\n    TableIndex64Meta selectIsolatedNodeIndexMeta();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/VectorGraphMasterManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\n\npublic interface VectorGraphMasterManipulator extends Pinenut {\n    VectorGraphManipulator getVectorGraphManipulator();\n\n    VectorGraphPathCacheManipulator getVectorGraphPathCacheManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/source/VectorGraphPathCacheManipulator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.List;\n\npublic interface VectorGraphPathCacheManipulator extends Pinenut {\n    void insert(String path, GUID guid);\n\n    void insertLongPath( GUID guid, String path, String longPath );\n\n    void remove ( GUID guid );\n\n    void removeByPath( String path );\n\n    List<String> getPath (GUID guid );\n\n    GUID getNode ( String path );\n\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/traversal/AtlasGraphIterator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.traversal;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\n\npublic class AtlasGraphIterator implements GraphIterator {\n\n    @Override\n    public boolean containNode(VectorDAG vectorDAG, GUID targetNodeGuid) {\n        return false;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/unit/vgraph/traversal/GraphIterator.java",
    "content": "package com.pinecone.hydra.unit.vgraph.traversal;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\n\npublic interface GraphIterator extends Pinenut {\n    boolean containNode( VectorDAG vectorDAG, GUID targetNodeGuid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/DataWare.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic interface DataWare extends Ware {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/MessageWare.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic interface MessageWare extends Middleware {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/Middleware.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic interface Middleware extends Ware {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/MiddlewareDirector.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic interface MiddlewareDirector extends WareDirector {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/MiddlewareManager.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic interface MiddlewareManager extends WareManager {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/OLAPWare.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic interface OLAPWare {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/OLTPWare.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic interface OLTPWare {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/RDBWare.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic interface RDBWare extends Ware {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/Ware.java",
    "content": "package com.pinecone.hydra.ware;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Ware extends Pinenut {\n\n    String typeName();\n\n    String domainTypeName();\n\n    WareDomain wareDomain();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/WareDirector.java",
    "content": "package com.pinecone.hydra.ware;\n\nimport com.pinecone.framework.system.regime.arch.Director;\nimport com.pinecone.framework.util.config.Config;\n\npublic interface WareDirector extends Director {\n\n    Config getSectionConfig();\n\n    WareManager getManager( String name );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/WareDomain.java",
    "content": "package com.pinecone.hydra.ware;\n\npublic enum WareDomain {\n    Undefined                  ( \"Undefined\"        , 0x0000 ),\n    Data                       ( \"Data\"             , 0x0001 ),\n    Storage                    ( \"Storage\"          , 0x0002 ),\n    Message                    ( \"Message\"          , 0x0003 ),\n    Config                     ( \"Config\"           , 0x0004 ),\n    Log                        ( \"Log\"              , 0x0005 ),\n    Compute                    ( \"Compute\"          , 0x0006 ),\n    Monitor                    ( \"Monitor\"          , 0x0007 ),\n    Security                   ( \"Security\"         , 0x0008 ),\n    Network                    ( \"Network\"          , 0x0009 ),\n    Business                   ( \"Business\"         , 0x000A ),\n    User                       ( \"User\"             , 0x000B ),\n    Device                     ( \"Device\"           , 0x000C ),\n    Other                      ( \"Other\"            , 0xFFFF );\n\n    private final String value;\n\n    private final short code;\n\n    WareDomain( String value, int code ){\n        this.value = value;\n        this.code  = (short) code;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n\n    public short getCode() {\n        return this.code;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture/src/main/java/com/pinecone/hydra/ware/WareManager.java",
    "content": "package com.pinecone.hydra.ware;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\nimport com.pinecone.framework.util.config.Config;\n\npublic interface WareManager extends Manager {\n\n    Config getManagedWaresConfig();\n\n    Ware getWare( String name );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-architecture-conduct</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    
</dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/CascadeMarshal.java",
    "content": "package com.pinecone.hydra.system.conduct;\n\npublic interface CascadeMarshal extends Marshal, Unit {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/CascadeUnit.java",
    "content": "package com.pinecone.hydra.system.conduct;\n\npublic interface CascadeUnit extends Unit {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Marshal.java",
    "content": "package com.pinecone.hydra.system.conduct;\n\nimport com.pinecone.framework.system.regime.Instrument;\n\npublic interface Marshal extends Instrument {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Unionem.java",
    "content": "package com.pinecone.hydra.system.conduct;\n\nimport com.pinecone.framework.system.regime.Orchestrator;\n\npublic interface Unionem extends Orchestrator {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/conduct/Unit.java",
    "content": "package com.pinecone.hydra.system.conduct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Unit extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/CascadeFlow.java",
    "content": "package com.pinecone.hydra.system.flow;\n\npublic interface CascadeFlow extends Flow, Stage {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/Flow.java",
    "content": "package com.pinecone.hydra.system.flow;\n\nimport com.pinecone.framework.system.regime.Orchestrator;\n\npublic interface Flow extends Orchestrator {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/SequentialFlow.java",
    "content": "package com.pinecone.hydra.system.flow;\n\npublic interface SequentialFlow extends Flow {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/flow/Stage.java",
    "content": "package com.pinecone.hydra.system.flow;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Stage extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/ups/UniformPyramidTask.java",
    "content": "package com.pinecone.hydra.system.ups;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n/**\n *  Pinecone Ursus For Java - Uniform Pyramid Task Scheduling\n *  Author: Harald.E (Dragon King)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation. All rights reserved.\n *  *****************************************************************************************\n *  Uniform Pyramid Task Scheduling\n *  A Centrally Controlled Architecture for Systematic Task Orchestration and Scheduling\n *  金字塔模型统一调度系统 - 基于中央集权的系统性任务规划与调度架构\n *  *****************************************************************************************\n */\npublic interface UniformPyramidTask extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-conduct/src/main/java/com/pinecone/hydra/system/ups/UniformPyramidTaskInstrument.java",
    "content": "package com.pinecone.hydra.system.ups;\n\nimport com.pinecone.framework.system.regime.Instrument;\n\npublic interface UniformPyramidTaskInstrument extends Instrument {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-message/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-architecture-message</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Hydra/hydra-architecture-message/src/main/java/com/pinecone/message/ArchResponse.java",
    "content": "package com.pinecone.message;\n\nimport org.springframework.http.HttpStatus;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic abstract class ArchResponse implements Pinenut {\n\n    private Boolean    success;\n    private Integer    code = HttpStatus.OK.value();\n    private String     message;\n    private String     requestId;\n    private String     errorCode;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-message/src/main/java/com/pinecone/message/StringResponse.java",
    "content": "package com.pinecone.message;\n\npublic class StringResponse extends ArchResponse {\n}\n"
  },
  {
    "path": "Hydra/hydra-architecture-storage/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-architecture-storage</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Hydra/hydra-architecture-storage/src/main/java/com/pinecone/hydra/storage/UFile.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UFile extends Pinenut {\n    String getName();\n\n    Number size();\n\n    //String getPath();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-framework-config</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        
</dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>mysql</groupId>\n            <artifactId>mysql-connector-java</artifactId>\n            <version>8.0.26</version>\n        </dependency>\n        <dependency>\n            <groupId>org.jsoup</groupId>\n            <artifactId>jsoup</artifactId>\n            <version>1.15.4</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/AccountConfig.java",
    "content": "package com.pinecone.hydra.account;\n\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface AccountConfig extends KernelObjectConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/AccountManager.java",
    "content": "package com.pinecone.hydra.account;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.ACNodeAllotment;\nimport com.pinecone.hydra.account.entity.Account;\nimport com.pinecone.hydra.account.entity.Credential;\nimport com.pinecone.hydra.account.entity.Domain;\nimport com.pinecone.hydra.account.entity.ElementNode;\nimport com.pinecone.hydra.account.entity.GenericAccount;\nimport com.pinecone.hydra.account.entity.GenericAuthorization;\nimport com.pinecone.hydra.account.entity.GenericDomain;\nimport com.pinecone.hydra.account.entity.GenericPrivilege;\nimport com.pinecone.hydra.account.entity.GenericRole;\nimport com.pinecone.hydra.account.entity.Group;\nimport com.pinecone.hydra.account.entity.Privilege;\nimport com.pinecone.hydra.account.entity.Role;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.ulf.util.guid.i64.GUID72;\n\nimport java.util.List;\n\npublic interface AccountManager extends KOMInstrument {\n    ACNodeAllotment getAllotment();\n    AccountConfig KernelAccountConfig = new KernelAccountConfig();\n\n    Account affirmAccount( String path );\n\n    Group   affirmGroup( String path );\n\n    Domain  affirmDomain( String path );\n\n    void insertCredential( Credential credential );\n\n    void insertRole(Role role);\n\n    ElementNode queryElement( String path );\n\n    void addChildren(GUID parentGuid, GUID childrenGuid);\n\n    boolean containsChild( GUID parentGuid, String childName );\n\n    List<GUID> queryAccountGuidByName(String userName);\n\n    boolean queryAccountByGuid(GUID userGuid, String kernelCredential);\n\n    void insertPrivilege(GenericPrivilege privilege);\n    void removePrivilege(GUID privilegeGuid);\n\n    Object queryPrivilege(GUID72 guid72);\n    List<GenericPrivilege> queryPrivilegeByName(String name);\n\n    List<GenericPrivilege> queryAllPrivileges();\n\n    void updateRole(GenericRole role);\n    GUID queryUserCredentialByGuid(GUID userGuid);\n    
boolean hasPermission(GUID userGuid, String requiredPrivilegeCode);\n    void insertAuthorization(GenericAuthorization authorization);\n\n    void removeAuthorizationByGuid(GUID userGuid);\n\n    void removeAuthorizationByUserGuid(GUID userGuid);\n\n    List<GenericAccount> queryAllAccount();\n\n    List<GenericDomain> queryAllDomain();\n\n    Group queryGroupByGroupGuid(GUID groupGuid);\n\n    String queryDomainNameByGuid(GUID domainGuid);\n\n    List<GenericAuthorization> queryAllAuthorization();\n\n    List<GenericRole> queryAllRoles();\n\n    Account queryAccountByName(String userName);\n\n    void updateAccount(Account account);\n\n    Account queryAccountByUserGuid(GUID userGuid);\n\n    Privilege queryPrivilegeByGuid(GUID guid);\n\n    void updatePrivilege(Privilege privilege);\n\n    void updateAuthorization(GUID guid72);\n\n    void removeRole(int id);\n\n    List<GenericAuthorization> queryAuthorizationByUserGuid(GUID userGuid);\n\n    Domain queryDomainByGuid(GUID domainGuid);\n\n    void updateDomain(Domain domain);\n\n    void updateGroup(Group group);\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/KernelAccountConfig.java",
    "content": "package com.pinecone.hydra.account;\n\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\npublic class KernelAccountConfig extends ArchKernelObjectConfig implements AccountConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/UniformAccountManager.java",
    "content": "package com.pinecone.hydra.account;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.account.entity.ACNodeAllotment;\nimport com.pinecone.hydra.account.entity.Account;\nimport com.pinecone.hydra.account.entity.Credential;\nimport com.pinecone.hydra.account.entity.Domain;\nimport com.pinecone.hydra.account.entity.ElementNode;\nimport com.pinecone.hydra.account.entity.GenericACNodeAllotment;\nimport com.pinecone.hydra.account.entity.GenericAccount;\nimport com.pinecone.hydra.account.entity.GenericAuthorization;\nimport com.pinecone.hydra.account.entity.GenericDomain;\nimport com.pinecone.hydra.account.entity.GenericGroup;\nimport com.pinecone.hydra.account.entity.GenericPrivilege;\nimport com.pinecone.hydra.account.entity.GenericRole;\nimport com.pinecone.hydra.account.entity.Group;\nimport com.pinecone.hydra.account.entity.Privilege;\nimport com.pinecone.hydra.account.entity.Role;\nimport com.pinecone.hydra.account.source.AuthorizationManipulator;\nimport com.pinecone.hydra.account.source.CredentialManipulator;\nimport com.pinecone.hydra.account.source.PrivilegeManipulator;\nimport com.pinecone.hydra.account.source.RoleManipulator;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.ArchKOMTree;\nimport com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport 
com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.account.operator.GenericAccountOperatorFactory;\nimport com.pinecone.hydra.account.source.DomainNodeManipulator;\nimport com.pinecone.hydra.account.source.GroupNodeManipulator;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\nimport com.pinecone.hydra.account.source.UserNodeManipulator;\nimport com.pinecone.ulf.util.guid.i64.GUID72;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Objects;\n\npublic class UniformAccountManager extends ArchKOMTree implements AccountManager {\n    protected UserMasterManipulator             userMasterManipulator;\n\n    protected GroupNodeManipulator              groupNodeManipulator;\n\n    protected UserNodeManipulator               userNodeManipulator;\n\n    protected DomainNodeManipulator             domainNodeManipulator;\n\n    protected CredentialManipulator             credentialManipulator;\n\n    protected AuthorizationManipulator          authorizationManipulator;\n\n    protected PrivilegeManipulator              privilegeManipulator;\n    protected RoleManipulator                   roleManipulator;\n\n    protected List<GUIDNameManipulator >        folderManipulators;\n\n    protected List<GUIDNameManipulator >        fileManipulators;\n\n    protected ACNodeAllotment                   acNodeAllotment;\n\n\n    public UniformAccountManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, AccountManager parent, String name, String superiorPathScope, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorProcess, masterManipulator, KernelAccountConfig, parent, name, superiorPathScope, guidAllocator );\n        this.userMasterManipulator = (UserMasterManipulator) masterManipulator;\n        this.pathResolver          = new KOPathResolver( this.kernelObjectConfig );\n\n        this.operatorFactory            = new 
GenericAccountOperatorFactory( this, this.userMasterManipulator );\n        this.groupNodeManipulator       = this.userMasterManipulator.getGroupNodeManipulator();\n        this.userNodeManipulator        = this.userMasterManipulator.getUserNodeManipulator();\n        this.domainNodeManipulator      = this.userMasterManipulator.getDomainNodeManipulator();\n        this.credentialManipulator      = this.userMasterManipulator.getCredentialManipulator();\n        this.authorizationManipulator   = this.userMasterManipulator.getAuthorizationManipulator();\n        this.privilegeManipulator               = this.userMasterManipulator.getPrivilegeManipulator();\n        this.roleManipulator                      = this.userMasterManipulator.getRoleManipulator();\n\n        this.folderManipulators = new ArrayList<>(List.of(this.domainNodeManipulator, this.groupNodeManipulator));\n        this.fileManipulators   = new ArrayList<>(List.of(this.userNodeManipulator));\n\n        this.pathSelector                = new MultiFolderPathSelector(\n                this.pathResolver, this.imperialTree, this.folderManipulators.toArray( new GUIDNameManipulator[]{} ), this.fileManipulators.toArray( new GUIDNameManipulator[]{} )\n        );\n\n        this.acNodeAllotment = new GenericACNodeAllotment( this );\n    }\n\n    public UniformAccountManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, AccountManager parent, String name ) {\n        this( superiorProcess, masterManipulator, parent, name, CascadeInstrument.EmptySuperiorPathScope, null );\n    }\n\n    public UniformAccountManager( Processum superiorProcess, KOIMasterManipulator masterManipulator ) {\n        this( superiorProcess, masterManipulator, null, AccountManager.class.getSimpleName() );\n    }\n\n    public UniformAccountManager( KOIMappingDriver driver, AccountManager parent, String name ){\n        this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name );\n    }\n\n    
public UniformAccountManager( KOIMappingDriver driver ) {\n        this( driver.getSuperiorProcess(), driver.getMasterManipulator() );\n    }\n\n    @Override\n    public ACNodeAllotment getAllotment() {\n        return this.acNodeAllotment;\n    }\n\n    @Override\n    public Object queryEntityHandleByNS(String path, String szBadSep, String szTargetSep) {\n        return null;\n    }\n\n    @Override\n    public String getPath( GUID guid ) {\n        return this.getNS( guid, this.kernelObjectConfig.getPathNameSeparator() );\n    }\n\n    @Override\n    public String getFullName( GUID guid ) {\n        return this.getNS( guid, this.kernelObjectConfig.getFullNameSeparator() );\n    }\n\n    @Override\n    public ElementNode queryElement(String path) {\n        GUID guid = this.queryGUIDByPath(path);\n        if( guid != null ) {\n            return (ElementNode) this.get( guid );\n        }\n\n        return null;\n    }\n\n    protected ElementNode affirmTreeNodeByPath(String path, Class<? > cnSup, Class<? > nsSup ) {\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        String currentPath = \"\";\n        GUID parentGuid = GUIDs.Dummy128();\n\n        ElementNode node = this.queryElement(path);\n        if ( node != null ){\n            return node;\n        }\n\n        ElementNode ret = null;\n        for( int i = 0; i < parts.length; ++i ){\n            currentPath = currentPath + ( i > 0 ? 
this.getConfig().getPathNameSeparator() : \"\" ) + parts[ i ];\n            node = this.queryElement( currentPath );\n            if ( node == null){\n                if ( i == parts.length - 1 && cnSup != null ){\n                    Account account = (Account) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } );\n                    account.setName( parts[i] );\n                    GUID guid = this.put( account );\n                    return account;\n                }\n                else {\n                    ElementNode element = (ElementNode) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } );\n                    element.setName( parts[i] );\n                    GUID guid = this.put( element );\n                    if ( i != 0 ){\n                        this.treeMasterManipulator.getTrieTreeManipulator().addChild( guid, parentGuid );\n                        parentGuid = guid;\n                    }\n                    else {\n                        parentGuid = guid;\n                    }\n\n                    ret = element;\n                }\n            }\n            else {\n                parentGuid = node.getGuid();\n            }\n        }\n\n        return ret;\n    }\n    @Override\n    public Account affirmAccount(String path) {\n        return (Account) this.affirmTreeNodeByPath( path, GenericAccount.class, GenericDomain.class );\n    }\n\n    @Override\n    public Group affirmGroup(String path) {\n        return (Group) this.affirmTreeNodeByPath( path, GenericGroup.class, GenericDomain.class);\n    }\n\n    @Override\n    public Domain affirmDomain(String path) {\n        return (Domain) this.affirmTreeNodeByPath( path, null, GenericDomain.class );\n    }\n\n    @Override\n    public void insertCredential(Credential credential) {\n        this.credentialManipulator.insert( credential );\n\n    }\n\n    @Override\n    public void insertRole(Role role) {\n        this.roleManipulator.insert( role );\n    }\n\n 
   @Override\n    public void addChildren( GUID parentGuid, GUID childrenGuid ) {\n        this.treeMasterManipulator.getTrieTreeManipulator().addChild( childrenGuid, parentGuid );\n    }\n\n    @Override\n    public boolean containsChild(GUID parentGuid, String childName) {\n        for( GUIDNameManipulator manipulator : this.fileManipulators ) {\n            if( this.containsChild( manipulator, parentGuid, childName ) ) {\n                return true;\n            }\n        }\n\n        for( GUIDNameManipulator manipulator : this.folderManipulators ) {\n            if( this.containsChild( manipulator, parentGuid, childName ) ) {\n                return true;\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public List<GUID> queryAccountGuidByName(String userName) {\n        return  this.userNodeManipulator.getGuidsByName( userName );\n\n    }\n\n    @Override\n    public boolean queryAccountByGuid(GUID userGuid, String kernelCredential) {\n        Account account = this.userNodeManipulator.queryUser( userGuid );\n        return account.getKernelCredential().equals(kernelCredential);\n\n    }\n\n    @Override\n    public void insertPrivilege(GenericPrivilege privilege) {\n        this.privilegeManipulator.insert(privilege);\n    }\n\n    @Override\n    public void removePrivilege(GUID privilegeGuid) {\n        this.privilegeManipulator.remove( privilegeGuid );\n    }\n\n    @Override\n    public Object queryPrivilege(GUID72 guid72) {\n        return null;\n    }\n\n    @Override\n    public List<GenericPrivilege> queryPrivilegeByName(String name) {\n        return null;\n    }\n\n    @Override\n    public List<GenericPrivilege> queryAllPrivileges() {\n        List<GenericPrivilege> privileges = new ArrayList<>();\n        for( GenericPrivilege privilege : this.privilegeManipulator.queryAllPrivileges() ) {\n            privileges.add( privilege );\n        }\n        return privileges;\n    }\n\n    @Override\n    public void 
updateRole(GenericRole role) {\n\n        this.roleManipulator.updateRole(role);\n    }\n\n    @Override\n    public GUID queryUserCredentialByGuid(GUID userGuid) {\n        return this.credentialManipulator.queryCredential(userGuid).getGuid();\n    }\n\n    @Override\n    public boolean hasPermission(GUID userGuid, String requiredPrivilegeCode) {\n        List<GenericAuthorization> authorizations = this.authorizationManipulator.queryAuthorizationByUserGuid(userGuid);\n        for( GenericAuthorization authorization : authorizations ) {\n            if( authorization.getPrivilegeToken().contains( requiredPrivilegeCode ) ) {\n                return true;\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public void insertAuthorization(GenericAuthorization authorization) {\n        this.authorizationManipulator.insert( authorization );\n    }\n\n    @Override\n    public void removeAuthorizationByGuid(GUID userGuid) {\n        this.authorizationManipulator.remove( userGuid );\n    }\n\n    @Override\n    public void removeAuthorizationByUserGuid(GUID userGuid) {\n        this.authorizationManipulator.removeAuthorizationByUserGuid( userGuid );\n    }\n\n    @Override\n    public List<GenericAccount> queryAllAccount() {\n        return this.userNodeManipulator.queryAllAccount();\n    }\n\n    @Override\n    public List<GenericDomain> queryAllDomain() {\n        return this.domainNodeManipulator.queryAllDomain();\n    }\n\n    @Override\n    public Group queryGroupByGroupGuid(GUID groupGuid) {\n        return this.groupNodeManipulator.queryGroup(groupGuid);\n    }\n\n    @Override\n    public String queryDomainNameByGuid(GUID domainGuid) {\n        return this.domainNodeManipulator.queryDomainNameByGuid(domainGuid);\n    }\n\n    @Override\n    public List<GenericAuthorization> queryAllAuthorization() {\n        return this.authorizationManipulator.queryAllAuthorization();\n    }\n\n    @Override\n    public List<GenericRole> queryAllRoles() 
{\n        return  this.roleManipulator.queryAllRoles();\n    }\n\n    @Override\n    public Account queryAccountByName(String userName) {\n        return this.userNodeManipulator.queryAccountByName( userName );\n    }\n\n    @Override\n    public void updateAccount(Account account) {\n        this.userNodeManipulator.update( account );\n    }\n\n    @Override\n    public Account queryAccountByUserGuid(GUID userGuid) {\n        return this.userNodeManipulator.queryAccountByUserGuid( userGuid );\n    }\n\n    @Override\n    public Privilege queryPrivilegeByGuid(GUID guid) {\n        return this.privilegeManipulator.queryPrivilege( guid );\n    }\n\n    @Override\n    public void updatePrivilege(Privilege privilege) {\n        this.privilegeManipulator.update(privilege);\n    }\n\n    @Override\n    public void updateAuthorization(GUID guid72) {\n        this.authorizationManipulator.update( guid72 );\n    }\n\n    @Override\n    public void removeRole(int id) {\n        //this.roleManipulator.removeRole( id );\n    }\n\n    @Override\n    public List<GenericAuthorization> queryAuthorizationByUserGuid(GUID userGuid) {\n        return this.authorizationManipulator.queryAuthorizationByUserGuid( userGuid );\n    }\n\n    @Override\n    public Domain queryDomainByGuid(GUID domainGuid) {\n        return this.domainNodeManipulator.queryDomain( domainGuid );\n    }\n\n    @Override\n    public void updateDomain(Domain domain) {\n        this.domainNodeManipulator.update( domain );\n    }\n\n    @Override\n    public void updateGroup(Group group) {\n        this.groupNodeManipulator.update( group );\n    }\n\n    protected boolean containsChild( GUIDNameManipulator manipulator, GUID parentGuid, String childName ) {\n        List<GUID > guids = manipulator.getGuidsByName( childName );\n        for( GUID guid : guids ) {\n            List<GUID > ps = this.imperialTree.fetchParentGuids( guid );\n            if( ps.contains( parentGuid ) ){\n                return true;\n        
    }\n        }\n        return false;\n    }\n\n    protected String getNS(GUID guid, String szSeparator ){\n        String path = this.imperialTree.getCachePath(guid);\n        if ( path != null ) {\n            return path;\n        }\n\n        ImperialTreeNode node = this.imperialTree.getNode(guid);\n        String assemblePath = this.getNodeName(node);\n        while ( !node.getParentGUIDs().isEmpty() && this.allNonNull( node.getParentGUIDs() ) ){\n            List<GUID> parentGuids = node.getParentGUIDs();\n            for( int i = 0; i < parentGuids.size(); ++i ){\n                if ( parentGuids.get(i) != null ){\n                    node = this.imperialTree.getNode( parentGuids.get(i) );\n                    break;\n                }\n            }\n            String nodeName = this.getNodeName(node);\n            assemblePath = nodeName + szSeparator + assemblePath;\n        }\n        this.imperialTree.insertCachePath( guid, assemblePath );\n        return assemblePath;\n    }\n\n    private String getNodeName(ImperialTreeNode node ){\n        UOI type = node.getType();\n        TreeNode newInstance = (TreeNode)type.newInstance();\n        TreeNodeOperator operator = this.operatorFactory.getOperator( newInstance.getMetaType() );\n        TreeNode treeNode = operator.get(node.getGuid());\n        return treeNode.getName();\n    }\n\n    private boolean allNonNull( List<?> list ) {\n        return list.stream().noneMatch( Objects::isNull );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/ACNodeAllotment.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ACNodeAllotment extends Pinenut {\n    Domain newDomain();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Account.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface Account extends FileElement {\n    String getNickName();\n    void setNickName( String nickName );\n\n    String getKernelCredential();\n    void setKernelCredential( String kernelCredential );\n\n    GUID getCredentialGuid();\n    void setCredentialGuid( GUID credentialGuid );\n\n    String getKernelGroupType();\n    void setKernelGroupType( String kernelGroupType );\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime( LocalDateTime updateTime );\n\n    String getRole();\n    void setRole( String role);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/ArchElementNode.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\npublic class ArchElementNode implements ElementNode {\n    protected long              enumId;\n\n    protected String            name;\n\n    protected GUID              guid;\n\n    protected AccountManager    accountManager;\n\n    protected GuidAllocator     guidAllocator = GUIDs.newGuidAllocator();\n\n    public ArchElementNode(){\n        this.guid = guidAllocator.nextGUID();\n    }\n\n    public ArchElementNode(AccountManager accountManager){\n        this.guid = this.guidAllocator.nextGUID();\n        this.accountManager = accountManager;\n    }\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/ArchFolderElementNode.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.AccountManager;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\n\npublic class ArchFolderElementNode extends ArchElementNode implements FolderElement{\n\n    public ArchFolderElementNode(){\n        super();\n    }\n    public ArchFolderElementNode(AccountManager accountManager){\n        super(accountManager);\n    }\n\n    @Override\n    public List<ElementNode> fetchChildren() {\n        ArrayList<ElementNode> elementNodes = new ArrayList<>();\n        Collection<GUID> guids = this.fetchChildrenGuids();\n        for( GUID elementGuid : guids ){\n            ElementNode node = (ElementNode)this.accountManager.get(elementGuid);\n            elementNodes.add( node );\n        }\n        return elementNodes;\n    }\n\n    @Override\n    public Collection<GUID> fetchChildrenGuids() {\n        return this.accountManager.fetchChildrenGuids(this.getGuid());\n    }\n\n    @Override\n    public void addChild(ElementNode child) {\n        GUID childId;\n        boolean bContainsChild = this.containsChild( child.getName() );\n        if( bContainsChild ) {\n            return;\n        }\n        else {\n            childId = this.accountManager.put( child );\n        }\n\n\n        this.accountManager.addChildren( this.guid, childId );\n    }\n\n    @Override\n    public boolean containsChild(String childName) {\n        return this.accountManager.containsChild( this.guid, childName );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Authorization.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface Authorization extends Pinenut {\n    int getEnumId();\n\n    GUID getGuid();\n    void setGuid( GUID guid );\n\n    String getUserName();\n    void setUserName( String userName );\n\n    GUID getUserGuid();\n    void setUserGuid( GUID userGuid );\n\n    GUID getCredentialGuid();\n    void setCredentialGuid( GUID credentialGuid );\n\n    String getPrivilegeToken();\n    void setPrivilegeToken(String privilegeToken);\n\n    GUID getPrivilegeGuid();\n    void setPrivilegeGuid( GUID privilegeGuid );\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime( LocalDateTime updateTime );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Credential.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface Credential extends Pinenut {\n    int getEnumId();\n\n    GUID getGuid();\n    void setGuid( GUID guid );\n\n    String getName();\n    void setName( String name );\n\n    String getCredential();\n    void setCredential( String credential );\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime( LocalDateTime updateTime );\n\n    String getType();\n    void setType( String type );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Domain.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.hydra.account.source.DomainNodeManipulator;\n\npublic interface Domain extends FolderElement {\n    String getDomainName();\n    void setDomainName( String domainName );\n\n    void save();\n\n    void  delete();\n\n    void setDomainNodeManipulator(DomainNodeManipulator domainNodeManipulator);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/ElementNode.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.meta.ElementObject;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface ElementNode extends TreeNode, ElementObject {\n    long getEnumId();\n    void setEnumId( long enumId );\n\n    String getName();\n    void setName( String name );\n\n    GUID getGuid();\n    void setGuid( GUID guid );\n\n    @Override\n    default String objectCategoryName() {\n        return \"Account\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/FileElement.java",
    "content": "package com.pinecone.hydra.account.entity;\n\npublic interface FileElement extends ElementNode{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/FolderElement.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.Collection;\n\npublic interface FolderElement extends ElementNode{\n    Collection<ElementNode> fetchChildren();\n\n    Collection<GUID> fetchChildrenGuids();\n\n    void addChild( ElementNode child );\n\n    boolean containsChild( String childName );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericACNodeAllotment.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\n\npublic class GenericACNodeAllotment implements ACNodeAllotment{\n    protected AccountManager        accountManager;\n\n    protected UserMasterManipulator userMasterManipulator;\n\n    public GenericACNodeAllotment(AccountManager accountManager){\n        this.accountManager = accountManager;\n        this.userMasterManipulator = (UserMasterManipulator) accountManager.getMasterTrieTree();\n    }\n\n\n    @Override\n    public Domain newDomain() {\n        return new GenericDomain( accountManager, userMasterManipulator.getDomainNodeManipulator() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericAccount.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.account.AccountManager;\n\nimport java.time.LocalDateTime;\n\npublic class GenericAccount extends ArchElementNode implements Account {\n    protected long              enumId;\n\n    protected String            name;\n\n    protected GUID              guid;\n\n    protected String            nickName;\n\n    protected String            kernelCredential;\n\n    protected GUID              credentialGuid;\n\n    protected String            kernelGroupType;\n\n    protected LocalDateTime     createTime;\n\n    protected LocalDateTime     updateTime;\n\n    protected String            role;\n\n    public GenericAccount(){\n        super();\n    }\n\n    public GenericAccount(AccountManager accountManager){\n        super(accountManager);\n    }\n\n\n    @Override\n    public String getNickName() {\n        return this.nickName;\n    }\n\n    @Override\n    public void setNickName(String nickName) {\n        this.nickName = nickName;\n    }\n\n    @Override\n    public String getKernelCredential() {\n        return this.kernelCredential;\n    }\n\n    @Override\n    public void setKernelCredential(String kernelCredential) {\n        this.kernelCredential = kernelCredential;\n    }\n\n    @Override\n    public GUID getCredentialGuid() {\n        return this.credentialGuid;\n    }\n\n    @Override\n    public void setCredentialGuid(GUID credentialGuid) {\n        this.credentialGuid = credentialGuid;\n    }\n\n    @Override\n    public String getKernelGroupType() {\n        return this.kernelGroupType;\n    }\n\n    @Override\n    public void setKernelGroupType(String kernelGroupType) {\n        this.kernelGroupType = kernelGroupType;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void 
setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public String getRole() {\n        return this.role;\n    }\n\n    @Override\n    public void setRole(String role) {\n        this.role = role;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericAuthorization.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic class GenericAuthorization implements Authorization{\n    private int enumId;\n\n    private GUID guid;\n\n    private String userName;\n\n    private GUID userGuid;\n\n    private GUID credentialGuid;\n\n    private String privilegeToken;\n\n    private GUID privilegeGuid;\n\n    private LocalDateTime createTime;\n\n    private LocalDateTime updateTime;\n\n    public GenericAuthorization(){}\n\n    public GenericAuthorization(GUID userGuid, String userName, GUID credential,\n                                String privilegeToken,\n                                LocalDateTime creationTime, LocalDateTime expirationTime) {\n        this.userGuid = userGuid;\n        this.userName = userName;\n        this.credentialGuid =credential;\n        this.privilegeToken = privilegeToken;\n\n        this.createTime = creationTime;\n        this.updateTime = expirationTime;\n    }\n\n    @Override\n    public int getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public String getUserName() {\n        return this.userName;\n    }\n\n    @Override\n    public void setUserName(String userName) {\n        this.userName = userName;\n    }\n\n    @Override\n    public GUID getUserGuid() {\n        return this.userGuid;\n    }\n\n    @Override\n    public void setUserGuid(GUID userGuid) {\n        this.userGuid = userGuid;\n    }\n\n    @Override\n    public GUID getCredentialGuid() {\n        return this.credentialGuid;\n    }\n\n    @Override\n    public void setCredentialGuid(GUID credentialGuid) {\n        this.credentialGuid = credentialGuid;\n    }\n\n    @Override\n    public String getPrivilegeToken() {\n        return this.privilegeToken;\n 
   }\n\n    @Override\n    public void setPrivilegeToken(String privilegeToken) {\n        this.privilegeToken = privilegeToken;\n    }\n\n    @Override\n    public GUID getPrivilegeGuid() {\n        return this.privilegeGuid;\n    }\n\n    @Override\n    public void setPrivilegeGuid(GUID privilegeGuid) {\n        this.privilegeGuid = privilegeGuid;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericCredential.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic class GenericCredential implements Credential {\n    private int enumId;\n\n    private GUID guid;\n\n    private String name;\n\n    private String credential;\n\n    private LocalDateTime createTime;\n\n    private LocalDateTime updateTime;\n\n    private String type;\n\n    public GenericCredential(){}\n\n    public GenericCredential( GUID guid, String name, String credential, LocalDateTime createTime, LocalDateTime updateTime, String type) {\n\n        this.guid = guid;\n        this.name = name;\n        this.credential = credential;\n        this.createTime = createTime;\n        this.updateTime = updateTime;\n        this.type = type;\n    }\n\n    @Override\n    public int getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public String getCredential() {\n        return this.credential;\n    }\n\n    @Override\n    public void setCredential(String credential) {\n        this.credential = credential;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public String getType() {\n        return this.type;\n    }\n\n    @Override\n    public 
void setType(String type) {\n        this.type = type;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericDomain.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.source.DomainNodeManipulator;\n\npublic class GenericDomain extends ArchFolderElementNode  implements Domain{\n    protected String                domainName;\n\n    protected DomainNodeManipulator domainNodeManipulator;\n\n    public GenericDomain(){\n        super();\n    }\n\n    public GenericDomain(AccountManager accountManager, DomainNodeManipulator domainNodeManipulator){\n        super(accountManager);\n\n        this.accountManager = accountManager;\n        this.domainNodeManipulator = domainNodeManipulator;\n    }\n\n    @Override\n    public String getDomainName() {\n        return this.domainName;\n    }\n\n    @Override\n    public void setDomainName(String domainName) {\n        this.domainName = domainName;\n    }\n\n\n    @Override\n    public void save() {\n        this.accountManager.put( this );\n    }\n\n    @Override\n    public void delete() {\n        this.accountManager.remove( this.guid );\n    }\n\n    @Override\n    public void setDomainNodeManipulator(DomainNodeManipulator domainNodeManipulator) {\n        this.domainNodeManipulator = domainNodeManipulator;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericGroup.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.account.AccountManager;\n\npublic class GenericGroup extends ArchFolderElementNode implements Group{\n    protected GUID defaultPrivilegePolicyGuid;\n\n    public GenericGroup(){\n        super();\n    }\n\n    public GenericGroup(AccountManager accountManager){\n        super(accountManager);\n    }\n    @Override\n    public GUID getDefaultPrivilegePolicyGuid() {\n        return this.defaultPrivilegePolicyGuid;\n    }\n\n    @Override\n    public void setDefaultPrivilegePolicyGuid(GUID defaultPrivilegePolicyGuid) {\n        this.defaultPrivilegePolicyGuid = defaultPrivilegePolicyGuid;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericPrivilege.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic class GenericPrivilege implements Privilege {\n    private int id;\n    private GUID guid;\n    private String token;\n    private String name;\n    private String privilegeCode;\n    private LocalDateTime createTime;\n    private LocalDateTime updateTime;\n    private String type;\n    private GUID parentPrivGuid;\n\n    @Override\n    public int getId() {\n        return id;\n    }\n    @Override\n    public GUID getGuid() {\n        return guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public String getToken() {\n        return token;\n    }\n\n    @Override\n    public void setToken(String token) {\n        this.token = token;\n    }\n\n    @Override\n    public String getName() {\n        return name;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public String getPrivilegeCode() {\n        return privilegeCode;\n    }\n\n    @Override\n    public void setPrivilegeCode(String privilegeCode) {\n        this.privilegeCode = privilegeCode;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return updateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public String getType() {\n        return type;\n    }\n\n    @Override\n    public void setType(String type) {\n        this.type = type;\n    }\n\n    @Override\n    public GUID getParentPrivGuid() {\n        return parentPrivGuid;\n    }\n\n    @Override\n    public void 
setParentPrivGuid(GUID parentPrivGuid) {\n        this.parentPrivGuid = parentPrivGuid;\n    }\n\n    // 无参构造方法\n    public GenericPrivilege() {\n    }\n    // 全参构造方法\n    public GenericPrivilege( GUID guid, String token, String name, String privilegeCode, LocalDateTime createTime, LocalDateTime updateTime, String type) {\n        this.guid = guid;\n        this.token = token;\n        this.name = name;\n        this.privilegeCode = privilegeCode;\n        this.createTime = createTime;\n        this.updateTime = updateTime;\n        this.type = type;\n\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/GenericRole.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport java.time.LocalDateTime;\n\npublic class GenericRole implements Role{\n    private int id;\n    private String name;\n    private String privilegeGuids;\n    private LocalDateTime createTime;\n    private LocalDateTime updateTime;\n    private String type;\n\n    // 无参构造方法\n    public GenericRole() {\n        super();\n    }\n\n    // 全参构造方法\n    public GenericRole(String name, String privilegeGuids, LocalDateTime createTime, LocalDateTime updateTime, String type) {\n\n        this.name = name;\n        this.privilegeGuids = privilegeGuids;\n        this.createTime = createTime;\n        this.updateTime = updateTime;\n        this.type = type;\n    }\n\n\n\n    @Override\n    public int getId() {\n        return id;\n    }\n    @Override\n    public String getName() {\n        return name;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public String getPrivilegeGuids() {\n        return privilegeGuids;\n    }\n\n    @Override\n    public void setPrivilegeGuids(String privilegeGuids) {\n        this.privilegeGuids = privilegeGuids;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return updateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public String getType() {\n        return type;\n    }\n\n    @Override\n    public void setType(String type) {\n        this.type = type;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Group.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\n\npublic interface Group extends FolderElement {\n\n    GUID getDefaultPrivilegePolicyGuid();\n    void setDefaultPrivilegePolicyGuid( GUID defaultPrivilegePolicyGuid );\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Privilege.java",
    "content": "package com.pinecone.hydra.account.entity;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface Privilege extends Pinenut {\n    int getId();\n\n    String getPrivilegeCode();\n    String getToken();\n    GUID getParentPrivGuid();\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n\n    void setToken(String token);\n\n    String getName();\n    void setName(String name);\n\n\n    void setPrivilegeCode(String privilegeCode);\n\n    LocalDateTime getCreateTime();\n    void setCreateTime(LocalDateTime createTime);\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime(LocalDateTime updateTime);\n\n    String getType();\n    void setType(String type);\n\n\n    void setParentPrivGuid(GUID parentPrivGuid);\n}"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/entity/Role.java",
    "content": "package com.pinecone.hydra.account.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.time.LocalDateTime;\n\npublic interface Role extends Pinenut {\n    int getId();\n    String getName();\n    void setName(String name);\n    String getPrivilegeGuids();\n    void setPrivilegeGuids(String privilegeGuids);\n\n    LocalDateTime getCreateTime();\n    void setCreateTime(LocalDateTime createTime);\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime(LocalDateTime updateTime);\n\n    String getType();\n    void setType(String type);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/AccountServiceOperator.java",
    "content": "package com.pinecone.hydra.account.operator;\n\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface AccountServiceOperator extends TreeNodeOperator {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/AccountServiceOperatorFactory.java",
    "content": "package com.pinecone.hydra.account.operator;\n\nimport com.pinecone.hydra.account.entity.Account;\nimport com.pinecone.hydra.account.entity.Domain;\nimport com.pinecone.hydra.account.entity.Group;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\n\npublic interface AccountServiceOperatorFactory extends OperatorFactory {\n    String DefaultUser   = Account.class.getSimpleName();\n\n    String DefaultGroup  = Group.class.getSimpleName();\n\n    String DefaultDomain = Domain.class.getSimpleName();\n\n    void register( String typeName, TreeNodeOperator functionalNodeOperation );\n\n    void registerMetaType( Class<?> clazz, String metaType );\n\n    void registerMetaType( String classFullName, String metaType );\n\n    String getMetaType( String classFullName );\n\n    AccountServiceOperator getOperator(String typeName );\n\n    AccountManager getUserManager();\n\n    UserMasterManipulator getMasterManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/ArchAccountServiceOperator.java",
    "content": "package com.pinecone.hydra.account.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\n\npublic abstract class ArchAccountServiceOperator implements AccountServiceOperator {\n    protected AccountManager                accountManager;\n\n    protected AccountServiceOperatorFactory factory;\n\n    protected ImperialTree                  imperialTree;\n\n    protected UserMasterManipulator         userMasterManipulator;\n\n    public ArchAccountServiceOperator(UserMasterManipulator masterManipulator, AccountManager accountManager){\n        this.accountManager = accountManager;\n        this.userMasterManipulator = masterManipulator;\n        this.imperialTree = this.accountManager.getMasterTrieTree();\n    }\n\n    protected ImperialTreeNode affirmPreinsertionInitialize(TreeNode node ){\n        GUID guid = node.getGuid();\n        ImperialTreeNode imperialTreeNode = new GUIDImperialTrieNode();\n        imperialTreeNode.setGuid( guid );\n        imperialTreeNode.setType( UOIUtils.createLocalJavaClass( node.getClass().getName() ) );\n\n        return imperialTreeNode;\n    }\n\n    public AccountServiceOperatorFactory getUserOperatorFactory(){\n        return this.factory;\n    }\n    protected String getUserMetaType( TreeNode treeNode ){\n        return treeNode.className().replace(\"Generic\",\"\");\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/GenericAccountOperator.java",
    "content": "package com.pinecone.hydra.account.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.entity.Account;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\nimport com.pinecone.hydra.account.source.UserNodeManipulator;\n\nimport java.time.LocalDateTime;\nimport java.util.List;\n\npublic class GenericAccountOperator extends ArchAccountServiceOperator implements AccountServiceOperator {\n    protected UserNodeManipulator   userNodeManipulator;\n\n    public GenericAccountOperator(AccountServiceOperatorFactory factory ){\n        this( factory.getMasterManipulator(), factory.getUserManager() );\n        this.factory = factory;\n        this.userNodeManipulator = this.userMasterManipulator.getUserNodeManipulator();\n    }\n\n    public GenericAccountOperator(UserMasterManipulator masterManipulator, AccountManager accountManager) {\n        super(masterManipulator, accountManager);\n        this.userNodeManipulator = this.userMasterManipulator.getUserNodeManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        Account account = (Account) treeNode;\n        account.setCreateTime(LocalDateTime.now());\n        account.setUpdateTime(LocalDateTime.now());\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(account);\n        GUID guid = account.getGuid();\n\n        this.imperialTree.insert(imperialTreeNode);\n        this.userNodeManipulator.insert(account);\n\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren(guid);\n        for( GUIDImperialTrieNode node : children ){\n            TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? >[]{this.getClass()}, this );\n            AccountServiceOperator operator = this.factory.getOperator(this.getUserMetaType(newInstance));\n            operator.purge( node.getGuid() );\n        }\n        this.removeNode( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid) {\n        return this.userNodeManipulator.queryUser(guid);\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.userNodeManipulator.remove( guid );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/GenericAccountOperatorFactory.java",
    "content": "package com.pinecone.hydra.account.operator;\n\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.TreeMap;\n\npublic class GenericAccountOperatorFactory implements AccountServiceOperatorFactory {\n\n    protected UserMasterManipulator             userMasterManipulator;\n\n    protected AccountManager accountManager;\n\n    protected Map<String, TreeNodeOperator>     registerer = new HashMap<>();\n    protected Map<String, String >              metaTypeMap = new TreeMap<>();\n\n   public GenericAccountOperatorFactory(AccountManager accountManager, UserMasterManipulator userMasterManipulator ){\n       this.accountManager = accountManager;\n       this.userMasterManipulator = userMasterManipulator;\n\n       this.registerer.put(\n               DefaultUser,\n               new GenericAccountOperator( this )\n       );\n       this.registerer.put(\n               DefaultGroup,\n               new GenericGroupOperator( this )\n       );\n       this.registerer.put(\n               DefaultDomain,\n               new GenericDomainOperator( this )\n       );\n\n   }\n\n    @Override\n    public void register(String typeName, TreeNodeOperator functionalNodeOperation) {\n        this.registerer.put( typeName, functionalNodeOperation );\n    }\n\n    @Override\n    public void registerMetaType(Class<?> clazz, String metaType) {\n        this.registerMetaType( clazz.getName(), metaType );\n    }\n\n    @Override\n    public void registerMetaType(String classFullName, String metaType) {\n        this.metaTypeMap.put( classFullName, metaType );\n    }\n\n    @Override\n    public String getMetaType(String classFullName) {\n        return this.metaTypeMap.get( classFullName );\n    }\n\n    @Override\n    public AccountServiceOperator getOperator(String typeName) 
{\n        return (AccountServiceOperator) this.registerer.get( typeName );\n    }\n\n    @Override\n    public AccountManager getUserManager() {\n        return this.accountManager;\n    }\n\n    @Override\n    public UserMasterManipulator getMasterManipulator() {\n        return this.userMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/GenericDomainOperator.java",
    "content": "package com.pinecone.hydra.account.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.entity.Domain;\nimport com.pinecone.hydra.account.source.DomainNodeManipulator;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\n\nimport java.util.List;\n\npublic class GenericDomainOperator extends ArchAccountServiceOperator implements AccountServiceOperator {\n    protected DomainNodeManipulator  domainNodeManipulator;\n\n    public GenericDomainOperator( AccountServiceOperatorFactory factory ){\n        this( factory.getMasterManipulator(), factory.getUserManager() );\n        this.factory = factory;\n        this.domainNodeManipulator = this.userMasterManipulator.getDomainNodeManipulator();\n    }\n    public GenericDomainOperator(UserMasterManipulator masterManipulator, AccountManager accountManager) {\n        super(masterManipulator, accountManager);\n        this.domainNodeManipulator = this.userMasterManipulator.getDomainNodeManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        Domain domain = (Domain) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(domain);\n        GUID guid = domain.getGuid();\n\n        this.imperialTree.insert(imperialTreeNode);\n        this.domainNodeManipulator.insert( domain );\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren(guid);\n        for( GUIDImperialTrieNode node : children ){\n            TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? >[]{this.getClass()}, this );\n            AccountServiceOperator operator = this.factory.getOperator(this.getUserMetaType(newInstance));\n            operator.purge( node.getGuid() );\n        }\n        this.removeNode( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid) {\n        return this.domainNodeManipulator.queryDomain( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.domainNodeManipulator.remove( guid );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/operator/GenericGroupOperator.java",
    "content": "package com.pinecone.hydra.account.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.entity.Group;\nimport com.pinecone.hydra.account.source.GroupNodeManipulator;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\n\nimport java.util.List;\n\npublic class GenericGroupOperator extends ArchAccountServiceOperator implements AccountServiceOperator {\n    protected GroupNodeManipulator groupNodeManipulator;\n\n    public GenericGroupOperator( AccountServiceOperatorFactory factory ){\n        this( factory.getMasterManipulator(), factory.getUserManager() );\n        this.factory = factory;\n        this.groupNodeManipulator = this.userMasterManipulator.getGroupNodeManipulator();\n    }\n\n    public GenericGroupOperator(UserMasterManipulator masterManipulator, AccountManager accountManager) {\n        super(masterManipulator, accountManager);\n        this.groupNodeManipulator = this.userMasterManipulator.getGroupNodeManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        Group group = (Group) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(group);\n        GUID guid = group.getGuid();\n\n        this.imperialTree.insert(imperialTreeNode);\n        this.groupNodeManipulator.insert( group );\n\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren(guid);\n        for( GUIDImperialTrieNode node : children ){\n            TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? >[]{this.getClass()}, this );\n            AccountServiceOperator operator = this.factory.getOperator(this.getUserMetaType(newInstance));\n            operator.purge( node.getGuid() );\n        }\n        this.removeNode( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid) {\n        return this.groupNodeManipulator.queryGroup( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n    private void removeNode( GUID guid ){\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.groupNodeManipulator.remove( guid );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/AuthorizationManipulator.java",
    "content": "package com.pinecone.hydra.account.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.Authorization;\nimport com.pinecone.hydra.account.entity.GenericAuthorization;\n\nimport java.util.List;\n\npublic interface AuthorizationManipulator extends Pinenut {\n    void insert(Authorization authorization);\n\n    void remove(GUID authorizationGuid);\n\n    void update(GUID authorizationGuid);\n\n    Authorization queryCredential(GUID authorizationGuid );\n\n    List<GenericAuthorization> queryAuthorizationByUserGuid(GUID userGuid);\n\n    void removeAuthorizationByUserGuid(GUID userGuid);\n\n    List<GenericAuthorization> queryAllAuthorization();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/CredentialManipulator.java",
    "content": "package com.pinecone.hydra.account.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.Credential;\n\npublic interface CredentialManipulator extends Pinenut {\n    void insert(Credential credential);\n\n    void remove(GUID credentialGuid);\n\n    Credential queryCredential(GUID credentialGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/DomainNodeManipulator.java",
    "content": "package com.pinecone.hydra.account.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.GenericDomain;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.account.entity.Domain;\n\nimport java.util.List;\n\npublic interface DomainNodeManipulator extends GUIDNameManipulator {\n    void insert(Domain domain);\n\n    void remove(GUID domainGuid);\n\n    Domain queryDomain(GUID domainGuid );\n\n    List<GenericDomain> queryAllDomain();\n\n    String queryDomainNameByGuid(GUID domainGuid);\n\n    void update(Domain domain);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/GroupNodeManipulator.java",
    "content": "package com.pinecone.hydra.account.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.account.entity.Group;\n\npublic interface GroupNodeManipulator extends GUIDNameManipulator {\n    void insert(Group group);\n\n    void remove(GUID groupGuid);\n\n    Group queryGroup(GUID groupGuid );\n\n    void update(Group group);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/PrivilegeManipulator.java",
    "content": "package com.pinecone.hydra.account.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.GenericPrivilege;\nimport com.pinecone.hydra.account.entity.Privilege;\n\nimport java.util.List;\n\npublic interface PrivilegeManipulator extends Pinenut {\n    void insert( Privilege privilege);\n\n    void remove( GUID privilegeGuid);\n\n    Privilege queryPrivilege( GUID privilegeGuid);\n\n    List<GenericPrivilege> queryAllPrivileges();\n\n    void update(Privilege privilege);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/RoleManipulator.java",
    "content": "package com.pinecone.hydra.account.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.GenericRole;\nimport com.pinecone.hydra.account.entity.Role;\n\nimport java.util.List;\n\npublic interface RoleManipulator extends Pinenut {\n\n    void insert(Role role);\n\n    void remove(GUID roleGuid);\n\n    Role queryRole(GUID roleGuid );\n\n    void updateRole(GenericRole role);\n\n    GenericRole queryRolesByUserGuid(String userGuid);\n\n    List<GenericRole> queryAllRoles();\n\n    void removeRoleById(int id);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/UserMasterManipulator.java",
    "content": "package com.pinecone.hydra.account.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface UserMasterManipulator extends KOIMasterManipulator {\n    DomainNodeManipulator       getDomainNodeManipulator();\n\n    GroupNodeManipulator        getGroupNodeManipulator();\n\n    UserNodeManipulator         getUserNodeManipulator();\n\n    CredentialManipulator       getCredentialManipulator();\n\n    AuthorizationManipulator    getAuthorizationManipulator();\n\n    PrivilegeManipulator        getPrivilegeManipulator();\n\n    RoleManipulator             getRoleManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/account/source/UserNodeManipulator.java",
    "content": "package com.pinecone.hydra.account.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.GenericAccount;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.account.entity.Account;\n\nimport java.util.List;\n\npublic interface UserNodeManipulator extends GUIDNameManipulator {\n    void insert(Account account);\n\n    void remove(GUID userGuid);\n\n    Account queryUser(GUID userGuid );\n\n    List<GenericAccount> queryAllAccount();\n\n    GenericAccount queryAccountByName(String userName);\n\n    void update(Account account);\n\n    GenericAccount queryAccountByUserGuid(GUID userGuid);\n\n    List<GenericAccount> queryAccountsByGroup(GUID groupGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/config/ConfigSource.java",
    "content": "package com.pinecone.hydra.config;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\n\nimport java.io.IOException;\nimport java.net.URI;\n\npublic interface ConfigSource extends Pinenut {\n    PatriarchalConfig getSearchScopeConfig();\n\n    RuntimeSystem getSystem();\n\n    PatriarchalConfig loadConfig( URI path ) throws IOException;\n\n    PatriarchalConfig loadConfig( Object dyPath ) throws IOException;\n\n    PatriarchalConfig loadConfigBySegmentName ( String szSegName ) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/config/LocalConfigSource.java",
    "content": "package com.pinecone.hydra.config;\n\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.framework.system.ErrorStrings;\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\n\nimport java.io.IOException;\nimport java.net.URI;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class LocalConfigSource implements ConfigSource {\n    protected PatriarchalConfig  mSearchScopeConf;\n    protected List<String >      mPathScopes;\n    protected List<String >      mFileExtends;\n    protected Servgram           mParentGram;\n    protected RuntimeSystem      mSystem;\n\n    @SuppressWarnings(\"unchecked\")\n    public LocalConfigSource( Servgram gram, PatriarchalConfig setupScope, PatriarchalConfig searchScope ) {\n        this.mParentGram        = gram;\n        this.mSystem            = this.mParentGram.parentSystem();\n        this.mSearchScopeConf   = searchScope;\n\n        Object t = setupScope.get( \"PathScopes\" );\n        if( t instanceof List<?> ) {\n            this.mPathScopes      = ( List<String > ) t;\n        }\n        else {\n            this.mPathScopes      = new ArrayList<>();\n        }\n\n        t = setupScope.get( \"FileExtends\" );\n        if( t instanceof List<?> ) {\n            this.mFileExtends    = ( List<String > ) t;\n        }\n        else {\n            this.mFileExtends    = new ArrayList<>();\n        }\n    }\n\n    @Override\n    public RuntimeSystem getSystem() {\n        return this.mSystem;\n    }\n\n    @Override\n    public PatriarchalConfig getSearchScopeConfig() {\n        return this.mSearchScopeConf;\n    }\n\n    @Override\n    public PatriarchalConfig loadConfig( URI path ) throws IOException {\n        String szPath = path.getPath();\n        Path lp = Path.of( szPath );\n        if( lp.isAbsolute() ) {\n            return this.getSearchScopeConfig().getChildFromPath( lp );\n        }\n\n    
    return this.loadConfig( lp );\n    }\n\n    @Override\n    public PatriarchalConfig loadConfig( Object dyPath ) throws IOException {\n        if( dyPath instanceof Path ) {\n            return this.loadConfig( (Path) dyPath );\n        }\n        else if( dyPath instanceof URI ) {\n            return this.loadConfig( (URI) dyPath );\n        }\n        else if( dyPath instanceof String ) {\n            return this.loadConfig( Path.of( (String) dyPath ) );\n        }\n\n        return this.loadConfig( Path.of( dyPath.toString() ) );\n    }\n\n    public PatriarchalConfig loadConfig( Path path ) throws IOException {\n        try{\n            return this.getSearchScopeConfig().getChildFromPath( path );\n        }\n        catch ( IOException e ) {\n            IOException ie = null;\n            for( String sp : this.mPathScopes ) {\n                try{\n                    return this.getSearchScopeConfig().getChildFromPath( Path.of( sp ).resolve( path ) );\n                }\n                catch ( IOException e1 ) {\n                    ie = e1;\n                }\n            }\n\n            if( ie != null ) {\n                throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + \"What-> '\" + path + \"'\", ie );\n            }\n        }\n\n        throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + \"What-> '\" + path + \"'\" );\n    }\n\n    @Override\n    public PatriarchalConfig loadConfigBySegmentName ( String szSegName ) throws IOException {\n        IOException ie = null;\n        for( String sfe : this.mFileExtends ) {\n            try{\n                return this.loadConfig( Path.of( szSegName + \".\" + sfe ) );\n            }\n            catch ( IOException e1 ) {\n                ie = e1;\n            }\n        }\n\n        throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + \"Segment-> '\" + szSegName + \"'\", ie );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/config/MapConfigReinterpreter.java",
    "content": "package com.pinecone.hydra.config;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.MultiScopeMap;\n\nimport java.util.Collection;\nimport java.util.Map;\n\npublic interface MapConfigReinterpreter extends Pinenut {\n    MultiScopeMap<String, Object > getPrimaryScope();\n\n    void setPrimaryScope( MultiScopeMap<String, Object > scope );\n\n    Collection<String > getExcludeKeys();\n\n    void addExcludeKey( String szKey );\n\n    void addExcludeKeys( Collection<String > keys );\n\n    void removeExcludeKey( String szKey );\n\n    String getKeyWordsToken();\n\n    void setKeyWordsToken( String szToken );\n\n    void reinterpret( Map<String, Object > that );\n\n    void reinterpret( Map<String, Object > that, MultiScopeMap<String, Object > scope );\n\n    void reinterpretByBasicKeyWordsScope( Map<String, Object > that, MultiScopeMap<String, Object > keyWordsScope );\n\n    void reinterpretByLineage( Map<String, Object > that, Object parent );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/config/ScopedMapConfigReinterpreter.java",
    "content": "package com.pinecone.hydra.config;\n\nimport com.pinecone.framework.unit.MultiScopeMap;\nimport com.pinecone.framework.unit.PrecedeMultiMaptron;\nimport com.pinecone.framework.unit.PrecedeMultiScopeMap;\nimport com.pinecone.framework.unit.MultiScopeMaptron;\nimport com.pinecone.framework.util.template.TemplateParser;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TreeSet;\nimport java.util.TreeMap;\n\npublic class ScopedMapConfigReinterpreter implements MapConfigReinterpreter {\n    protected MultiScopeMap<String, Object >   mPrimaryScope;\n    protected Set<String >                     mExcludeKeys;\n    protected String                           mszKeyWordsToken;\n\n    public ScopedMapConfigReinterpreter( MultiScopeMap<String, Object > scopeMap, String szKeyWordsToken ) {\n        this.mPrimaryScope    = scopeMap;\n        this.mExcludeKeys     = new TreeSet<>();\n        this.mszKeyWordsToken = szKeyWordsToken;\n    }\n\n    public ScopedMapConfigReinterpreter( MultiScopeMap<String, Object > scopeMap ) {\n        this( scopeMap, \"KeyWords\" );\n    }\n\n\n    @Override\n    public MultiScopeMap<String, Object > getPrimaryScope() {\n        return this.mPrimaryScope;\n    }\n\n    @Override\n    public void setPrimaryScope( MultiScopeMap<String, Object> scope ) {\n        this.mPrimaryScope = scope;\n    }\n\n    @Override\n    public Collection<String > getExcludeKeys() {\n        return this.mExcludeKeys;\n    }\n\n    @Override\n    public void addExcludeKey( String szKey ) {\n        this.mExcludeKeys.add( szKey );\n    }\n\n    @Override\n    public void addExcludeKeys( Collection<String> keys ) {\n        this.mExcludeKeys.addAll( keys );\n    }\n\n    @Override\n    public void removeExcludeKey( String szKey ) {\n        this.mExcludeKeys.remove( szKey );\n    }\n\n    @Override\n    public String getKeyWordsToken(){\n        return this.mszKeyWordsToken;\n    }\n\n  
  @Override\n    public void setKeyWordsToken( String szToken ){\n        this.mszKeyWordsToken = szToken;\n    }\n\n\n    @SuppressWarnings( \"unchecked\" )\n    protected Object reinterpretVal  ( Object key, Object val, Map scope ) {\n        if( val instanceof String ) {\n            String szVal = (String) val;\n            TemplateParser parser = new TemplateParser( szVal, scope );\n            return parser.eval();\n        }\n        else if( val instanceof Map || val instanceof List) {\n            Map    previousThisScope  = null;\n            Object previousSupper     = null;\n            Object previousThis       = null;\n            MultiScopeMap kwFields    = null;\n            if( scope instanceof MultiScopeMap ) { // Retrieving keyword fields\n                MultiScopeMap ms   = (MultiScopeMap) scope;\n                previousThisScope  = ((MultiScopeMap) scope).thisScope();\n                if( ms instanceof PrecedeMultiScopeMap) {\n                    kwFields       = ((PrecedeMultiScopeMap) ms).getPrecedeScope();\n                }\n                else {\n                    kwFields       = ms.getScopeByNS( this.mszKeyWordsToken );\n                }\n\n                if( kwFields != null ) {\n                    previousSupper     = kwFields.get( \"super\" );\n                    previousThis       = kwFields.get( \"this\"  );\n\n                    kwFields.put( \"this\" , val          );\n                    kwFields.put( \"super\", previousThis );\n                }\n            }\n\n            if( val instanceof Map ) {\n                if( scope instanceof MultiScopeMap ) {\n                    ( (MultiScopeMap) scope ).setThisScope( (Map)val      );\n                }\n\n                this.reinterpretObject( (Map<String, Object >)val, scope );\n            }\n            else {\n                this.reinterpretList( (List<Object >)val, scope );\n            }\n\n            // Restoring previous scope.\n            if( scope 
instanceof MultiScopeMap && kwFields != null ) {\n                MultiScopeMap ms   = (MultiScopeMap) scope;\n                kwFields.put( \"super\", previousSupper );\n                kwFields.put( \"this\" , previousThis   );\n                ms.setThisScope( previousThisScope );\n            }\n        }\n\n        return null;\n    }\n\n    protected void reinterpretList   ( List<Object > that, Map scope ) {\n        int idx = 0;\n        for( Object val : that ) {\n            Object nv = this.reinterpretVal( idx, val, scope );\n            if( nv != null ) {\n                that.set( idx, nv );\n            }\n            ++idx;\n        }\n    }\n\n    protected void reinterpretObject ( Map<String, Object > that, Map scope ) {\n        for( Map.Entry<String, Object > kv : that.entrySet() ) {\n            if( this.mExcludeKeys.contains( kv.getKey() ) ) {\n                continue;\n            }\n            Object nv = this.reinterpretVal( kv.getKey(), kv.getValue(), scope );\n            if( nv != null ) {\n                that.put( kv.getKey(), nv );\n            }\n        }\n    }\n\n    @Override\n    public void reinterpret( Map<String, Object> that ) {\n        this.reinterpret( that, this.mPrimaryScope );\n    }\n\n    @Override\n    public void reinterpret( Map<String, Object> that, MultiScopeMap<String, Object> scope ) {\n        this.reinterpretObject( that, scope );\n    }\n\n    @Override\n    public void reinterpretByBasicKeyWordsScope( Map<String, Object> that, MultiScopeMap<String, Object> keyWordsScope ) {\n        // Keyword fields, keyword has the highest priority.\n        PrecedeMultiMaptron<String, Object > scope = new PrecedeMultiMaptron<>( that );\n        //scope.addParent( keyWords.setName( \"KeyWords\" ) );\n        scope.setPrecedeScope( keyWordsScope.setName( this.mszKeyWordsToken ) );\n        scope.addParent( this.getPrimaryScope()                    );\n        keyWordsScope.put( \"__scope__\" , (Object) scope            
);\n\n        this.reinterpretObject( that, scope );\n    }\n\n    @Override\n    public void reinterpretByLineage( Map<String, Object> that, Object parent ) {\n        MultiScopeMap<String, Object > keyWords = new MultiScopeMaptron<>( new TreeMap<>() );\n        keyWords.put( \"this\"      , that      );\n        keyWords.put( \"super\"     , parent    );\n\n        this.reinterpretByBasicKeyWordsScope( that, keyWords );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/GenericKOMRegistry.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.registry.entity.ConfigNode;\nimport com.pinecone.hydra.registry.entity.DefaultPropertyConverter;\nimport com.pinecone.hydra.registry.entity.DefaultTextValueConverter;\nimport com.pinecone.hydra.registry.entity.ElementNode;\nimport com.pinecone.hydra.registry.entity.GenericNamespace;\nimport com.pinecone.hydra.registry.entity.GenericProperties;\nimport com.pinecone.hydra.registry.entity.GenericTextFile;\nimport com.pinecone.hydra.registry.entity.GenericTextValue;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.Property;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.registry.entity.TextFile;\nimport com.pinecone.hydra.registry.entity.TextValue;\nimport com.pinecone.hydra.registry.entity.TypeConverter;\nimport com.pinecone.hydra.registry.operator.RegistryNodeOperator;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree;\nimport com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition;\nimport com.pinecone.hydra.system.ko.kom.KOMSelector;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.StandardPathSelector;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.registry.operator.GenericRegistryOperatorFactory;\nimport com.pinecone.hydra.registry.operator.RegistryOperatorFactory;\nimport 
com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.registry.source.RegistryMasterManipulator;\nimport com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeManipulator;\nimport com.pinecone.hydra.registry.source.RegistryPropertiesManipulator;\nimport com.pinecone.hydra.registry.source.RegistryTextFileManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.io.StringReader;\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\n\n/**\n *  Pinecone Ursus For Java Uniform KOMRegistry\n *  Author: Harald.E (Dragon King), Ken\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Uniform Distribute Kernel Object Model Registry (Config KOM Registry)\n *  *****************************************************************************************\n */\npublic class GenericKOMRegistry extends ArchReparseKOMTree implements KOMRegistry {\n    protected RegistryMasterManipulator       registryMasterManipulator;\n    protected RegistryPropertiesManipulator   registryPropertiesManipulator;\n    protected RegistryTextFileManipulator     registryTextFileManipulator;\n    protected RegistryConfigNodeManipulator   configNodeManipulator;\n    protected RegistryNSNodeManipulator       namespaceNodeManipulator;\n\n    protected TypeConverter                   propertyTypeConverter;\n    protected TypeConverter                   textValueTypeConverter;\n\n\n    public GenericKOMRegistry( Processum superiorProcess, KOIMasterManipulator masterManipulator, KOMRegistry parent, String name, @Nullable GuidAllocator guidAllocator ){\n        // Phase [1] Construct system.\n        super( superiorProcess, masterManipulator, 
KernelRegistryConfig, parent, name, guidAllocator );\n\n        // Phase [2] Construct fundamentals.\n        this.registryMasterManipulator     =  (RegistryMasterManipulator) masterManipulator;\n        this.pathResolver                  =  new KOPathResolver( this.kernelObjectConfig );\n\n        // Phase [3] Construct manipulators.\n        this.registryPropertiesManipulator =  this.registryMasterManipulator.getPropertiesManipulator();\n        this.registryTextFileManipulator   =  this.registryMasterManipulator.getTextFileManipulator();\n        this.configNodeManipulator         =  this.registryMasterManipulator.getConfigNodeManipulator();\n        this.namespaceNodeManipulator      =  this.registryMasterManipulator.getNSNodeManipulator();\n        this.operatorFactory               =  new GenericRegistryOperatorFactory( this, this.registryMasterManipulator );\n\n        // Phase [4] Construct selectors.\n        this.pathSelector                  =  new StandardPathSelector(\n                this.pathResolver, this.imperialTree, this.namespaceNodeManipulator, new GUIDNameManipulator[] { this.configNodeManipulator }\n        );\n        // Warning: ReparseKOMTreeAddition must be constructed only after `pathSelector` has been constructed.\n        this.mReparseKOM                   =  new GenericReparseKOMTreeAddition( this );\n\n        // Phase [5] Construct misc.\n        this.propertyTypeConverter         =  new DefaultPropertyConverter();\n        this.textValueTypeConverter        =  new DefaultTextValueConverter();\n    }\n\n    public GenericKOMRegistry( Processum superiorProcess, KOIMasterManipulator masterManipulator, KOMRegistry parent, String name ){\n        this ( superiorProcess, masterManipulator, parent, name, null );\n    }\n\n    public GenericKOMRegistry( Processum superiorProcess, KOIMasterManipulator masterManipulator ){\n        this( superiorProcess, masterManipulator, null, KOMRegistry.class.getSimpleName() );\n    }\n\n    public 
GenericKOMRegistry( Processum superiorProcess ) {\n        this( superiorProcess, null );\n    }\n\n    public GenericKOMRegistry( KOIMappingDriver driver ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator()\n        );\n    }\n\n    public GenericKOMRegistry( KOIMappingDriver driver, KOMRegistry parent, String name ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator(),\n                parent,\n                name\n        );\n    }\n\n    @Override\n    public RegistryConfig getConfig() {\n        return (RegistryConfig) this.kernelObjectConfig;\n    }\n\n    public RegistryOperatorFactory getOperatorFactory() {\n        return (RegistryOperatorFactory) this.operatorFactory;\n    }\n\n    @Override\n    public void setPropertyTypeConverter( TypeConverter propertyTypeConverter ) {\n        this.propertyTypeConverter = propertyTypeConverter;\n    }\n\n    @Override\n    public void setTextValueTypeConverter( TypeConverter textValueTypeConverter ) {\n        this.textValueTypeConverter = textValueTypeConverter;\n    }\n\n    @Override\n    public TypeConverter getTextValueTypeConverter() {\n        return this.textValueTypeConverter;\n    }\n\n    @Override\n    public TypeConverter getPropertyTypeConverter() {\n        return this.propertyTypeConverter;\n    }\n\n\n    @Override\n    public RegistryConfig getRegistryConfig() {\n        return this.getConfig();\n    }\n\n\n\n    @Override\n    protected RegistryNodeOperator getOperatorByGuid( GUID guid ) {\n        return (RegistryNodeOperator) super.getOperatorByGuid( guid );\n    }\n\n    @Override\n    public RegistryTreeNode get( GUID guid ) {\n        return (RegistryTreeNode) super.get( guid );\n    }\n\n    @Override\n    public RegistryTreeNode get( GUID guid, int depth ) {\n        return (RegistryTreeNode) super.get( guid, depth );\n    }\n\n    @Override\n    public RegistryTreeNode 
getAsRootDepth( GUID guid ) {\n        return (RegistryTreeNode) super.getAsRootDepth( guid );\n    }\n\n    @Override\n    public ElementNode queryElement( String path ){\n        //GUID guid = this.distributedConfTree.queryGUIDByPath( path );\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid != null ) {\n            return (ElementNode) this.get( guid );\n        }\n\n        return null;\n    }\n\n    @Override\n    public Properties getProperties( GUID guid ) {\n        return this.get( guid ).evinceProperties();\n    }\n\n    @Override\n    public Properties getProperties( String path ) {\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid == null ) {\n            return null;\n        }\n        return this.getProperties( guid );\n    }\n\n    @Override\n    public Namespace getNamespace( GUID guid ) {\n        return this.get( guid ).evinceNamespace();\n    }\n\n    @Override\n    public Namespace getNamespace( String path ){\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid == null ) {\n            return null;\n        }\n        return this.getNamespace( guid );\n    }\n\n    @Override\n    public List<Property > fetchProperties( GUID guid ) {\n        Properties properties = this.getProperties( guid );\n        if( properties != null ) {\n            return this.registryPropertiesManipulator.getProperties( guid, properties );\n        }\n        return null;\n    }\n\n    @Override\n    public List<Property > fetchProperties( String path ) {\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid == null ) {\n            return null;\n        }\n        return this.fetchProperties( guid );\n    }\n\n    @Override\n    public TextValue getTextValue( GUID guid ) {\n        return this.registryTextFileManipulator.getTextValue( guid );\n    }\n\n    @Override\n    public TextValue getTextValue( String path ) {\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid == null ) {\n 
           return null;\n        }\n        return this.getTextValue( guid );\n    }\n\n    @Override\n    public void putProperty( Property property, GUID configNodeGuid ) {\n        property.setGuid( configNodeGuid );\n        property.setCreateTime( LocalDateTime.now() );\n        property.setUpdateTime( LocalDateTime.now() );\n        this.registryPropertiesManipulator.insert( property );\n    }\n\n    @Override\n    public void updateProperty( @Nullable GUID configNodeGuid, Property property ) {\n        if( configNodeGuid != null ) {\n            property.setGuid( configNodeGuid );\n        }\n        property.setUpdateTime( LocalDateTime.now() );\n        this.registryPropertiesManipulator.update( property );\n    }\n\n    @Override\n    public void updateTextValue( TextValue textValue, GUID configNodeGuid ) {\n        textValue.setGuid( configNodeGuid );\n        textValue.setUpdateTime( LocalDateTime.now() );\n        this.registryTextFileManipulator.update( textValue );\n    }\n\n    @Override\n    public void updateTextValue( GUID guid, String text, String type ) {\n        TextValue textValue = GenericTextValue.newUpdateTextValue( guid, text, type );\n        this.registryTextFileManipulator.update( textValue );\n    }\n\n\n\n    @Override\n    public void removeProperty( GUID guid, String key ) {\n        this.registryPropertiesManipulator.remove(guid,key);\n    }\n\n    @Override\n    public void removeTextValue( GUID guid ) {\n        this.registryTextFileManipulator.remove(guid);\n    }\n\n    @Override\n    public void setDataAffinityGuid( GUID childGuid, GUID parentGuid ) {\n        this.configNodeManipulator.setDataAffinityGuid( childGuid, parentGuid );\n    }\n\n    @Override\n    public List<TreeNode > selectByName( String name ) {\n        List<GUID> nodes = this.namespaceNodeManipulator.getGuidsByName(name);\n        ArrayList<TreeNode> configNodes = new ArrayList<>();\n        for( GUID guid : nodes ){\n            TreeNode treeNode =  
this.get(guid);\n            configNodes.add(treeNode);\n        }\n        return configNodes;\n    }\n\n    protected GUID[] assertCopyMove ( String sourcePath, String destinationPath ) throws IllegalArgumentException {\n        GUID sourceGuid      = this.queryGUIDByPath( sourcePath );\n        if( sourceGuid == null ) {\n            throw new IllegalArgumentException( \"Undefined source '\" + sourcePath + \"'\" );\n        }\n\n        GUID destinationGuid = this.queryGUIDByPath( destinationPath );\n        if( !this.namespaceNodeManipulator.isNamespaceNode( destinationGuid ) ){\n            throw new IllegalArgumentException( \"Illegal destination '\" + destinationPath + \"', should be namespace.\" );\n        }\n\n        if( destinationGuid == null ) {\n            throw new IllegalArgumentException( \"Undefined destination '\" + destinationPath + \"'\" );\n        }\n\n        if( sourceGuid == destinationGuid ) {\n            throw new IllegalArgumentException( \"Cyclic path detected '\" + sourcePath + \"'\" );\n        }\n\n        return new GUID[] { sourceGuid, destinationGuid };\n    }\n\n    @Override\n    public void moveTo( String sourcePath, String destinationPath ) throws IllegalArgumentException {\n        GUID[] pair = this.assertCopyMove( sourcePath, destinationPath );\n        GUID sourceGuid      = pair[ 0 ];\n        GUID destinationGuid = pair[ 1 ];\n\n        this.imperialTree.moveTo( sourceGuid, destinationGuid );\n        this.imperialTree.removeCachePath( sourceGuid );\n    }\n\n    @Override\n    public void move( String sourcePath, String destinationPath ) {\n        GUID sourceGuid         = this.assertPath( sourcePath, \"source\" );\n\n        List<String > sourParts = this.pathResolver.resolvePathParts( sourcePath );\n        List<String > destParts = this.pathResolver.resolvePathParts( destinationPath );\n\n        String szLastDestTarget = destParts.get( destParts.size() - 1 );\n        sourcePath      = sourcePath.trim();\n      
  destinationPath = destinationPath.trim();\n\n        //   Case1: Move \"game/terraria/npc\"   => \"game/minecraft/npc\", which has the same dest name.\n        // Case1-1: Move \"game/terraria/npc/\"  => \"game/minecraft/npc/\"\n        // Case1-2: Move \"game/terraria/npc/.\" => \"game/minecraft/npc/.\"\n        if(\n                sourParts.get( sourParts.size() - 1 ).equals( szLastDestTarget ) || szLastDestTarget.equals( \".\" ) ||\n                        ( sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) )\n        ) {\n            destParts.remove( destParts.size() - 1 );\n            String szParentPath = this.pathResolver.assemblePath( destParts );\n            destParts.add( szLastDestTarget );\n\n            // Move to, which has the same name or explicit current dir `.`.\n            this.moveTo( sourcePath, szParentPath );\n        }\n        // Case 2: \"game/terraria/npc\" => \"game/minecraft/character/\" || \"game/minecraft/character/.\"\n        //    game/terraria/npc => game/minecraft/character/npc\n        else if ( !sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && (\n                destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) || destinationPath.endsWith( \".\" ) )\n        ) {\n            Namespace target = this.affirmNamespace( destinationPath );\n            this.imperialTree.moveTo( sourceGuid, target.getGuid() );\n        }\n        // Case3: Move \"game/terraria/npc\" => \"game/minecraft/character\", move all children therein.\n        //    game/terraria/npc/f1 => game/minecraft/character/f1\n        //    game/terraria/npc/f2 => game/minecraft/character/f2\n        //    etc.\n        else {\n            //  Case3-1: Is config or other none namespace node.\n            //           Move \"game/terraria/file\" => \"game/minecraft/dir\".\n            //  Case3-2: \"game/terraria/npc/\" => 
\"game/minecraft/character\"\n            // Eq.Case2: Move \"game/terraria/npc\" => \"game/minecraft/character\",\n            if( !this.namespaceNodeManipulator.isNamespaceNode( sourceGuid ) ) {\n                Namespace target = this.affirmNamespace( destinationPath );\n                this.imperialTree.moveTo( sourceGuid, target.getGuid() );\n            }\n            else {\n                List<TreeNode > children = this.getChildren( sourceGuid );\n                if( !children.isEmpty() ) {\n                    Namespace target = this.affirmNamespace( destinationPath );\n                    for( TreeNode node : children ) {\n                        this.imperialTree.moveTo( node.getGuid(), target.getGuid() );\n                    }\n                }\n            }\n\n            this.imperialTree.removeTreeNodeOnly( sourceGuid );\n        }\n\n        this.imperialTree.removeCachePath( sourceGuid );\n    }\n\n    @Override\n    public void copyTo( String sourcePath, String destinationPath ) throws IllegalArgumentException {\n        GUID[] pair = this.assertCopyMove( sourcePath, destinationPath );\n        GUID sourceGuid      = pair[ 0 ];\n        GUID destinationGuid = pair[ 1 ];\n\n        this.get( sourceGuid ).copyTo( destinationGuid );\n    }\n\n    @Override\n    public void copy( String sourcePath, String destinationPath ) {\n        GUID sourceGuid         = this.assertPath( sourcePath, \"source\" );\n\n        List<String > sourParts = this.pathResolver.resolvePathParts( sourcePath );\n        List<String > destParts = this.pathResolver.resolvePathParts( destinationPath );\n\n        sourcePath      = sourcePath.trim();\n        destinationPath = destinationPath.trim();\n\n        String szLastDestTarget = destParts.get( destParts.size() - 1 );\n\n        //   Case1: Copy \"game/terraria/npc\"   => \"game/minecraft/npc\", which has the same dest name.\n        // Case1-1: Copy \"game/terraria/npc/\"  => \"game/minecraft/npc/\"\n        // 
Case1-2: Copy \"game/terraria/npc/.\" => \"game/minecraft/npc/.\"\n        if(\n                sourParts.get( sourParts.size() - 1 ).equals( szLastDestTarget ) || szLastDestTarget.equals( \".\" ) ||\n                        ( sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) )\n        ) {\n            // Just return, copy to itself.\n            return;\n        }\n\n        // Case 2: \"game/terraria/npc\" => \"game/minecraft/character/\" || \"game/minecraft/character/.\"\n        //    game/terraria/npc => game/minecraft/character/npc\n        if ( !sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && (\n                destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) || destinationPath.endsWith( \".\" ) )\n        ) {\n            this.copyTo( sourcePath, destinationPath );\n        }\n        // Case3: Copy \"game/terraria/npc\" => \"game/minecraft/character\", copy all children therein.\n        //    game/terraria/npc/f1 => game/minecraft/character/f1\n        //    game/terraria/npc/f2 => game/minecraft/character/f2\n        //    etc.\n        else {\n            //  Case3-1: Is config or other none namespace node.\n            //           Copy \"game/terraria/file\" => \"game/minecraft/dir\".\n            //  Case3-2: \"game/terraria/npc/\" => \"game/minecraft/character\"\n            // Eq.Case2: Copy \"game/terraria/npc\" => \"game/minecraft/character\",\n            if( !this.namespaceNodeManipulator.isNamespaceNode( sourceGuid ) ) {\n                Namespace target = this.affirmNamespace( destinationPath );\n                this.get( sourceGuid ).copyTo( target.getGuid() );\n            }\n            else {\n                List<TreeNode > children = this.getChildren( sourceGuid );\n                if( !children.isEmpty() ) {\n                    Namespace target = this.affirmNamespace( destinationPath );\n                    for( 
TreeNode node : children ) {\n                        RegistryTreeNode treeNode = (RegistryTreeNode) node;\n                        treeNode.copyTo( target.getGuid() );\n                    }\n                }\n            }\n        }\n    }\n\n    @Override\n    public List<TreeNode > getAllTreeNode() {\n        List<GUID> nameSpaceNodes = this.namespaceNodeManipulator.dumpGuid();\n        List<GUID> confNodes      = this.configNodeManipulator.dumpGuid();\n        ArrayList<TreeNode> treeNodes = new ArrayList<>();\n        for (GUID guid : nameSpaceNodes){\n            TreeNode treeNode = this.get(guid);\n            treeNodes.add(treeNode);\n        }\n        for ( GUID guid : confNodes ){\n            TreeNode treeNode = this.get(guid);\n            treeNodes.add(treeNode);\n        }\n        return treeNodes;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public List<RegistryTreeNode > fetchRoot() {\n        return (List) super.fetchRoot();\n    }\n\n    // TODO, Unchecked type affirmed.\n    protected RegistryTreeNode affirmTreeNodeByPath( String path, Class<? > cnSup, Class<? > nsSup ) {\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        String currentPath = \"\";\n        GUID parentGuid = GUIDs.Dummy128();\n\n        RegistryTreeNode node = this.queryElement( path );\n        if( node != null ) {\n            return node;\n        }\n\n        RegistryTreeNode ret = null;\n        for( int i = 0; i < parts.length; ++i ){\n            currentPath = currentPath + ( i > 0 ? 
this.getConfig().getPathNameSeparator() : \"\" ) + parts[ i ];\n            node = this.queryElement( currentPath );\n            if ( node == null){\n                if ( i == parts.length - 1 && cnSup != null ){\n                    ConfigNode configNode = (ConfigNode) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } );\n                    configNode.setName( parts[i] );\n                    GUID guid = this.put( configNode );\n                    this.affirmOwnedNode( parentGuid, guid );\n                    return configNode;\n                }\n                else {\n                    Namespace namespace = (Namespace) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } );\n                    namespace.setName(parts[i]);\n                    GUID guid = this.put(namespace);\n                    if ( i != 0 ){\n                        this.affirmOwnedNode( parentGuid, guid );\n                        parentGuid = guid;\n                    }\n                    else {\n                        parentGuid = guid;\n                    }\n\n                    ret = namespace;\n                }\n            }\n            else {\n                parentGuid = node.getGuid();\n            }\n        }\n\n        return ret;\n    }\n\n\n\n    @Override\n    public Namespace affirmNamespace       ( String path ) {\n        return (Namespace) this.affirmTreeNodeByPath( path, null, GenericNamespace.class );\n    }\n\n    @Override\n    public Properties affirmProperties     ( String path ) {\n        return (Properties) this.affirmTreeNodeByPath( path, GenericProperties.class, GenericNamespace.class );\n    }\n\n    @Override\n    public TextFile affirmTextConfig       ( String path ) {\n        return (TextFile) this.affirmTreeNodeByPath( path, GenericTextFile.class, GenericNamespace.class );\n    }\n\n\n\n    @Override\n    public Properties putProperties( String path, Map<String, Object > properties ) {\n        Properties pro = 
this.affirmProperties( path );\n        pro.puts( properties );\n        return pro;\n    }\n\n    @Override\n    public TextFile putTextValue(String path, String type, String value ) {\n        TextFile pro = this.affirmTextConfig( path );\n        pro.put( new GenericTextValue( pro.getGuid(), value, type ) );\n        return pro;\n    }\n\n\n    @Override\n    public void copyPropertiesTo( GUID sourceGuid, GUID destinationGuid ) {\n        this.registryPropertiesManipulator.copyPropertiesTo( sourceGuid, destinationGuid );\n    }\n\n    @Override\n    public void copyTextValueTo( GUID sourceGuid, GUID destinationGuid ) {\n        this.registryTextFileManipulator.copyTextValueTo( sourceGuid, destinationGuid );\n    }\n\n    @Override\n    public void putTextValue( GUID guid, String text, String format ){\n        GenericTextValue genericTextValue = new GenericTextValue( guid, text, format );\n        this.registryTextFileManipulator.insert( genericTextValue );\n    }\n\n\n    @Override\n    public ConfigNode getConfigNode( GUID guid ) {\n        RegistryTreeNode p = this.get( guid );\n        ConfigNode cn = p.evinceConfigNode() ;\n        if( cn != null ) {\n            return cn;\n        }\n        return null;\n    }\n\n    private String getNodeName( ImperialTreeNode node ) {\n        UOI type = node.getType();\n        TreeNode newInstance = (TreeNode)type.newInstance();\n        TreeNodeOperator operator = this.operatorFactory.getOperator(newInstance.getMetaType());\n        TreeNode treeNode = operator.get(node.getGuid());\n        return treeNode.getName();\n    }\n\n\n    protected KOMSelector newKOMSelector( String szSelector ) {\n        return new RegistryJPathSelector(\n                new StringReader( szSelector ), this.pathResolver, this, this.namespaceNodeManipulator, new GUIDNameManipulator[] { this.configNodeManipulator }\n        );\n    }\n\n    @Override\n    public Object querySelector( String szSelector ) {\n        return 
this.newKOMSelector( szSelector ).querySelector( null ) ;\n    }\n\n    @Override\n    public List querySelectorAll( String szSelector ) {\n        return this.newKOMSelector( szSelector ).querySelectorAll( null ) ;\n    }\n\n    @Override\n    public Object querySelectorJ( String szSelector ) {\n        return this.newKOMSelector( szSelector ).querySelectorJ( null ) ;\n    }\n\n    private boolean allNonNull( List<?> list ) {\n        return list.stream().noneMatch( Objects::isNull );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/GenericRenderKOMRegistry.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.template.UTRAlmondProvider;\nimport com.pinecone.framework.util.template.UniformTemplateRenderer;\nimport com.pinecone.hydra.registry.render.RenderConfigNode;\nimport com.pinecone.hydra.registry.render.RenderRegistryTreeNode;\nimport com.pinecone.hydra.registry.render.RenderTextValue;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic class GenericRenderKOMRegistry extends GenericKOMRegistry implements RenderDistributeRegistry {\n    protected UniformTemplateRenderer       mUniformTemplateRenderer;\n    private UniformTemplateRenderer         renderer;\n\n    public GenericRenderKOMRegistry( Processum superiorProcess, KOIMasterManipulator masterManipulator ){\n        super( superiorProcess, masterManipulator );\n        this.renderer = new UTRAlmondProvider();\n    }\n\n\n    @Override\n    public RenderRegistryTreeNode getAsRootDepth( GUID guid ) {\n        return (RenderRegistryTreeNode) this.getOperatorByGuid( guid ).getAsRootDepth( guid );\n    }\n\n    @Override\n    public RenderConfigNode getConfigNode( GUID guid ) {\n        return (RenderConfigNode) super.getConfigNode(guid);\n    }\n\n    @Override\n    public RenderTextValue getTextValue( GUID guid ) {\n        return (RenderTextValue) this.registryTextFileManipulator.getTextValue(guid);\n    }\n\n    @Override\n    public UniformTemplateRenderer getRenderer() {\n        return this.renderer;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/KOMRegistry.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.ConfigNode;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.Property;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.registry.entity.TextValue;\nimport com.pinecone.hydra.system.ko.kom.ReparseKOMTree;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.util.List;\n\npublic interface KOMRegistry extends Registry, ReparseKOMTree {\n\n    @Override\n    String getPath( GUID guid );\n\n    @Override\n    String getFullName( GUID guid );\n\n    @Override\n    GUID put( TreeNode treeNode );\n\n    @Override\n    RegistryTreeNode get( GUID guid );\n\n    @Override\n    RegistryTreeNode get( GUID guid, int depth );\n\n    @Override\n    RegistryTreeNode getAsRootDepth( GUID guid );\n\n    Properties getProperties( GUID guid );\n\n    Namespace getNamespace( GUID guid );\n\n    @Override\n    GUID queryGUIDByPath( String path );\n\n    @Override\n    GUID queryGUIDByFN  ( String fullName );\n\n    List<Property > fetchProperties( GUID guid );\n\n    TextValue getTextValue( GUID guid );\n\n    ConfigNode getConfigNode( GUID guid );\n\n    @Override\n    RegistryConfig getConfig();\n\n\n\n    void putProperty( Property property, GUID configNodeGuid );\n\n    void putTextValue( GUID guid, String text, String format );\n\n    void updateProperty( @Nullable GUID configNodeGuid, Property property );\n\n    default void updateProperty( Property property ) {\n        this.updateProperty( null, property );\n    }\n\n    void updateTextValue( TextValue textValue, GUID configNodeGuid );\n\n    void updateTextValue( GUID guid, String text, String format );\n\n\n    @Override\n    void remove( GUID guid );\n\n    @Override\n    void 
removeReparseLink( GUID guid );\n\n    void removeProperty( GUID guid, String key );\n\n    void removeTextValue( GUID guid );\n\n\n    @Override\n    List<TreeNode > getChildren( GUID guid );\n\n    @Override\n    void rename( GUID guid, String name );\n\n    default void rename( String path, String name ) {\n        this.rename( this.assertPath( path ), name );\n    }\n\n    @Override\n    default GUID assertPath( String path, String pathType ) throws IllegalArgumentException {\n        GUID guid      = this.queryGUIDByPath( path );\n        if( guid == null ) {\n            throw new IllegalArgumentException( \"Undefined \" + pathType + \" '\" + path + \"'\" );\n        }\n\n        return guid;\n    }\n\n    @Override\n    default GUID assertPath( String path ) throws IllegalArgumentException {\n        return this.assertPath( path, \"path\" );\n    }\n\n    List<TreeNode > getAllTreeNode();\n\n\n\n    /** 断言，确保节点唯一拥有关系*/\n    @Override\n    void affirmOwnedNode( GUID parentGuid, GUID childGuid  );\n\n    @Override\n    void newHardLink    ( GUID sourceGuid, GUID targetGuid );\n\n    /** set affinityParentGuid for child.*/\n    void setDataAffinityGuid ( GUID childGuid, GUID affinityParentGuid  );\n\n    default void setDataAffinity ( String childPath, String parentPath ) {\n        GUID childGuid      = this.assertPath( childPath );\n        GUID parentGuid     = this.assertPath( parentPath );\n        if( childGuid == parentGuid ) {\n            throw new IllegalArgumentException( \"Cyclic path detected '\" + childPath + \"'\" );\n        }\n\n        this.setDataAffinityGuid( childGuid, parentGuid );\n    }\n\n    @Override\n    void newLinkTag( GUID originalGuid,GUID dirGuid,String tagName );\n\n    @Override\n    void newLinkTag( String originalPath ,String dirPath,String tagName );\n\n    @Override\n    void updateLinkTag( GUID tagGuid,String tagName);\n\n\n    void copyPropertiesTo( GUID sourceGuid, GUID destinationGuid );\n\n    void copyTextValueTo( 
GUID sourceGuid, GUID destinationGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/KernelRegistryConfig.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\npublic class KernelRegistryConfig extends ArchKernelObjectConfig implements RegistryConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/Registry.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.hydra.registry.entity.ElementNode;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.Property;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.registry.entity.TextFile;\nimport com.pinecone.hydra.registry.entity.TextValue;\nimport com.pinecone.hydra.registry.entity.TypeConverter;\nimport com.pinecone.hydra.system.ko.KernelObjectInstrument;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface Registry extends KOMInstrument {\n    RegistryConfig KernelRegistryConfig = new KernelRegistryConfig();\n\n    RegistryConfig getRegistryConfig();\n\n    void setPropertyTypeConverter( TypeConverter propertyTypeConverter ) ;\n\n    void setTextValueTypeConverter( TypeConverter textValueTypeConverter ) ;\n\n    TypeConverter getTextValueTypeConverter() ;\n\n    TypeConverter getPropertyTypeConverter() ;\n\n\n\n    ElementNode queryElement( String path );\n\n    Properties getProperties( String path );\n\n    Namespace getNamespace(String path );\n\n    void remove( String path );\n\n    Collection<Property > fetchProperties( String path );\n\n    TextValue getTextValue( String path );\n\n\n\n    /** Normal Tree Node or ReparseLinkNode**/\n    EntityNode queryNode( String path );\n\n    ReparseLinkNode queryReparseLink( String path );\n\n    List<TreeNode > selectByName( String name );\n\n\n    /**  Move \"game/terraria/npc\" => \"game/minecraft/\" => \"game/minecraft/npc\"*/\n    void moveTo( String sourcePath, String destinationPath );\n\n    /** Affirm destination path existed.*/\n   
 void move( String sourcePath, String destinationPath );\n\n    /**  Copy \"game/terraria/npc\" => \"game/minecraft/\" => \"game/minecraft/npc\"*/\n    void copyTo( String sourcePath, String destinationPath );\n\n    void copy( String sourcePath, String destinationPath );\n\n    List<RegistryTreeNode> fetchRoot();\n\n\n\n    Namespace      affirmNamespace        ( String path );\n\n    Properties     affirmProperties       ( String path );\n\n    TextFile       affirmTextConfig       ( String path );\n\n    Properties     putProperties          ( String path, Map<String, Object > properties );\n\n    TextFile       putTextValue           ( String path, String format, String value );\n\n\n    // Return with json.\n    Object         querySelectorJ         ( String szSelector );\n\n    Object         querySelector          ( String szSelector );\n\n    List           querySelectorAll       ( String szSelector );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/RegistryConfig.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface RegistryConfig extends KernelObjectConfig {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/RegistryJPathSelector.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.Reader;\nimport java.io.StringReader;\nimport java.util.List;\n\nimport com.pinecone.framework.util.CursorParser;\nimport com.pinecone.framework.util.GeneralStrings;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.Property;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.registry.entity.TextFile;\nimport com.pinecone.hydra.system.ko.kom.KOMSelector;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.kom.ReparseLinkSelector;\n\n/**\n * RegistryJPathSelector\n * TODO: Advance Functions\n */\npublic class RegistryJPathSelector extends ReparseLinkSelector implements KOMSelector {\n    protected Reader                    mReader;\n    protected char                      mcPrevious;\n    protected long                      mnCharacter;\n    protected boolean                   mbUsePrevious;\n    protected int                       mnParseAt ;\n    protected int                       mnLineAt;\n\n    protected TokenType                 mTokenType;\n    protected StringBuilder             mCurrentToken;\n    protected KOMRegistry               mRegistry;\n\n    protected CursorParser              mThisCursor;\n    protected List<RegistryTreeNode>    mQueriedList          ;\n\n    enum TokenType {\n        T_UNDEFINED, T_DELIMITER, T_IDENTIFIER, T_INTEGER, T_FLOAT, T_KEYWORD, T_TEMP, T_STRING, T_BLOCK, T_ENDLINE\n    }\n\n    public RegistryJPathSelector(Reader reader, PathResolver pathResolver, KOMRegistry registry, GUIDNameManipulator dirMan, 
GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, registry.getMasterTrieTree(), dirMan, fileMans );\n\n        this.mRegistry      = registry;\n\n        this.mReader        = (Reader)(reader.markSupported() ? reader : new BufferedReader(reader));\n        this.mCurrentToken  = new StringBuilder();\n\n        this.mThisCursor    = new RegistrySelectorCursorParser( this );\n    }\n\n    protected SelectorParseException parseException( String message ) {\n        return new SelectorParseException( message + \" at \" + this.mnParseAt + \" [character \" + this.mnCharacter + \" line \" + this.mnLineAt + \"]\", (int)this.mnParseAt );\n    }\n\n    public void back() throws SelectorParseException {\n        if (!this.mbUsePrevious && this.mnParseAt > 0L) {\n            --this.mnParseAt;\n            --this.mnCharacter;\n            this.mbUsePrevious = true;\n            this.mTokenType = TokenType.T_UNDEFINED;\n        }\n        else {\n            throw new SelectorParseException( \"Stepping back two steps is not supported\" );\n        }\n    }\n\n\n    public boolean end() {\n        return this.mTokenType == TokenType.T_ENDLINE && !this.mbUsePrevious;\n    }\n\n    public char next() throws SelectorParseException {\n        int c;\n        if ( this.mbUsePrevious ) {\n            this.mbUsePrevious = false;\n            c = this.mcPrevious;\n        }\n        else {\n            try {\n                c = this.mReader.read();\n            }\n            catch ( IOException e ) {\n                throw new SelectorParseException( e, this.mnParseAt );\n            }\n\n            if ( c <= 0 ) {\n                this.mTokenType = TokenType.T_ENDLINE;\n                c = 0;\n            }\n        }\n\n        ++this.mnParseAt;\n        if ( this.mcPrevious == '\\r' ) {\n            ++this.mnLineAt;\n            this.mnCharacter = (long)(c == 10 ? 
0 : 1);\n        }\n        else if ( c == '\\n' ) {\n            this.mnCharacter = 0L;\n            ++this.mnLineAt;\n        }\n        else {\n            ++this.mnCharacter;\n        }\n\n//        if ( c != 0 ) {\n//            this.mszNowAt = this.mszRaw.substring(this.mnParseAt);\n//        }\n        this.mcPrevious = (char)c;\n        return this.mcPrevious;\n    }\n\n    public String next( int n ) throws SelectorParseException {\n        if ( n == 0 ) {\n            return \"\";\n        }\n        else {\n            char[] chars = new char[n];\n\n            for( int pos = 0; pos < n; ++pos ) {\n                chars[pos] = this.next();\n                if ( this.end() ) {\n                    throw this.parseException( \"Error parser XPath string with substring bounds error.\" );\n                }\n            }\n\n            return new String(chars);\n        }\n    }\n\n    public void getNextToken() throws SelectorParseException {\n        this.mTokenType = TokenType.T_UNDEFINED;\n\n        StringBuilder temp = this.mCurrentToken;\n        temp.setLength(0);\n\n        char nextChar = this.next();\n        if ( this.end() ) {\n            return;\n        }\n\n        while ( nextChar != 0 && Character.isWhitespace(nextChar) ) {\n            nextChar = this.next();\n        }\n\n        while ( nextChar == '\\r' ) {\n            nextChar = this.next();\n            if (nextChar == '\\n') {\n                nextChar = this.next();\n            }\n            while ( nextChar != 0 && Character.isWhitespace(nextChar) ) {\n                nextChar = this.next();\n            }\n        }\n\n        if ( nextChar == 0 ) {\n            this.mTokenType = TokenType.T_ENDLINE;\n            return;\n        }\n\n\n        boolean isDoubleQuote = true;\n        if ( nextChar == '\"' || nextChar == '\\'' ) {\n            if ( nextChar == '\\'' ) {\n                isDoubleQuote = false;\n            }\n\n            nextChar = this.next();\n            
while ( (isDoubleQuote && nextChar != '\"') || (!isDoubleQuote && nextChar != '\\'') && nextChar != '\\r' && nextChar != 0 ) {\n                if ( nextChar == '\\\\' ) {\n                    nextChar = this.next();\n                    GeneralStrings.transferCharParse( nextChar, this.mThisCursor, temp );\n                }\n                else {\n                    this.mCurrentToken.append( nextChar );\n                }\n\n                nextChar = this.next();\n            }\n            if ( nextChar == '\\r' || nextChar == 0 ) {\n                throw this.parseException( \"Unexpected End-line, with '\\r' / '\\0'.\" );\n            }\n\n            this.mTokenType = TokenType.T_STRING;\n            return;\n        }\n\n        if ( \"./\".indexOf( nextChar ) >= 0 ) {\n            temp.append((char) nextChar);\n            this.mTokenType = TokenType.T_DELIMITER;\n            return;\n        }\n\n        if ( Character.isLetter( nextChar ) || nextChar == '_' ) {\n            while (!(\"./\".indexOf(nextChar) >= 0 || nextChar == '\\r' || nextChar == '\\t' || nextChar == '\\n' || nextChar == 0)) {\n                temp.append( nextChar );\n                nextChar = this.next();\n            }\n\n            if( \"./\".indexOf(nextChar) >= 0 ){\n                this.back();\n            }\n\n            this.mTokenType = TokenType.T_TEMP;\n        }\n\n//        String szCurrentToken = this.mCurrentToken.toString();\n//        if ( this.mTokenType == TokenType.T_TEMP ) {\n//            this.mTokenType = TokenType.T_KEYWORD;\n//        }\n\n        if ( this.mTokenType == TokenType.T_UNDEFINED ) {\n            throw this.parseException( \"\\nIllegal token found ! 
What-> \\\"\" + this.mCurrentToken.toString() + \"\\\"\" );\n        }\n    }\n\n    public List eval() {\n        int depth = 0;\n        GUID parentGuid = null;\n        List<GUID> preGUIDs = null;\n\n        do {\n            this.getNextToken();\n            if( this.mTokenType == TokenType.T_ENDLINE ) {\n                break;\n            }\n\n            if( this.mTokenType == TokenType.T_DELIMITER ) {\n                continue;\n            }\n\n            String currentPart = this.mCurrentToken.toString();\n            List<GUID > guids;\n\n            if ( depth == 0 ) {\n                guids = this.fetchAllGuidsRootCase( currentPart );\n            }\n            else {\n                // Case3: For middle and last parts, retrieve children GUIDs using distributedTrieTree\n                guids = this.imperialTree.fetchChildrenGuids( parentGuid );\n            }\n\n            this.getNextToken();\n            if ( guids == null || guids.isEmpty() ) {\n                if( this.mTokenType == TokenType.T_ENDLINE ) {\n                    guids = preGUIDs;\n                    if ( guids == null || guids.isEmpty() ) {\n                        continue;\n                    }\n                }\n                else {\n                    continue;\n                }\n            }\n\n            boolean bNone = true;\n            for ( GUID guid : guids ) {\n                List result = this.eval_entities( guid, currentPart, parentGuid );\n                if ( result != null && !result.isEmpty() ) {\n                    if ( this.mTokenType == TokenType.T_ENDLINE ) {\n                        return result;\n                    }\n\n                    parentGuid = guid;\n                    preGUIDs = guids;\n\n                    bNone = false;\n                }\n            }\n            if( bNone ) {\n                break;\n            }\n\n            this.back();\n\n            ++depth;\n        }\n        while ( this.mTokenType != 
TokenType.T_ENDLINE );\n\n        return null;\n    }\n\n    protected List eval_entities( GUID guid, String partName, GUID parentGuid ) {\n        // 在中间部分只匹配文件夹，最后一部分匹配文件和文件夹\n        // In the last part, check both files and directories\n\n        if ( this.mTokenType == TokenType.T_ENDLINE ) {\n            RegistryTreeNode node = this.mRegistry.get( guid );\n            if( !this.checkPartInAllManipulators( guid, partName ) ) {\n                if( node instanceof Properties && node.getGuid().equals( parentGuid ) ) {\n                    return List.of ( ((Properties) node).get( partName ) );\n                }\n                return null;\n            }\n            else {\n                return List.of ( node );\n            }\n        }\n        else {\n            return this.searchAllManipulators( guid, partName );\n        }\n    }\n\n    protected List<GUID > searchAllManipulators ( GUID guid, String partName ) {\n        List<GUID > guids = this.searchDirAndLinks( guid, partName );\n        if( guids != null && !guids.isEmpty() ) {\n            return guids;\n        }\n\n        for ( GUIDNameManipulator manipulator : this.fileManipulators ) {\n            guids = manipulator.getGuidsByNameID( partName, guid );\n            if ( guids != null && !guids.isEmpty() ) {\n                return guids;\n            }\n        }\n        return null;\n    }\n\n    protected RegistryJPathSelector reinit( String szSelector ) {\n        if( szSelector != null ) {\n            // For thread safe.\n            return new RegistryJPathSelector(\n                    new StringReader( szSelector ), this.pathResolver, this.mRegistry, this.dirManipulators[ 0 ], this.fileManipulators\n            );\n        }\n        return this;\n    }\n\n    @Override\n    public List querySelectorAll( String szSelector ) {\n        return this.reinit( szSelector ).eval();\n    }\n\n    @Override\n    public Object querySelector( String szSelector ) {\n        List ret = 
this.reinit( szSelector ).eval();\n        if( ret != null && !ret.isEmpty() ) {\n            return ret.get( 0 );\n        }\n        return null;\n    }\n\n    @Override\n    public Object querySelectorJ( String szSelector ) {\n        Object raw = this.querySelector( szSelector );\n        if( raw instanceof Properties ) {\n            return ((Properties) raw).toJSONObject();\n        }\n        else if( raw instanceof TextFile) {\n            return ((TextFile) raw).toJSON();\n        }\n        else if( raw instanceof Namespace) {\n            return ((Namespace) raw).toJSONObject();\n        }\n        else if( raw instanceof Property ) {\n            return ((Property) raw).getValue();\n        }\n        else if( raw == null ) {\n            return null;\n        }\n\n        JSONObject repare = new JSONMaptron();\n        repare.put( \"type\", raw.getClass().getSimpleName() );\n        repare.put( \"value\", raw );\n        return repare;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/RegistrySelectorCursorParser.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport com.pinecone.framework.system.ParseException;\nimport com.pinecone.framework.util.CursorParser;\n\npublic class RegistrySelectorCursorParser implements CursorParser {\n    protected RegistryJPathSelector mParser;\n\n    protected RegistrySelectorCursorParser( RegistryJPathSelector parser ) {\n        this.mParser = parser;\n    }\n\n    @Override\n    public void back() throws ParseException {\n        this.mParser.back();\n    }\n\n    @Override\n    public char next() throws ParseException {\n        return this.mParser.next();\n    }\n\n    @Override\n    public String next( int n ) throws ParseException {\n        return this.mParser.next(n);\n    }\n\n    @Override\n    public Object nextValue() throws ParseException {\n        return this.mParser.eval();\n    }\n\n    @Override\n    public Object nextValue( Object indexKey, Object parent, Object[] args ) throws ParseException {\n        return this.nextValue();\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/RenderDistributeRegistry.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport com.pinecone.framework.util.template.UniformTemplateRenderer;\n\npublic interface RenderDistributeRegistry extends KOMRegistry {\n    UniformTemplateRenderer getRenderer();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/SelectorParseException.java",
    "content": "package com.pinecone.hydra.registry;\n\nimport com.pinecone.framework.system.ParseException;\n\npublic class SelectorParseException extends ParseException {\n    public SelectorParseException( String what ) {\n        this( what, -1 );\n    }\n\n    public SelectorParseException    ( String what, int errorOffset ) {\n        super( what, errorOffset );\n    }\n\n    public SelectorParseException    ( String message, int errorOffset, Throwable cause ) {\n        super( message, errorOffset, cause );\n    }\n\n    public SelectorParseException    ( Throwable cause, int errorOffset ) {\n        super(cause.getMessage(), errorOffset, cause);\n    }\n\n    public SelectorParseException    ( Throwable cause ) {\n        this( cause, -1 );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ArchConfigNode.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.time.LocalDateTime;\n\npublic abstract class ArchConfigNode extends ArchElementNode implements ConfigNode {\n    protected GUID                    dataAffinityGuid;\n\n    protected ConfigNodeMeta          configNodeMeta;\n\n    protected KOMRegistry             registry;\n\n    protected ArchConfigNode() {\n\n    }\n\n    public ArchConfigNode( KOMRegistry registry ) {\n        this.registry = registry;\n\n        GuidAllocator guidAllocator = this.registry.getGuidAllocator();\n        this.setGuid( guidAllocator.nextGUID() );\n        this.setCreateTime( LocalDateTime.now() );\n    }\n\n\n\n    public void apply( KOMRegistry registry ) {\n        this.registry = registry;\n    }\n\n    @Override\n    public GUID getDataAffinityGuid() {\n        return this.dataAffinityGuid;\n    }\n\n    @Override\n    public void setDataAffinityGuid( GUID parentGuid ) {\n        this.dataAffinityGuid = parentGuid;\n    }\n\n\n\n    @Override\n    public void copyMetaTo( GUID guid ) {\n        this.registry.setDataAffinityGuid( guid, this.getDataAffinityGuid() );\n    }\n\n    @Override\n    public void moveTo( String path ) {\n        this.moveTo( this.registry.affirmNamespace( path ).getGuid() );\n    }\n\n    @Override\n    public void moveTo( GUID destinationGuid ) {\n        this.registry.getMasterTrieTree().moveTo( this.guid, destinationGuid );\n    }\n\n\n\n    @Override\n    public ConfigNodeMeta getConfigNodeMeta() {\n        return this.configNodeMeta;\n    }\n\n    @Override\n    public void setConfigNodeMeta( ConfigNodeMeta configNodeMeta ) {\n        this.configNodeMeta = configNodeMeta;\n    }\n\n\n\n    @Override\n    public void setAttributes( Attributes attributes) {\n        
this.attributes = attributes;\n    }\n\n    @Override\n    public KOMRegistry parentRegistry() {\n        return this.registry;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n\n    protected void putNewCopy(ConfigNode thisCopy, GUID destinationGuid ) {\n        thisCopy.setName( this.getName() );\n        thisCopy.setConfigNodeMeta( this.getConfigNodeMeta() );\n\n        this.registry.put( thisCopy );\n        this.registry.getMasterTrieTree().setGuidLineage( thisCopy.getGuid(), destinationGuid );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ArchElementNode.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic abstract class ArchElementNode implements ElementNode {\n    protected long                    enumId;\n    protected GUID                    guid;\n    protected LocalDateTime           createTime;\n    protected LocalDateTime           updateTime;\n    protected String                  name;\n\n    protected Attributes              attributes;\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    public void setEnumId( long enumId ) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    public void setGuid( GUID guid ) {\n        this.guid = guid;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    public void setCreateTime( LocalDateTime createTime ) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    public void setUpdateTime( LocalDateTime updateTime ) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    @Override\n    public Attributes getAttributes() {\n        return this.attributes;\n    }\n\n    public void setAttributes( Attributes attributes ) {\n        this.attributes = attributes;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/Attributes.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface Attributes extends PineUnit, Map<String, String > {\n    GUID getGuid();\n\n    void setGuid( GUID guid );\n\n    String getAttribute( String key );\n\n    void setAttribute( String key, String value );\n\n    Map<String, String > getAttributes();\n\n    void setAttributes( Map<String, String > attributes );\n\n    ElementNode parentElement();\n\n    @Override\n    default boolean isEmpty() {\n        return this.getAttributes().isEmpty();\n    }\n\n    @Override\n    default int size() {\n        return this.getAttributes().size();\n    }\n\n    @Override\n    default boolean containsKey( Object key ) {\n        return this.getAttributes().containsKey( key );\n    }\n\n    @Override\n    default boolean hasOwnProperty( Object key ) {\n        return this.containsKey( key );\n    }\n\n    @Override\n    default boolean containsValue( Object value ) {\n        return this.getAttributes().containsValue(value);\n    }\n\n    @Override\n    default String get( Object key ) {\n        return this.getAttributes().get(key);\n    }\n\n    @Override\n    default Set<String > keySet() {\n        return this.getAttributes().keySet();\n    }\n\n    @Override\n    default Collection<String > values() {\n        return this.getAttributes().values();\n    }\n\n    @Override\n    default Set<Entry<String, String > > entrySet() {\n        return this.getAttributes().entrySet();\n    }\n\n\n    String insert( String key, String value ) ;\n\n    String update( String key, String value ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ConfigNode.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport java.time.LocalDateTime;\n\npublic interface ConfigNode extends ElementNode {\n    @Override\n    default ConfigNode evinceConfigNode() {\n        return this;\n    }\n\n    void setEnumId( long enumId );\n\n    void setGuid( GUID guid );\n\n    GUID getDataAffinityGuid();\n\n    void setDataAffinityGuid( GUID guid );\n\n    void setCreateTime( LocalDateTime createTime );\n\n    void setUpdateTime( LocalDateTime updateTime );\n\n    void setName( String name );\n\n\n    void copyMetaTo( GUID guid );\n\n\n\n    ConfigNodeMeta getConfigNodeMeta();\n\n    void setConfigNodeMeta( ConfigNodeMeta configNodeMeta );\n\n    void setAttributes( Attributes attributes );\n\n\n\n    KOMRegistry parentRegistry();\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ConfigNodeMeta.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface ConfigNodeMeta extends Pinenut {\n    long getEnumId();\n\n    void setEnumId(long id);\n\n    GUID getGuid();\n\n    void setGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/DefaultPropertyConverter.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\npublic class DefaultPropertyConverter implements TypeConverter {\n    @Override\n    public Object converter( String val, String type ) {\n        return PropertyTypes.queryValue( val, type );\n    }\n\n    @Override\n    public String queryType( Object val ) {\n        return PropertyTypes.queryType( val );\n    }\n\n    @Override\n    public String queryRecognizedType( String type ) {\n        return PropertyTypes.queryRecognizedType( type );\n    }\n\n    @Override\n    public boolean isJSON( Object val ) {\n        return PropertyTypes.isJSON( val );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/DefaultTextValueConverter.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\npublic class DefaultTextValueConverter implements TypeConverter {\n    @Override\n    public Object converter( String val, String type ) {\n        return TextValueTypes.queryValue( val, type );\n    }\n\n    @Override\n    public String queryType( Object val ) {\n        return TextValueTypes.queryType( val );\n    }\n\n    @Override\n    public String queryRecognizedType( String type ) {\n        return TextValueTypes.queryRecognizedType( type );\n    }\n\n    @Override\n    public boolean isJSON( Object val ) {\n        return TextValueTypes.isJSON( val );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/ElementNode.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.Registry;\nimport com.pinecone.hydra.system.ko.meta.ElementObject;\n\npublic interface ElementNode extends RegistryTreeNode, ElementObject {\n    long getEnumId();\n\n    GUID getGuid();\n\n    LocalDateTime getCreateTime();\n\n    LocalDateTime getUpdateTime();\n\n    String getName();\n\n    Attributes getAttributes();\n\n    Registry parentRegistry();\n\n    @Override\n    default String objectCategoryName() {\n        return \"Registry\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericAttributes.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.hydra.registry.source.RegistryAttributesManipulator;\n\npublic class GenericAttributes implements Attributes {\n    protected GUID                            guid;\n    protected Map<String, String >            attributes = new LinkedTreeMap<>();\n    protected ElementNode                     elementNode;\n    protected RegistryAttributesManipulator   attributesManipulator;\n\n    public GenericAttributes( GUID guid, ElementNode element, RegistryAttributesManipulator attributesManipulator ) {\n        this.guid                  = guid;\n        this.elementNode           = element;\n        this.attributesManipulator = attributesManipulator;\n    }\n\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid( GUID guid ) {\n        this.guid = guid;\n    }\n\n\n    @Override\n    public String getAttribute( String key ) {\n        return this.attributes.get(key);\n    }\n\n    @Override\n    public void setAttribute( String key, String value ) {\n        this.put( key, value );\n    }\n\n    @Override\n    public Map<String, String > getAttributes() {\n        return this.attributes;\n    }\n\n    @Override\n    public void setAttributes( Map<String, String > attributes ) {\n        this.attributes = attributes;\n        for( Map.Entry<String, String > kv : attributes.entrySet() ) {\n            this.put( kv.getKey(), kv.getValue() );\n        }\n    }\n\n    @Override\n    public ElementNode parentElement() {\n        return this.elementNode;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.attributes );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    
@Override\n    public void putAll( Map<? extends String, ? extends String> m ) {\n        for( Map.Entry<? extends String, ? extends String> kv : m.entrySet() ) {\n            this.put( kv.getKey(), kv.getValue() );\n        }\n    }\n\n    @Override\n    public String insert( String key, String value ) {\n        if ( !this.attributesManipulator.containsKey( this.guid, key ) ) {\n            this.attributesManipulator.insertAttribute( this.guid, key, value );\n            this.attributes.put( key, value );\n            return value;\n        }\n        return null;\n    }\n\n    @Override\n    public String update( String key, String value ) {\n        if ( !this.attributesManipulator.containsKey( this.guid, key ) ) {\n            this.attributesManipulator.updateAttribute( this.guid, key, value );\n            this.attributes.put( key, value );\n            return value;\n        }\n        return null;\n    }\n\n    @Override\n    public String put( String key, String value ) {\n        if ( this.attributesManipulator.containsKey( this.guid, key ) ) {\n            this.attributesManipulator.updateAttribute( this.guid, key, value );\n        }\n        else {\n            this.attributesManipulator.insertAttribute( this.guid, key, value );\n        }\n\n        return this.attributes.put( key, value );\n    }\n\n    @Override\n    public void clear() {\n        this.attributesManipulator.clearAttributes( this.guid );\n        this.attributes.clear();\n    }\n\n    @Override\n    public boolean remove( Object key, Object value ) {\n        if ( this.attributesManipulator.containsKey( this.guid, key.toString() ) ) {\n            this.attributesManipulator.removeAttributeWithValue( this.guid, key.toString(), value.toString() );\n            this.attributes.remove( key, value );\n            return true;\n        }\n        return false;\n    }\n\n    @Override\n    public String remove( Object key ) {\n        if ( this.attributesManipulator.containsKey( this.guid, 
key.toString() ) ) {\n            this.attributesManipulator.removeAttribute( this.guid, key.toString() );\n            return this.attributes.remove( key );\n        }\n        return null;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericConfigNodeMeta.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic class GenericConfigNodeMeta implements ConfigNodeMeta {\n    private long enumId;\n\n    private GUID guid;\n\n\n    public GenericConfigNodeMeta() {\n    }\n\n    public GenericConfigNodeMeta(long enumId, GUID guid) {\n        this.enumId = enumId;\n        this.guid = guid;\n    }\n\n    @Override\n    public long getEnumId() {\n        return enumId;\n    }\n\n    @Override\n    public void setEnumId( long enumId ) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return guid;\n    }\n\n    @Override\n    public void setGuid( GUID guid ) {\n        this.guid = guid;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericNamespace.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic class GenericNamespace extends ArchElementNode implements Namespace {\n    protected NamespaceMeta                   namespaceMeta;\n\n    protected KOMRegistry registry;\n    protected Map<String, RegistryTreeNode >  children;\n    protected List<GUID >                     childrenGuids;\n\n\n    public GenericNamespace() {\n\n    }\n\n    public GenericNamespace(KOMRegistry registry ) {\n        this.registry = registry;\n\n        GuidAllocator guidAllocator = this.registry.getGuidAllocator();\n        this.setGuid( guidAllocator.nextGUID() );\n        this.setCreateTime( LocalDateTime.now() );\n    }\n\n\n    public void apply( KOMRegistry registry ) {\n        this.registry = registry;\n    }\n\n    @Override\n    public NamespaceMeta getNamespaceWithMeta() {\n        return this.namespaceMeta;\n    }\n\n    @Override\n    public void setNamespaceMeta( NamespaceMeta namespaceMeta ) {\n        this.namespaceMeta = namespaceMeta;\n    }\n\n    /** Thread unsafe */\n    @Override\n    public Map<String, RegistryTreeNode > getChildren() {\n        if( this.children == null ) {\n            Map<String, RegistryTreeNode> nodeHashMap = new LinkedHashMap<>();\n            for( GUID guid : this.childrenGuids ){\n                RegistryTreeNode registryTreeNode = this.registry.get( guid );\n          
      nodeHashMap.put( registryTreeNode.getName(), registryTreeNode );\n            }\n            this.children = nodeHashMap;\n        }\n        return this.children;\n    }\n\n    @Override\n    public List<GUID > fetchChildrenGuids() {\n        return this.childrenGuids;\n    }\n\n    @Override\n    public void setChildrenGuids( List<GUID > contentGuids, int depth ) {\n        this.childrenGuids = contentGuids;\n    }\n\n    @Override\n    public List<RegistryTreeNode > listItem() {\n        ArrayList<RegistryTreeNode > registryTreeNodes = new ArrayList<>();\n        registryTreeNodes.addAll( this.getChildren().values() );\n        return registryTreeNodes;\n    }\n\n    @Override\n    public void put( RegistryTreeNode child ) {\n        String key = child.getName();\n        if ( this.getChildren().get( key ) != null ){\n            throw new IllegalArgumentException( \"key is exist.\" );\n        }\n        this.getChildren().put( key, child );\n        this.registry.affirmOwnedNode( this.guid, child.getGuid() );\n    }\n\n    @Override\n    public void remove( String key ) {\n        RegistryTreeNode registryTreeNode = this.getChildren().get(key);\n        this.registry.remove(registryTreeNode.getGuid());\n        this.getChildren().remove(key);\n    }\n\n    @Override\n    public KOMRegistry parentRegistry() {\n        return this.registry;\n    }\n\n    @Override\n    public boolean containsKey( String key ) {\n        return this.getChildren().containsKey(key);\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        Map<String, RegistryTreeNode > children = this.getChildren();\n        JSONObject jo = new JSONMaptron();\n\n        for( Map.Entry<String, RegistryTreeNode > kv : children.entrySet() ) {\n            if( kv.getValue().evinceNamespace() != null ) {\n                jo.put( kv.getKey(), kv.getValue().evinceNamespace().toJSONObject() );\n            }\n            else if( kv.getValue().evinceProperties() != null ) {\n          
      jo.put( kv.getKey(), kv.getValue().evinceProperties().toJSONObject() );\n            }\n            else if( kv.getValue().evinceTextFile() != null ) {\n                jo.put( kv.getKey(), kv.getValue().evinceTextFile().toJSON() );\n            }\n        }\n        return jo;\n    }\n\n    @Override\n    public ConfigNode getConfigNode(String key ) {\n        return (ConfigNode) this.getChildren().get(key);\n    }\n\n    @Override\n    public Namespace getNamespace( String key ) {\n        return (Namespace) this.getChildren().get( key );\n    }\n\n    @Override\n    public int size() {\n        return this.childrenGuids.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.childrenGuids.isEmpty();\n    }\n\n    @Override\n    public Set<String> keySet() {\n        return this.getChildren().keySet();\n    }\n\n    @Override\n    public Set<Map.Entry<String, RegistryTreeNode > > entrySet() {\n        return this.getChildren().entrySet();\n    }\n\n    @Override\n    public void copyTo( String path ) {\n        this.copyTo( this.registry.affirmNamespace( path ).getGuid() );\n    }\n\n    @Override\n    public void copyTo( GUID destinationGuid ) {\n        List<TreeNode > destChildren = this.registry.getChildren( destinationGuid );\n        Namespace thisCopy = null;\n        for( TreeNode node : destChildren ) {\n            if( this.getName().equals( node.getName() ) ) {\n                if( node instanceof Namespace) {\n                    thisCopy = (Namespace) node;\n                    break;\n                }\n                else {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be namespace.\", this.getName() )\n                    );\n                }\n            }\n        }\n\n        // Child-Destination non-exist.\n        if( thisCopy == null ) {\n            this.copyNamespaceMetaTo( destinationGuid );\n\n            
thisCopy = new GenericNamespace( this.registry );\n            thisCopy.setName( this.getName() );\n            thisCopy.setNamespaceMeta( this.getNamespaceWithMeta() );\n\n            this.registry.put( thisCopy );\n            this.registry.getMasterTrieTree().setGuidLineage( thisCopy.getGuid(), destinationGuid );\n        }\n\n        this.copyChildrenTo( thisCopy.getGuid() );\n    }\n\n    @Override\n    public void copyChildrenTo( GUID destinationGuid ) {\n        Collection<RegistryTreeNode > childrenNodes = this.getChildren().values();\n        for ( RegistryTreeNode node : childrenNodes ) {\n            node.copyTo( destinationGuid );\n        }\n    }\n\n    @Override\n    public void moveTo( String path ) {\n        this.moveTo( this.registry.affirmNamespace( path ).getGuid() );\n    }\n\n    @Override\n    public void moveTo( GUID destinationGuid ) {\n        this.registry.getMasterTrieTree().moveTo( this.guid, destinationGuid );\n    }\n\n    @Override\n    public void copyNamespaceMetaTo( GUID destinationGuid ) {\n\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"name\"        , this.getName()            ),\n                new KeyValue<>( \"guid\"        , this.getGuid()            ),\n                new KeyValue<>( \"createTime\"  , this.getCreateTime()      ),\n                new KeyValue<>( \"updateTime\"  , this.getUpdateTime()      ),\n                new KeyValue<>( \"childrenSize\", this.childrenGuids.size() ),\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericNamespaceMeta.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic class GenericNamespaceMeta implements NamespaceMeta {\n    private long enumId;\n\n    private GUID guid;\n\n    public GenericNamespaceMeta() {\n    }\n\n    public GenericNamespaceMeta(long enumId, GUID guid ) {\n        this.enumId = enumId;\n        this.guid = guid;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericProperties.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.unit.UniScopeMap;\nimport com.pinecone.framework.unit.UniScopeMaptron;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.io.IOException;\nimport java.io.StringWriter;\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic class GenericProperties extends ArchConfigNode implements Properties {\n    protected Properties parent;\n\n    protected UniScopeMap<String, Property > properties = new UniScopeMaptron<>();\n\n    public GenericProperties() {\n    }\n\n    public GenericProperties( KOMRegistry registry ) {\n        super( registry );\n    }\n\n    @Override\n    public Properties getAffinityParent() {\n        return this.parent;\n    }\n\n    @Override\n    public void setAffinityParent( Properties parent ) {\n        this.parent = parent;\n    }\n\n    public Properties getOwner( String szKey ) {\n        Properties owned = this;\n        while ( owned != null ) {\n            if( owned.hasOwnProperty( szKey ) ) {\n                break;\n            }\n\n            owned = owned.getAffinityParent();\n        }\n        return owned;\n    }\n\n    @Override\n    public void put( String key, Object val ) {\n        Property p = new GenericProperty( this );\n        p.setKey( key );\n        p.setValue( val );\n\n        this.putProperty( p );\n    }\n\n    @Override\n    public void puts( Map<String, Object > map ) {\n        for( Map.Entry<String, Object > kv : map.entrySet() ) {\n            this.put( kv.getKey(), kv.getValue() );\n        }\n    }\n\n    @Override\n    public void putProperty( Property property ) 
{\n        String szKey     = property.getKey();\n        Properties owned = this.getOwner( szKey );\n\n        property.setCreateTime( LocalDateTime.now() );\n        property.setUpdateTime( LocalDateTime.now() );\n        if( owned == null ) {\n            // Insert to current scope.\n            property.setGuid( this.guid );\n            this.properties.put( property.getKey(), property );\n            this.registry.putProperty( property, this.guid );\n        }\n        else {\n            owned.updateFromDummy( property );\n        }\n    }\n\n    @Override\n    public void remove( String key ) {\n        Properties owner = this.getOwner( key );\n        if( owner != null ) {\n            this.properties.remove( key );\n            this.registry.removeProperty( owner.getGuid(), key );\n        }\n    }\n\n    @Override\n    public void update( Property property ) {\n        if( property.getGuid().equals( this.guid ) ) {\n            Property p = this.get( property.getKey() );\n            // If p == property, which is owned element, no needs to copy.\n            if( p != null && p != property ) {\n                p.from( property );\n                property = p;\n            }\n        }\n\n        this.registry.updateProperty( property );\n    }\n\n    @Override\n    public void updateFromDummy( Property dummy ) {\n        Property p = this.get( dummy.getKey() );\n        // If p == property, which is owned element, no needs to copy.\n        if( p != null ) {\n            p.from( dummy );\n        }\n        this.registry.updateProperty( p );\n    }\n\n    @Override\n    public void set( String key, Object val ) {\n        Property p = this.get( key );\n        if( p != null ) {\n            p.setValue( val );\n        }\n\n        this.registry.updateProperty( p );\n    }\n\n    @Override\n    public Property get( String key ) {\n        return this.properties.get( key );\n    }\n\n    @Override\n    public Object getValue( String key ) {\n        
Property property = this.get( key );\n        if( property != null ) {\n            return property.getValue();\n        }\n        return null;\n    }\n\n    @Override\n    public boolean containsKey( String key ) {\n        return this.properties.containsKey( key );\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.properties.containsKey( key );\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object key ) {\n        return this.properties.hasOwnProperty( key );\n    }\n\n    @Override\n    public int size() {\n        return this.properties.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return !this.properties.isEmpty();\n    }\n\n    @Override\n    public Collection<Object > values() {\n        ArrayList<Object > values = new ArrayList<>();\n        for( Property p : this.properties.values() ){\n            values.add(p.getValue());\n        }\n        return values;\n    }\n\n    @Override\n    public Set<String > keySet() {\n        HashSet<String > keys = new HashSet<>();\n        for ( Property p : this.properties.values() ){\n            keys.add( p.getKey() );\n        }\n        return keys;\n    }\n\n    @Override\n    public Set<Property > entrySet() {\n        HashSet<Property> propertyHashSet = new HashSet<>();\n        for( Property p : this.properties.values() ){\n            propertyHashSet.add(p);\n        }\n        return propertyHashSet;\n    }\n\n    @Override\n    public void copyValueTo( GUID destinationGuid ) {\n        if ( destinationGuid != null ){\n            this.registry.copyPropertiesTo( this.guid, destinationGuid );\n        }\n    }\n\n\n    @Override\n    public Collection<Property > getProperties() {\n        return this.properties.values();\n    }\n\n    @Override\n    public Map<String, Object > toMap() {\n        Map<String, Object > jo = new LinkedHashMap<>();\n        LinkedHashMap<String, Property > overridden = new LinkedHashMap<>();\n        
this.properties.overrideTo( overridden );\n\n        for( Property property : overridden.values() ) {\n            jo.put( property.getKey(), property.getValue() );\n        }\n        return jo;\n    }\n\n    @Override\n    public UniScopeMap<String, Property > getPropertiesMap() {\n        return this.properties;\n    }\n\n    @Override\n    public void setProperties( List<Property > properties ) {\n        this.properties = new UniScopeMaptron<>();\n        for( Property p : properties ) {\n            this.properties.put( p.getKey(), p );\n        }\n    }\n\n    @Override\n    public void setProperties( UniScopeMap<String, Property > properties ) {\n        this.properties = properties;\n    }\n\n    @Override\n    public void setThisProperties( Map<String, Property> properties ) {\n        this.properties.setThisScope( properties );\n    }\n\n    @Override\n    public void setParentProperties( UniScopeMap<String, Property> parent ) {\n        this.properties.setParent( parent );\n    }\n\n    @Override\n    public KOMRegistry parentRegistry() {\n        return this.registry;\n    }\n\n    @Override\n    public void copyTo( String path ) {\n        this.copyTo( this.registry.affirmProperties( path ).getGuid() );\n    }\n\n    @Override\n    public void copyTo( GUID destinationGuid ) {\n        Properties thisCopy = null;\n        RegistryTreeNode tn = this.registry.get( destinationGuid );\n        if( tn.evinceProperties() == null ) {\n            List<TreeNode > destChildren = this.registry.getChildren( destinationGuid );\n            for( TreeNode node : destChildren ) {\n                if( this.getName().equals( node.getName() ) ) {\n                    if( node instanceof Properties  ) {\n                        thisCopy = (Properties) node;\n                        break;\n                    }\n                    else {\n                        throw new IllegalArgumentException(\n                                String.format( \"Existed 
child-destination [%s] should be properties.\", this.getName() )\n                        );\n                    }\n                }\n            }\n        }\n        else {\n            thisCopy = (Properties) tn;\n        }\n\n        // Child-Destination non-exist.\n        if( thisCopy == null ) {\n            thisCopy = new GenericProperties( this.registry );\n            this.putNewCopy( thisCopy, destinationGuid );\n        }\n\n        this.copyMetaTo( thisCopy.getGuid() );\n        this.copyValueTo( thisCopy.getGuid() );\n    }\n\n    @Override\n    public String toJSONString() {\n        try{\n            PropertyJSONEncoder encoder = new PropertyJSONEncoder();\n            try( StringWriter writer = new StringWriter() ){\n                encoder.write( this, writer );\n                return writer.toString();\n            }\n        }\n        catch ( IOException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericProperty.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\nimport java.time.LocalDateTime;\n\npublic class GenericProperty implements Property {\n    private    long          enumId;\n    private    GUID          guid;\n    private    String        key;\n    private    String        type;\n    private    LocalDateTime createTime;\n    private    LocalDateTime updateTime;\n    private    Object        rawValue;  //TODO\n    private    Object        value;\n\n    protected  Properties    properties;\n\n    public GenericProperty() {\n\n    }\n\n    public GenericProperty( Properties properties ) {\n        this.properties = properties;\n    }\n\n    public GenericProperty(\n            Properties properties,\n            long enumId, GUID guid, String key, String type, LocalDateTime createTime,\n            LocalDateTime updateTime, String value\n    ) {\n        this( properties );\n\n        this.enumId = enumId;\n        this.guid = guid;\n        this.key = key;\n        this.type = type;\n        this.createTime = createTime;\n        this.updateTime = updateTime;\n        this.rawValue = value;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public String getKey() {\n        return this.key;\n    }\n\n    @Override\n    public void setKey(String key) {\n        this.key = key;\n    }\n\n    @Override\n    public String getType() {\n        return this.type;\n    }\n\n    @Override\n    public void setType(String type) {\n        this.type = type;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n       
 return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime( LocalDateTime updateTime ) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public Object getRawValue() {\n        return this.rawValue;\n    }\n\n    protected String queryType( Object val ) {\n        return this.parentProperties().parentRegistry().getPropertyTypeConverter().queryType( val );\n    }\n\n    protected Object converterValue( String val, String type ) {\n        return this.parentProperties().parentRegistry().getPropertyTypeConverter().converter( val, type );\n    }\n\n    @Override\n    public void setRawValue( Object rawValue ) {\n        this.rawValue = rawValue;\n        this.value    = this.converterValue( this.rawValue.toString(), this.type );\n    }\n\n    @Override\n    public Object getValue() {\n        return this.value;\n    }\n\n    @Override\n    public void setValue( Object value ) {\n        this.rawValue = value.toString();\n        this.type     = this.queryType( value );\n        this.value    = this.converterValue( this.rawValue.toString(), this.type );\n    }\n\n    @Override\n    public boolean isStringBasedType() {\n        return PropertyTypes.isStringBasedType( this.type );\n    }\n\n    @Override\n    public void fromValue( Property that ) {\n        this.key          = that.getKey();\n        this.type         = that.getType();\n        this.rawValue        = that.getValue();\n    }\n\n    @Override\n    public void from( Property that ) {\n        this.fromValue( that );\n        this.createTime   = that.getCreateTime();\n        this.updateTime   = that.getUpdateTime();\n    }\n\n    @Override\n    public void copy( Property that ) {\n        this.setEnumId( that.getEnumId() );\n        this.setGuid( 
that.getGuid() );\n\n        this.from( that );\n    }\n\n    @Override\n    public Properties parentProperties() {\n        return this.properties;\n    }\n\n    @Override\n    public void setParentProperties( Properties parentProperties ) {\n        this.properties = parentProperties;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericTextFile.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.util.List;\n\npublic class GenericTextFile extends ArchConfigNode implements TextFile {\n    protected TextValue             mTextValue;\n\n    public GenericTextFile() {\n    }\n\n    public GenericTextFile( KOMRegistry registry ) {\n        this.registry = registry;\n    }\n\n    @Override\n    public void setTextValue( TextValue textValue ) {\n        this.mTextValue = textValue;\n    }\n\n    @Override\n    public void put( TextValue textValue ) {\n        if( this.mTextValue == null ) {\n            this.registry.putTextValue( textValue.getGuid(), textValue.getValue(), textValue.getType() );\n        }\n        else {\n            this.update( textValue );\n            this.mTextValue = textValue;\n        }\n    }\n\n    @Override\n    public Object decode() {\n        TextValue value = this.get();\n        return this.registry.getTextValueTypeConverter().converter( value.getValue(), value.getType() );\n    }\n\n    @Override\n    public Object toJSON() {\n        TextValue value = this.get();\n        String type = this.registry.getTextValueTypeConverter().queryRecognizedType( value.getType() );\n        Object ret  = this.decode();\n        if( type == null || !this.registry.getTextValueTypeConverter().isJSON( ret ) ) {\n            JSONObject reparse = new JSONMaptron();\n            reparse.put( \"type\", value.getType() );\n            reparse.put( \"value\", value.getValue() );\n            return reparse;\n        }\n        return ret;\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        this.registry.removeTextValue(guid);\n    }\n\n    @Override\n    
public void update( TextValue textValue ) {\n        this.registry.updateTextValue( textValue, this.guid );\n    }\n\n\n    @Override\n    public void update( String text, String format ) {\n        TextValue textValue = GenericTextValue.newUpdateTextValue( this.guid, text, format );\n        this.update( textValue );\n    }\n\n    @Override\n    public void put( String text, String format ) {\n        if( this.mTextValue == null ) {\n            this.registry.putTextValue( this.guid, text, format );\n        }\n        else {\n            this.update( text, format );\n        }\n    }\n\n    @Override\n    public TextValue get() {\n        return this.mTextValue;\n    }\n\n\n\n    public KOMRegistry parentRegistry() {\n        return this.registry;\n    }\n\n\n    public void setRegistry(KOMRegistry registry) {\n        this.registry = registry;\n    }\n\n    @Override\n    public void copyTo( String path ) {\n        this.copyTo( this.registry.affirmTextConfig( path ).getGuid() );\n    }\n\n    @Override\n    public void copyTo( GUID destinationGuid ) {\n        TextFile thisCopy = null;\n        RegistryTreeNode tn = this.registry.get( destinationGuid );\n        if( tn.evinceTextFile() == null ) {\n            List<TreeNode> destChildren = this.registry.getChildren( destinationGuid );\n            for( TreeNode node : destChildren ) {\n                if( this.getName().equals( node.getName() ) ) {\n                    if( node instanceof TextFile) {\n                        thisCopy = (TextFile) node;\n                        break;\n                    }\n                    else {\n                        throw new IllegalArgumentException(\n                                String.format( \"Existed child-destination [%s] should be text config.\", this.getName() )\n                        );\n                    }\n                }\n            }\n        }\n        else {\n            thisCopy = (TextFile) tn;\n        }\n\n\n        // Child-Destination 
non-exist.\n        if( thisCopy == null ) {\n            thisCopy = new GenericTextFile( this.registry );\n\n            this.putNewCopy( thisCopy, destinationGuid );\n        }\n\n        this.copyMetaTo( thisCopy.getGuid() );\n        this.copyValueTo( thisCopy.getGuid() );\n    }\n\n    @Override\n    public void copyValueTo( GUID destinationGuid ) {\n        if ( destinationGuid != null ){\n            this.registry.copyTextValueTo( this.guid, destinationGuid );\n        }\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.toJSON() );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/GenericTextValue.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\nimport java.time.LocalDateTime;\n\npublic class GenericTextValue implements TextValue {\n    private long enumId;\n    private GUID guid;\n    private String value;\n    private String type;\n    private LocalDateTime createTime;\n    private LocalDateTime updateTime;\n\n    public GenericTextValue() {\n    }\n\n    public GenericTextValue( GUID guid, String value, String type ) {\n        this.setGuid( guid );\n        this.setValue( value );\n        this.setType( type );\n        this.setCreateTime( LocalDateTime.now() );\n        this.setUpdateTime( LocalDateTime.now() );\n    }\n\n    public GenericTextValue( long enumId, GUID guid, String value, String type, LocalDateTime createTime, LocalDateTime updateTime ) {\n        this.enumId = enumId;\n        this.guid = guid;\n        this.value = value;\n        this.type = type;\n        this.createTime = createTime;\n        this.updateTime = updateTime;\n    }\n\n    /**\n     * 获取\n     * @return enumId\n     */\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    /**\n     * 设置\n     * @param enumId\n     */\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    /**\n     * 获取\n     * @return guid\n     */\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    /**\n     * 设置\n     * @param guid\n     */\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    /**\n     * 获取\n     * @return value\n     */\n    @Override\n    public String getValue() {\n        return this.value;\n    }\n\n    /**\n     * 设置\n     * @param value\n     */\n    @Override\n    public void setValue(String value) {\n        this.value = value;\n    }\n\n    /**\n     * 获取\n     * @return type\n     */\n    
@Override\n    public String getType() {\n        return this.type;\n    }\n\n    /**\n     * 设置\n     * @param type\n     */\n    @Override\n    public void setType(String type) {\n        this.type = type;\n    }\n\n    /**\n     * 获取\n     * @return createTime\n     */\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    /**\n     * 设置\n     * @param createTime\n     */\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    /**\n     * 获取\n     * @return updateTime\n     */\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return updateTime;\n    }\n\n    /**\n     * 设置\n     * @param updateTime\n     */\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n\n\n    public static TextValue newUpdateTextValue( GUID guid, String text, String format ) {\n        TextValue textValue = new GenericTextValue();\n        textValue.setGuid( guid );\n        textValue.setUpdateTime(LocalDateTime.now());\n        textValue.setValue(text);\n        textValue.setType(format);\n\n        return textValue;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/Namespace.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.registry.KOMRegistry;\n\nimport java.time.LocalDateTime;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic interface Namespace extends ElementNode {\n    long getEnumId();\n\n    void setEnumId( long enumId );\n\n    GUID getGuid();\n\n    void setGuid( GUID guid );\n\n    String getName();\n\n    void setName( String name );\n\n    LocalDateTime getCreateTime();\n\n    @Override\n    default Namespace evinceNamespace() {\n        return this;\n    }\n\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n\n    void setUpdateTime( LocalDateTime updateTime );\n\n    NamespaceMeta getNamespaceWithMeta();\n\n    void setNamespaceMeta( NamespaceMeta namespaceMeta );\n\n    Attributes getAttributes();\n\n    void setAttributes(Attributes attributes);\n\n    Map<String, RegistryTreeNode > getChildren();\n\n    List<GUID > fetchChildrenGuids();\n\n    void setChildrenGuids( List<GUID> contentGuids, int depth );\n\n    List<RegistryTreeNode > listItem();\n\n\n    void put ( RegistryTreeNode child );\n\n    void remove ( String key );\n\n    KOMRegistry parentRegistry();\n\n    boolean containsKey  ( String key );\n\n\n\n    JSONObject toJSONObject();\n\n    ConfigNode getConfigNode(String key );\n\n    Namespace getNamespace( String key );\n\n\n\n\n    int size();\n\n    boolean isEmpty();\n\n    Set<String > keySet();\n\n    Set<Map.Entry<String,RegistryTreeNode>> entrySet();\n\n    void copyTo( String path ) ;\n\n    /**\n     * Copy itself and its owned elements into destination.\n     * 复制自己和自己的孩子元素到目的地址.\n     * @param destinationGuid Guid of destination.\n     */\n    void copyTo( GUID destinationGuid );\n\n    /**\n     * Only copy its owned elements into destination.\n     * 仅复制自己的孩子元素到目的地址.\n     * @param 
destinationGuid Guid of destination.\n     */\n    void copyChildrenTo( GUID destinationGuid );\n\n    void copyNamespaceMetaTo( GUID destinationGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/NamespaceMeta.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface NamespaceMeta extends Pinenut {\n    long getEnumId();\n    void setEnumId(long id);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/Properties.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.unit.UniScopeMap;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic interface Properties extends ConfigNode, PineUnit {\n\n    Properties getAffinityParent();\n\n    void setAffinityParent( Properties parent );\n\n    Collection<Property > getProperties();\n\n    Map<String, Object > toMap();\n\n    default JSONObject toJSONObject() {\n        return new JSONMaptron( this.toMap(), true );\n    }\n\n    UniScopeMap<String, Property > getPropertiesMap();\n\n    void setProperties       ( List<Property> properties );\n\n    void setProperties       ( UniScopeMap<String, Property > properties );\n\n    void setThisProperties   ( Map<String, Property > properties );\n\n    void setParentProperties ( UniScopeMap<String, Property > parent );\n\n    Properties getOwner      ( String szKey );\n\n    void put                 ( String key, Object val );\n\n    void puts                ( Map<String, Object > map );\n\n    void putProperty         ( Property property );\n\n    void remove              ( String key );\n\n    void update              ( Property property );\n\n    void updateFromDummy     ( Property dummy );\n\n    void set                 ( String key, Object val );\n\n    Property get             ( String key );\n\n    Object getValue          ( String key );\n\n    boolean containsKey      ( String key );\n\n    boolean containsKey      ( Object key );\n\n    boolean hasOwnProperty   ( Object key );\n\n    int size();\n\n    boolean isEmpty();\n\n    Collection<Object > values();\n\n    Set<String > keySet();\n\n    Set<Property > entrySet();\n\n    @Override\n    default Properties evinceProperties() 
{\n        return this;\n    }\n\n    void copyValueTo( GUID destinationGuid );\n\n    void copyTo    ( GUID destinationGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/Property.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface Property extends Pinenut {\n    static Property newDummy() {\n        return new GenericProperty();\n    }\n\n    long getEnumId();\n\n    void setEnumId( long enumId );\n\n    GUID getGuid();\n\n    void setGuid( GUID guid );\n\n    String getKey();\n\n    void setKey( String key );\n\n    String getType();\n\n    void setType( String type );\n\n    Object getRawValue();\n\n    void setRawValue( Object value );\n\n    Object getValue();\n\n    void setValue( Object value );\n\n    boolean isStringBasedType() ;\n\n    LocalDateTime getCreateTime();\n\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n\n    void  setUpdateTime( LocalDateTime updateTime );\n\n    // Not copy guid\n    void fromValue ( Property that );\n\n    void from      ( Property that );\n\n    void copy      ( Property that );\n\n    Properties parentProperties();\n\n    void setParentProperties( Properties parentProperties );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/PropertyJSONEncoder.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport java.io.IOException;\nimport java.io.Writer;\nimport java.util.Iterator;\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.UniScopeMap;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.GenericJSONEncoder;\n\npublic class PropertyJSONEncoder extends GenericJSONEncoder {\n    protected boolean mbSimpleEncode;\n\n    public PropertyJSONEncoder( boolean bSimpleEncode ) {\n        this.mbSimpleEncode = bSimpleEncode;\n    }\n\n    public PropertyJSONEncoder() {\n        this( true );\n    }\n\n    @Override\n    public Writer write                    ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if( that instanceof Properties ) {\n            this.write( (Properties) that, writer, nIndentFactor, nIndentBlankNum );\n            return writer;\n        }\n\n        return super.write( that, writer, nIndentFactor, nIndentBlankNum );\n    }\n\n    public Writer write( Properties that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        writer.write(\"{\");\n\n        UniScopeMap<String, Property > propertyMap = that.getPropertiesMap();\n        int nNewIndent          = nIndentBlankNum + nIndentFactor;\n        boolean bHasNextElement = false;\n        if ( that.getAffinityParent() != null ) {\n            GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, false );\n            writer.write(\"\\\"__parent__\\\": \");\n            this.write( that.getAffinityParent(), writer, nIndentFactor, nNewIndent );\n            bHasNextElement = true;\n        }\n\n        Iterator<Map.Entry<String, Property > > iter = propertyMap.entrySet().iterator();\n        for( ; iter.hasNext(); bHasNextElement = true ) {\n            GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement );\n            Map.Entry<String, 
Property > kv = iter.next();\n\n            writer.write( StringUtils.jsonQuote( kv.getKey() ) );\n            writer.write(':');\n            if ( nIndentFactor > 0 ) {\n                writer.write( ' ');\n            }\n\n            if( this.mbSimpleEncode ) {\n                this.write( kv.getValue().getValue(), writer, nIndentFactor, nIndentBlankNum  );\n            }\n            else {\n                this.write( kv.getValue(), writer, nIndentFactor, nIndentBlankNum  );\n            }\n        }\n\n        if ( nIndentFactor > 0 ) {\n            writer.write( '\\n' );\n        }\n\n        GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n\n        writer.write(\"}\");\n\n        return writer;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/PropertyTypes.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport java.util.Date;\n\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONArraytron;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic final class PropertyTypes {\n    public final static String ELEMENT_STRING_TYPE_NAME      = \"String\";\n    public final static String ELEMENT_INT64_TYPE_NAME       = \"int64\";\n    public final static String ELEMENT_INT32_TYPE_NAME       = \"int32\";\n    public final static String ELEMENT_FLOAT32_TYPE_NAME     = \"float32\";\n    public final static String ELEMENT_FLOAT64_TYPE_NAME     = \"float64\";\n    public final static String ELEMENT_BOOLEAN_TYPE_NAME     = \"bool\";\n    public final static String ELEMENT_NULL_TYPE_NAME        = \"Null\";\n\n    public final static String ELEMENT_JSONOBJECT_TYPE_NAME  = \"JSONObject\";\n    public final static String ELEMENT_JSONARRAY_TYPE_NAME   = \"JSONArray\";\n\n    public static String queryType( Object val ) {\n        String type = PropertyTypes.ELEMENT_STRING_TYPE_NAME;\n        if( val == null ) {\n            type = PropertyTypes.ELEMENT_NULL_TYPE_NAME;\n        }\n        else if( val instanceof JSONObject ) {\n            type = PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME;\n        }\n        else if( val instanceof JSONArray ) {\n            type = PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME;\n        }\n        else if( val instanceof Byte || val instanceof Short || val instanceof Integer ) {\n            type = PropertyTypes.ELEMENT_INT32_TYPE_NAME;\n        }\n        else if( val instanceof Long ) {\n            type = PropertyTypes.ELEMENT_INT64_TYPE_NAME;\n        }\n        else if( val instanceof Float ) {\n            type = PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME;\n        }\n        else if( val instanceof Double ) {\n            type = PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME;\n        }\n     
   else if( val instanceof Boolean ) {\n            type = PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME;\n        }\n\n        return type;\n    }\n\n    public static Object queryValue( String val, String type ) {\n        switch ( type ) {\n            case PropertyTypes.ELEMENT_NULL_TYPE_NAME: {\n                return null;\n            }\n            case PropertyTypes.ELEMENT_STRING_TYPE_NAME :{\n                return val;\n            }\n            case PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME :{\n                return new JSONMaptron( val );\n            }\n            case PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME :{\n                return new JSONArraytron( val );\n            }\n            case PropertyTypes.ELEMENT_INT32_TYPE_NAME :{\n                return Integer.parseInt( val );\n            }\n            case PropertyTypes.ELEMENT_INT64_TYPE_NAME :{\n                return Long.parseLong( val );\n            }\n            case PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME :{\n                return Float.parseFloat( val );\n            }\n            case PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME :{\n                return Double.parseDouble( val );\n            }\n            case PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME :{\n                return Boolean.parseBoolean( val );\n            }\n        }\n        return null;\n    }\n\n    public static String queryRecognizedType( String type ) {\n        switch ( type ) {\n            case PropertyTypes.ELEMENT_NULL_TYPE_NAME: {\n                return PropertyTypes.ELEMENT_NULL_TYPE_NAME;\n            }\n            case PropertyTypes.ELEMENT_STRING_TYPE_NAME :{\n                return PropertyTypes.ELEMENT_STRING_TYPE_NAME;\n            }\n            case PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME :{\n                return PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME;\n            }\n            case PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME :{\n                return 
PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME;\n            }\n            case PropertyTypes.ELEMENT_INT32_TYPE_NAME :{\n                return PropertyTypes.ELEMENT_INT32_TYPE_NAME;\n            }\n            case PropertyTypes.ELEMENT_INT64_TYPE_NAME :{\n                return PropertyTypes.ELEMENT_INT64_TYPE_NAME;\n            }\n            case PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME :{\n                return PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME;\n            }\n            case PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME :{\n                return PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME;\n            }\n            case PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME :{\n                return PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME;\n            }\n        }\n        return null;\n    }\n\n    public static boolean isStringBasedType( String type ) {\n        switch ( type ) {\n            case PropertyTypes.ELEMENT_STRING_TYPE_NAME :{\n                return true;\n            }\n            case PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME :{\n                return true;\n            }\n            case PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME :{\n                return true;\n            }\n        }\n\n        return false;\n    }\n\n    public static boolean isJSON( Object val ) {\n        Class<?> type = val.getClass();\n        return val instanceof JSONObject || val instanceof JSONArray || (\n                type.isPrimitive() ||\n                        type == String.class ||\n                        Number.class.isAssignableFrom(type) ||\n                        type == Boolean.class ||\n                        type == Character.class ||\n                        type == Date.class ||\n                        type.isEnum() ||\n                        type == byte[].class\n        );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/RegistryTreeNode.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface RegistryTreeNode extends TreeNode {\n\n    default ConfigNode evinceConfigNode(){\n        return null;\n    }\n\n    default Namespace evinceNamespace(){\n        return null;\n    }\n\n    default Properties evinceProperties() {\n        return null;\n    }\n\n    default TextFile evinceTextFile() {\n        return null;\n    }\n\n    void copyTo( String path );\n\n    void copyTo( GUID guid );\n\n    void moveTo( String path );\n\n    void moveTo( GUID destinationGuid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/TextFile.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface TextFile extends ConfigNode {\n    void setTextValue( TextValue textValue );\n\n    void put     ( TextValue textValue );\n\n    void remove  ( GUID guid );\n\n    void update  ( TextValue textValue );\n\n    void update  ( String text, String format ) ;\n\n    void put     ( String text, String format ) ;\n\n    TextValue get ();\n\n    Object decode();\n\n    Object toJSON();\n\n    void copyValueTo( GUID destinationGuid );\n\n    void copyTo    ( GUID destinationGuid );\n\n    @Override\n    default TextFile evinceTextFile() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/TextValue.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface TextValue extends Pinenut {\n    long getEnumId();\n\n    void setEnumId( long enumId );\n\n    GUID getGuid();\n\n    void setGuid( GUID guid );\n\n    String getValue();\n\n    void setValue( String value );\n\n    LocalDateTime getCreateTime();\n\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n\n    void setUpdateTime( LocalDateTime updateTime );\n\n    String getType();\n\n    void setType( String type );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/TextValueTypes.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\npublic final class TextValueTypes {\n    public final static String STRING_TYPE_NAME      = PropertyTypes.ELEMENT_STRING_TYPE_NAME;\n    public final static String INT64_TYPE_NAME       = PropertyTypes.ELEMENT_INT64_TYPE_NAME;\n    public final static String INT32_TYPE_NAME       = PropertyTypes.ELEMENT_INT32_TYPE_NAME;\n    public final static String FLOAT32_TYPE_NAME     = PropertyTypes.ELEMENT_FLOAT32_TYPE_NAME;\n    public final static String FLOAT64_TYPE_NAME     = PropertyTypes.ELEMENT_FLOAT64_TYPE_NAME;\n    public final static String BOOLEAN_TYPE_NAME     = PropertyTypes.ELEMENT_BOOLEAN_TYPE_NAME;\n    public final static String NULL_TYPE_NAME        = PropertyTypes.ELEMENT_NULL_TYPE_NAME;\n\n    public final static String JSONOBJECT_TYPE_NAME  = PropertyTypes.ELEMENT_JSONOBJECT_TYPE_NAME;\n    public final static String JSONARRAY_TYPE_NAME   = PropertyTypes.ELEMENT_JSONARRAY_TYPE_NAME;\n\n    public final static String YAML_TYPE_NAME        = \"Yaml\";\n    public final static String XML_TYPE_NAME         = \"XML\";\n    public final static String INI_TYPE_NAME         = \"INI\";\n\n\n    public static String queryType( Object val ) {\n        return PropertyTypes.queryType( val );\n    }\n\n    public static Object queryValue( String val, String type ) {\n        if( val == null ) {\n            return null;\n        }\n\n        Object ret = PropertyTypes.queryValue( val, type );\n        if( ret != null ) {\n            return ret;\n        }\n\n        return val;\n    }\n\n    public static String queryRecognizedType( String type ) {\n        String ret = PropertyTypes.queryRecognizedType( type );\n        if( ret == null ) {\n            switch ( type ) {\n                case TextValueTypes.YAML_TYPE_NAME: {\n                    return TextValueTypes.YAML_TYPE_NAME;\n                }\n                case TextValueTypes.XML_TYPE_NAME: {\n                    return 
TextValueTypes.XML_TYPE_NAME;\n                }\n                case TextValueTypes.INI_TYPE_NAME: {\n                    return TextValueTypes.INI_TYPE_NAME;\n                }\n            }\n        }\n        else {\n            return ret;\n        }\n\n        return null;\n    }\n\n    public static boolean isJSON( Object val ) {\n        return PropertyTypes.isJSON( val );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/entity/TypeConverter.java",
    "content": "package com.pinecone.hydra.registry.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TypeConverter extends Pinenut {\n    Object converter( String val, String type );\n\n    String queryType( Object val );\n\n    String queryRecognizedType( String type );\n\n    boolean isJSON( Object val );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/AnnotatedRegObjectInjector.java",
    "content": "package com.pinecone.hydra.registry.marshaling;\n\npublic class AnnotatedRegObjectInjector {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryDOMEncoder.java",
    "content": "package com.pinecone.hydra.registry.marshaling;\n\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.entity.Attributes;\nimport com.pinecone.hydra.registry.entity.ElementNode;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.registry.entity.TextFile;\n\nimport org.jsoup.nodes.Element;\n\npublic class RegistryDOMEncoder implements RegistryEncoder {\n    protected KOMRegistry registry;\n\n    public RegistryDOMEncoder( KOMRegistry registry ) {\n        this.registry = registry;\n    }\n\n    @Override\n    public Object encode( ElementNode node ) {\n        if ( node.evinceNamespace() != null ) {\n            return this.encodeNS(node.evinceNamespace() );\n        }\n        else if ( node.evinceProperties() != null ) {\n            return this.encodeProperties(node.evinceProperties() );\n        }\n        else if ( node.evinceTextFile() != null ) {\n            return this.encodeTextFile(node.evinceTextFile());\n        }\n        return null;\n    }\n\n    protected Element encodeNS( Namespace ns ) {\n        Element element = new Element(ns.getName());\n        Attributes attributes = ns.getAttributes();\n        setDOMAttributes(element, attributes);\n\n        for ( RegistryTreeNode child : ns.getChildren().values() ) {\n            Object encodedChild = this.encode((ElementNode)child);\n            if ( encodedChild instanceof Element ) {\n                element.appendChild((Element) encodedChild);\n            }\n        }\n\n        return element;\n    }\n\n    protected Element encodeProperties( Properties properties ) {\n        Element element = new Element( properties.getName() );\n        Attributes attributes = properties.getAttributes();\n        setDOMAttributes(element, attributes);\n\n        for ( String key : properties.keySet() ) {\n            
Element propertyElement = new Element(key);\n            propertyElement.text( properties.get(key).getValue().toString() );\n            element.appendChild( propertyElement );\n        }\n\n        return element;\n    }\n\n    protected Element encodeTextFile( TextFile textFile ) {\n        Element element = new Element( textFile.getName() );\n        Attributes attributes = textFile.getAttributes();\n        setDOMAttributes( element, attributes );\n\n        element.append( textFile.get().getValue() );\n\n        return element;\n    }\n\n    private void setDOMAttributes( Element element, Attributes attributes ) {\n        for ( String key : attributes.keySet() ) {\n            element.attr(key, attributes.get(key));\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryDecoder.java",
    "content": "package com.pinecone.hydra.registry.marshaling;\n\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.ElementNode;\n\npublic interface RegistryDecoder extends Pinenut {\n    default ElementNode decode( Object val, GUID parentGUID ) {\n        if ( val instanceof Map ) {\n            Map map = (Map) val;\n            if( map.isEmpty() ) {\n                return null;\n            }\n            else if( map.size() > 1 ) {\n                throw new IllegalArgumentException( \"Root element should be 1\" );\n            }\n\n            Map.Entry kv = (Map.Entry) map.entrySet().iterator().next();\n            return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );\n        }\n        else if ( val instanceof List ) {\n            List list = (List) val;\n            if( list.isEmpty() ) {\n                return null;\n            }\n            else if( list.size() > 1 ) {\n                throw new IllegalArgumentException( \"Root element should be 1\" );\n            }\n\n            return this.decode( Integer.toString( 0 ), list.get( 0 ), parentGUID );\n        }\n\n        return null;\n    }\n\n    ElementNode decode( String key, Object val, GUID parentGUID );\n\n    default ElementNode decode( Map.Entry kv, GUID parentGUID ) {\n        return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );\n    }\n\n    default ElementNode decode( Object val ) {\n        return this.decode( val, null );\n    }\n\n    default ElementNode decode( String key, Object val ) {\n        return this.decode( key, val, null );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryEncoder.java",
    "content": "package com.pinecone.hydra.registry.marshaling;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.registry.entity.ElementNode;\n\npublic interface RegistryEncoder extends Pinenut {\n    Object encode( ElementNode node );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryJQuery.java",
    "content": "package com.pinecone.hydra.registry.marshaling;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.FIELD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface RegistryJQuery {\n    String value() default \"\";\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryJSONDecoder.java",
    "content": "package com.pinecone.hydra.registry.marshaling;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.entity.ElementNode;\nimport com.pinecone.hydra.registry.entity.GenericNamespace;\nimport com.pinecone.hydra.registry.entity.GenericProperties;\nimport com.pinecone.hydra.registry.entity.GenericTextFile;\nimport com.pinecone.hydra.registry.entity.GenericTextValue;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.registry.entity.TextFile;\nimport com.pinecone.hydra.registry.entity.TextValueTypes;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class RegistryJSONDecoder implements RegistryDecoder {\n    protected KOMRegistry registry;\n\n    public RegistryJSONDecoder( KOMRegistry registry ) {\n        this.registry = registry;\n    }\n\n    protected boolean isPropertiesFormat( Map jo ) {\n        boolean b = false;\n        for( Object o : jo.entrySet() ) {\n            Map.Entry kv = (Map.Entry) o;\n            if( kv.getValue() instanceof Map ) {\n                b = true;\n            }\n            if( kv.getValue() instanceof List ) {\n                b = true;\n            }\n        }\n        return b;\n    }\n\n    protected boolean isPropertiesFormat( List jo ) {\n        boolean b = false;\n        for( Object o : jo ) {\n            if( o instanceof Map ) {\n                b = true;\n            }\n            if( o instanceof List ) {\n                b = true;\n            }\n        }\n        return b;\n    }\n\n    @Override\n    public ElementNode decode( String szName, Object o, GUID parentGuid ) {\n        if ( o instanceof Map ) {\n            return (ElementNode) this.registry.get( this.decodeJSONObject( 
szName, (Map) o, parentGuid ).getGuid() );\n        }\n        else if ( o instanceof List ) {\n            return (ElementNode) this.registry.get( this.decodeJSONArray(szName, (List) o, parentGuid).getGuid() );\n        }\n\n        // Handling text file as a leaf node\n        TextFile file = new GenericTextFile(this.registry);\n        file.setName( szName );\n        this.registry.put( file );\n        file.put( new GenericTextValue( file.getGuid(), o.toString(), TextValueTypes.queryType(o) ) );\n        this.registry.affirmOwnedNode( parentGuid, file.getGuid() );\n        return file;\n    }\n\n    protected Namespace newNamespace( String szName ) {\n        Namespace ns = new GenericNamespace( this.registry );\n        ns.setName( szName );\n\n        return ns;\n    }\n\n    protected Object[]   affirmNSExisted( String szName, GUID parentGuid ) {\n        Namespace ns = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.registry.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceNamespace() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be namespace.\", szName )\n                    );\n                }\n\n                ns = rootE.evinceNamespace();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.registry.get( parentGuid );\n            if( parentNode instanceof Namespace ) {\n                Collection<RegistryTreeNode> destChildren = parentNode.evinceNamespace().getChildren().values();\n                for( TreeNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof Namespace ) {\n                            ns = (Namespace) node;\n                            break;\n                        }\n                        else {\n                            
throw new IllegalArgumentException(\n                                    String.format( \"<Registry> Existed child-destination [%s] should be namespace.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n        }\n\n\n        GUID currentGuid;\n        if( ns == null ) {\n            ns = this.newNamespace( szName );\n            currentGuid  = this.registry.put( ns );\n            this.registry.affirmOwnedNode( parentGuid, currentGuid );\n        }\n        else {\n            currentGuid = ns.getGuid();\n        }\n        return new Object[] { ns, currentGuid };\n    }\n\n    protected Object[]   affirmPrExisted( String szName, GUID parentGuid ) {\n        Properties pr = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.registry.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceProperties() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be properties.\", szName )\n                    );\n                }\n\n                pr = rootE.evinceProperties();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.registry.get( parentGuid );\n            if( parentNode instanceof Namespace ) {\n                Collection<RegistryTreeNode> destChildren = parentNode.evinceNamespace().getChildren().values();\n                for( TreeNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof Properties ) {\n                            pr = (Properties) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] 
should be properties.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n        }\n\n\n\n        Properties neo ;\n        if( pr == null ) {\n            neo = new GenericProperties( this.registry );\n            neo.setName( szName );\n        }\n        else {\n            neo = pr;\n        }\n        return new Object[] { pr, neo };\n    }\n\n    protected ElementNode decodeJSONObject( String szName, Map jo, GUID parentGuid ) {\n        boolean isNamespace = this.isPropertiesFormat(jo);\n        ElementNode elementNode;\n        GUID currentGuid;\n\n        if ( isNamespace ) {\n            Object[] pair = this.affirmNSExisted( szName, parentGuid );\n            Namespace     ns = (Namespace) pair[ 0 ];\n            currentGuid      = (GUID)      pair[ 1 ];\n\n            for ( Object o : jo.entrySet() ) {\n                Map.Entry kv = (Map.Entry) o;\n                this.decode( kv.getKey().toString(), kv.getValue(), currentGuid );\n            }\n\n            elementNode = ns;\n        }\n        else {\n            Object[] pair = this.affirmPrExisted( szName, parentGuid );\n            Properties   prX = (Properties) pair[ 0 ];\n            Properties   pro = (Properties) pair[ 1 ];\n\n            for ( Object o : jo.entrySet() ) {\n                Map.Entry kv = (Map.Entry) o;\n                pro.put( kv.getKey().toString(), kv.getValue() );\n            }\n\n            if( prX == null ) {\n                currentGuid = this.registry.put( pro );\n                this.registry.affirmOwnedNode( parentGuid, currentGuid );\n            }\n            elementNode = pro;\n        }\n\n        return elementNode;\n    }\n\n    protected ElementNode decodeJSONArray( String szName, List ja, GUID parentGuid ) {\n        boolean isNamespace = this.isPropertiesFormat(ja);\n        ElementNode elementNode;\n        GUID currentGuid;\n\n        if ( isNamespace ) {\n            Object[] 
pair = this.affirmNSExisted( szName, parentGuid );\n            Namespace     ns = (Namespace) pair[ 0 ];\n            currentGuid = (GUID)      pair[ 1 ];\n\n            int i = 0;\n            for ( Object o : ja ) {\n                this.decode( Integer.toString(i), o, currentGuid );\n                ++i;\n            }\n\n            elementNode = ns;\n        }\n        else {\n            Object[] pair = this.affirmPrExisted( szName, parentGuid );\n            Properties   prX = (Properties) pair[ 0 ];\n            Properties   pro = (Properties) pair[ 1 ];\n\n            int i = 0;\n            for ( Object o : ja ) {\n                pro.put( Integer.toString(i), o );\n                ++i;\n            }\n            if( prX == null ) {\n                currentGuid = this.registry.put( pro );\n                this.registry.affirmOwnedNode( parentGuid, currentGuid );\n            }\n            elementNode = pro;\n        }\n\n        return elementNode;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryJSONEncoder.java",
    "content": "package com.pinecone.hydra.registry.marshaling;\n\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.entity.ElementNode;\n\npublic class RegistryJSONEncoder implements RegistryEncoder {\n    protected KOMRegistry registry;\n\n    public RegistryJSONEncoder( KOMRegistry registry ) {\n        this.registry = registry;\n    }\n\n    @Override\n    public Object encode( ElementNode node ) {\n        if( node.evinceNamespace() != null ) {\n            return node.evinceNamespace().toJSONObject();\n        }\n        else if( node.evinceProperties() != null ) {\n            return node.evinceProperties().toJSONObject();\n        }\n        else if( node.evinceTextFile() != null ) {\n            return node.evinceTextFile().toJSON();\n        }\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/marshaling/RegistryQuery.java",
    "content": "package com.pinecone.hydra.registry.marshaling;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.FIELD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface RegistryQuery {\n    String value() default \"\";\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/ArchConfigNodeOperator.java",
    "content": "package com.pinecone.hydra.registry.operator;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.entity.ConfigNode;\nimport com.pinecone.hydra.registry.entity.ConfigNodeMeta;\nimport com.pinecone.hydra.registry.entity.ArchConfigNode;\nimport com.pinecone.hydra.registry.entity.Attributes;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.registry.source.RegistryMasterManipulator;\nimport com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNodeMetaManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.lang.reflect.Field;\nimport java.time.LocalDateTime;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\n\npublic abstract class ArchConfigNodeOperator extends ArchRegistryOperator {\n    protected Map<GUID, ConfigNode>        cacheMap = new HashMap<>();\n\n    protected RegistryConfigNodeManipulator registryConfigNodeManipulator;\n    protected RegistryNodeMetaManipulator   configNodeMetaManipulator;\n\n    public ArchConfigNodeOperator( RegistryOperatorFactory factory ) {\n        this( factory.getMasterManipulator(), (KOMRegistry) factory.getRegistry() );\n        this.factory = factory;\n    }\n\n    public ArchConfigNodeOperator( RegistryMasterManipulator masterManipulator, KOMRegistry registry ) {\n        super( masterManipulator, registry );\n\n        this.registryConfigNodeManipulator = this.registryMasterManipulator.getConfigNodeManipulator();\n        this.configNodeMetaManipulator     = 
this.registryMasterManipulator.getNodeMetaManipulator();\n    }\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        ArchConfigNode configNode   = (ArchConfigNode) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode );\n        GuidAllocator guidAllocator = this.registry.getGuidAllocator();\n        GUID guid72                 = configNode.getGuid();\n\n\n        GUID configNodeMetaGuid = guidAllocator.nextGUID();\n        ConfigNodeMeta configNodeMeta = configNode.getConfigNodeMeta();\n        if ( configNodeMeta != null ){\n            configNodeMeta.setGuid(configNodeMetaGuid);\n            this.configNodeMetaManipulator.insert(configNodeMeta);\n        }\n        else {\n            configNodeMetaGuid = null;\n        }\n\n\n        GUID commonDataGuid = guidAllocator.nextGUID();\n        Attributes attributes = configNode.getAttributes();\n        if (attributes != null){\n            attributes.setGuid(commonDataGuid);\n            this.attributesManipulator.insert(attributes);\n        }\n        else {\n            commonDataGuid = null;\n        }\n\n\n        imperialTreeNode.setBaseDataGUID( commonDataGuid );\n        imperialTreeNode.setNodeMetadataGUID( configNodeMetaGuid );\n        this.imperialTree.insert(imperialTreeNode);\n        this.registryConfigNodeManipulator.insert( configNode );\n        return guid72;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        //ConfigNode为叶子节点只需要删除节点信息与引用继承关系\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.registryConfigNodeManipulator.remove(guid);\n        this.attributesManipulator.remove(node.getAttributesGUID());\n        this.configNodeMetaManipulator.remove(node.getNodeMetadataGUID());\n        this.imperialTree.removeCachePath(guid);\n    }\n\n    @Override\n    public RegistryTreeNode get( GUID guid ) {\n        ConfigNode rootConfig = 
this.cacheMap.get( guid );\n        if ( rootConfig == null ) {\n            rootConfig = this.getConfigNodeWideData( guid );\n            ConfigNode thisConfig = rootConfig;\n            while ( true ) {\n                GUID affinityGuid = thisConfig.getDataAffinityGuid();\n                if ( affinityGuid != null ){\n                    ConfigNode parent = this.getConfigNodeWideData( affinityGuid );\n                    this.inherit( thisConfig, parent );\n                    thisConfig = parent;\n                }\n                else {\n                    break;\n                }\n            }\n            this.cacheMap.put( guid, rootConfig );\n        }\n        return rootConfig;\n    }\n\n    @Override\n    public RegistryTreeNode getAsRootDepth( GUID guid ) {\n        return this.getConfigNodeWideData( guid );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n        ConfigNode configNode = (ConfigNode) treeNode;\n        ConfigNodeMeta configNodeMeta = configNode.getConfigNodeMeta();\n        Attributes attributes = configNode.getAttributes();\n        configNode.setUpdateTime(LocalDateTime.now());\n        if (configNodeMeta != null){\n            this.configNodeMetaManipulator.update(configNodeMeta);\n        }\n        if (attributes != null){\n            this.attributesManipulator.update(attributes);\n        }\n        this.registryConfigNodeManipulator.update(configNode);\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n        this.registryConfigNodeManipulator.updateName( guid, name );\n    }\n\n    protected ConfigNode getConfigNodeWideData( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        ConfigNode cn = this.registryConfigNodeManipulator.getConfigNode( guid );\n        if( cn instanceof ArchConfigNode ) {\n            ((ArchConfigNode) cn).apply( this.registry );\n        }\n\n        ConfigNodeMeta configNodeMeta = 
this.configNodeMetaManipulator.getConfigNodeMeta( node.getNodeMetadataGUID() );\n\n        //Notice: Registry attributes is difference from other tree, -- that is, same as DOM;\n        //        So in this case, this field is deprecated.\n        //Attributes         attributes = this.attributesManipulator.getAttributes( node.getAttributesGUID(), cn );\n\n        Attributes         attributes = this.attributesManipulator.getAttributes( guid, cn );\n        cn.setAttributes    ( attributes );\n        cn.setConfigNodeMeta( configNodeMeta );\n        return cn;\n    }\n\n    protected void inherit(ConfigNode self, ConfigNode prototype ){\n        Class<? extends ConfigNode> clazz = self.getClass();\n        Field[] fields = clazz.getDeclaredFields();\n\n        for ( Field field : fields ){\n            field.setAccessible(true);\n            try {\n                Object value1 = field.get( self );\n                Object value2 = field.get( prototype );\n                if ( Objects.isNull(value1) || (value1 instanceof List && ((List<?>) value1).isEmpty()) ){\n                    field.set(self,value2);\n                }\n            }\n            catch ( IllegalAccessException e ) {\n                throw new ProxyProvokeHandleException(e);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/ArchRegistryOperator.java",
    "content": "package com.pinecone.hydra.registry.operator;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.entity.ArchElementNode;\nimport com.pinecone.hydra.registry.source.RegistryAttributesManipulator;\nimport com.pinecone.hydra.registry.source.RegistryMasterManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\npublic abstract class ArchRegistryOperator implements RegistryNodeOperator {\n    protected KOMRegistry                    registry;\n    protected ImperialTree                   imperialTree;\n    protected RegistryMasterManipulator      registryMasterManipulator;\n    protected RegistryAttributesManipulator  attributesManipulator;\n\n    protected RegistryOperatorFactory        factory;\n\n    public ArchRegistryOperator ( RegistryOperatorFactory factory ) {\n        this( factory.getMasterManipulator(),(KOMRegistry) factory.getRegistry() );\n        this.factory = factory;\n    }\n\n    public ArchRegistryOperator( RegistryMasterManipulator masterManipulator, KOMRegistry registry ){\n        this.registryMasterManipulator     = masterManipulator;\n        this.imperialTree = registry.getMasterTrieTree();\n        this.attributesManipulator         = this.registryMasterManipulator.getAttributesManipulator();\n\n        this.registry                      = registry;\n    }\n\n    protected ImperialTreeNode affirmPreinsertionInitialize(TreeNode treeNode ) {\n        ArchElementNode entityNode   = (ArchElementNode) treeNode;\n\n        GUID guid72 = entityNode.getGuid();\n        // Case 1: Dummy config node.\n        GuidAllocator 
guidAllocator = this.registry.getGuidAllocator();\n        if( guid72 == null ) {\n            guid72 = guidAllocator.nextGUID();\n            entityNode.setGuid( guid72 );\n            entityNode.setCreateTime( LocalDateTime.now() );\n        }\n        entityNode.setUpdateTime( LocalDateTime.now() );\n\n        ImperialTreeNode imperialTreeNode = new GUIDImperialTrieNode();\n        imperialTreeNode.setGuid( guid72 );\n        imperialTreeNode.setType( UOIUtils.createLocalJavaClass( entityNode.getClass().getName() ) );\n\n        return imperialTreeNode;\n    }\n\n    public RegistryOperatorFactory getOperatorFactory() {\n        return this.factory;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/GenericRegistryOperatorFactory.java",
    "content": "package com.pinecone.hydra.registry.operator;\n\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.entity.GenericNamespace;\nimport com.pinecone.hydra.registry.entity.GenericProperties;\nimport com.pinecone.hydra.registry.entity.GenericTextFile;\nimport com.pinecone.hydra.registry.source.RegistryMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.TreeMap;\n\npublic class GenericRegistryOperatorFactory implements RegistryOperatorFactory {\n    protected RegistryMasterManipulator        registryMasterManipulator;\n\n    protected KOMRegistry registry;\n\n    protected Map<String, TreeNodeOperator>    registerer = new HashMap<>();\n\n    protected Map<String, String >             metaTypeMap = new TreeMap<>();\n\n    protected void registerDefaultMetaType( Class<?> genericType ) {\n        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace(\"Generic\",\"\") );\n    }\n\n    protected void registerDefaultMetaTypes() {\n        this.registerDefaultMetaType( GenericNamespace.class );\n        this.registerDefaultMetaType( GenericProperties.class );\n        this.registerDefaultMetaType( GenericTextFile.class );\n    }\n\n    public GenericRegistryOperatorFactory(KOMRegistry registry, RegistryMasterManipulator registryMasterManipulator ){\n        this.registry = registry;\n        this.registryMasterManipulator = registryMasterManipulator;\n\n        this.registerer.put(\n                RegistryOperatorFactory.DefaultNamespaceNodeKey,\n                new NamespaceNodeOperator( this )\n        );\n\n        this.registerer.put(RegistryOperatorFactory.DefaultPropertyConfigNodeKey,\n                new PropertiesOperator(this)\n        );\n\n        this.registerer.put(RegistryOperatorFactory.DefaultTextConfigNode,\n                new TextValueNodeOperator(this)\n        );\n\n        
this.registerDefaultMetaTypes();\n    }\n\n    @Override\n    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {\n        this.registerer.put( typeName, functionalNodeOperation );\n    }\n\n    @Override\n    public void registerMetaType( Class<?> clazz, String metaType ){\n        this.registerMetaType( clazz.getName(), metaType );\n    }\n\n    @Override\n    public void registerMetaType( String classFullName, String metaType ){\n        this.metaTypeMap.put( classFullName, metaType );\n    }\n\n    @Override\n    public String getMetaType( String classFullName ) {\n        return this.metaTypeMap.get( classFullName );\n    }\n\n    @Override\n    public RegistryNodeOperator getOperator( String typeName ) {\n        //Debug.trace( this.registerer.toString() );\n        return (RegistryNodeOperator)this.registerer.get( typeName );\n    }\n\n    @Override\n    public KOMRegistry getRegistry() {\n        return this.registry;\n    }\n\n    @Override\n    public RegistryMasterManipulator getMasterManipulator() {\n        return this.registryMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/NamespaceNodeOperator.java",
    "content": "package com.pinecone.hydra.registry.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.entity.GenericNamespace;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.entity.NamespaceMeta;\nimport com.pinecone.hydra.registry.entity.Attributes;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.registry.source.RegistryMasterManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeMetaManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class NamespaceNodeOperator extends ArchRegistryOperator {\n    private RegistryNSNodeManipulator     namespaceNodeManipulator;\n    private RegistryNSNodeMetaManipulator namespaceNodeMetaManipulator;\n\n\n    public NamespaceNodeOperator ( RegistryOperatorFactory factory ) {\n        this( factory.getMasterManipulator(),(KOMRegistry) factory.getRegistry() );\n        this.factory = factory;\n    }\n\n    public NamespaceNodeOperator( RegistryMasterManipulator masterManipulator , KOMRegistry registry ){\n        super( masterManipulator, registry );\n        this.namespaceNodeManipulator       = this.registryMasterManipulator.getNSNodeManipulator();\n        this.namespaceNodeMetaManipulator   = this.registryMasterManipulator.getNSNodeMetaManipulator();\n    }\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        Namespace nsNode        = (Namespace) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode );\n        
GuidAllocator guidAllocator = this.registry.getGuidAllocator();\n        GUID guid72                 = nsNode.getGuid();\n\n        NamespaceMeta namespaceMeta = nsNode.getNamespaceWithMeta();\n        GUID namespaceNodeMetaGuid = guidAllocator.nextGUID();\n        if (namespaceMeta != null){\n            namespaceMeta.setGuid(namespaceNodeMetaGuid);\n            this.namespaceNodeMetaManipulator.insert(namespaceMeta);\n        }\n        else {\n            namespaceNodeMetaGuid = null;\n        }\n\n\n        Attributes attributes = nsNode.getAttributes();\n        GUID nodeAttributesGuid = guidAllocator.nextGUID();\n        if (attributes != null){\n            attributes.setGuid( nodeAttributesGuid );\n            this.attributesManipulator.insert(attributes);\n        }\n        else {\n            nodeAttributesGuid = null;\n        }\n\n        imperialTreeNode.setNodeMetadataGUID(namespaceNodeMetaGuid);\n        imperialTreeNode.setBaseDataGUID(nodeAttributesGuid);\n        this.imperialTree.insert(imperialTreeNode);\n        this.namespaceNodeManipulator.insert( nsNode );\n        return guid72;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        //namespace节点需要递归删除其拥有节点若其引用节点，没有其他引用则进行清理\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren(guid);\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        if ( !childNodes.isEmpty() ){\n            List<GUID > subordinates = this.imperialTree.getSubordinates(guid);\n            if ( !subordinates.isEmpty() ){\n                for ( GUID subordinateGuid : subordinates ){\n                    this.purge( subordinateGuid );\n                }\n            }\n            childNodes = this.imperialTree.getChildren( guid );\n            for( GUIDImperialTrieNode childNode : childNodes ){\n                List<GUID > parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid());\n                if ( parentNodes.size() > 1 ){\n                
    this.imperialTree.removeInheritance(childNode.getGuid(),guid);\n                }\n                else {\n                    this.purge( childNode.getGuid() );\n                }\n            }\n        }\n\n        if ( node.getType().getObjectName().equals(GenericNamespace.class.getName()) ){\n            this.removeNode(guid);\n        }\n        else {\n            UOI uoi = node.getType();\n            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );\n            if( metaType == null ) {\n                TreeNode newInstance = (TreeNode)uoi.newInstance( new Class<? >[]{ KOMRegistry.class }, this.registry );\n                metaType = newInstance.getMetaType();\n            }\n\n            RegistryNodeOperator operator = this.getOperatorFactory().getOperator( metaType );\n            operator.purge( guid );\n        }\n    }\n\n    @Override\n    public RegistryTreeNode get( GUID guid ) {\n        return this.getNamespaceNodeWideData( guid, 0 );\n    }\n\n    @Override\n    public RegistryTreeNode get( GUID guid, int depth ) {\n        return this.getNamespaceNodeWideData( guid, depth );\n    }\n\n    @Override\n    public RegistryTreeNode getAsRootDepth( GUID guid ) {\n        return this.getNamespaceNodeWideData( guid, 0 );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n        this.namespaceNodeManipulator.updateName( guid, name );\n    }\n\n    private Namespace getNamespaceNodeWideData( GUID guid, int depth ){\n        Namespace ns = this.namespaceNodeManipulator.getNamespaceWithMeta( guid );\n        if ( ns instanceof GenericNamespace ){\n             ((GenericNamespace) ns).apply( this.registry );\n        }\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n\n        if( depth <= 0 ) {\n            List<GUIDImperialTrieNode> childNode = this.imperialTree.getChildren(guid);\n            
ArrayList<GUID> guids = new ArrayList<>();\n            for ( GUIDImperialTrieNode n : childNode ){\n                guids.add( n.getGuid() );\n            }\n            ++depth;\n            ns.setChildrenGuids( guids, depth );\n        }\n\n        Attributes           attributes = this.attributesManipulator.getAttributes( guid, ns );\n        NamespaceMeta namespaceNodeMeta = this.namespaceNodeMetaManipulator.getNamespaceNodeMeta( node.getNodeMetadataGUID() );\n        ns.setAttributes    ( attributes );\n        ns.setNamespaceMeta ( namespaceNodeMeta );\n        return ns;\n    }\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath(guid);\n        this.namespaceNodeManipulator.remove(guid);\n        this.namespaceNodeMetaManipulator.remove(node.getNodeMetadataGUID());\n        this.attributesManipulator.remove(node.getAttributesGUID());\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/PropertiesOperator.java",
    "content": "package com.pinecone.hydra.registry.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.ConfigNode;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.Property;\nimport com.pinecone.hydra.registry.source.RegistryPropertiesManipulator;\n\nimport java.util.List;\n\npublic class PropertiesOperator extends ArchConfigNodeOperator {\n    protected RegistryPropertiesManipulator registryPropertiesManipulator;\n\n    public PropertiesOperator( RegistryOperatorFactory factory ) {\n        super(factory);\n        this.registryPropertiesManipulator=factory.getMasterManipulator().getPropertiesManipulator();\n    }\n\n    @Override\n    public Properties get( GUID guid ) {\n        return (Properties) super.get( guid );\n    }\n\n    @Override\n    public Properties get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n\n    @Override\n    protected void inherit( ConfigNode self, ConfigNode prototype ) {\n        // Extends meta data.\n        super.inherit( self, prototype );\n        Properties sp = (Properties) self;\n        Properties pp = (Properties) prototype;\n\n        sp.setAffinityParent( pp );\n        sp.setParentProperties( pp.getPropertiesMap() );\n    }\n\n    @Override\n    protected Properties getConfigNodeWideData( GUID guid ) {\n        ConfigNode configNodeWideData = super.getConfigNodeWideData( guid );\n        if( configNodeWideData instanceof Properties ) {\n            Properties propertiesNode = (Properties) configNodeWideData;\n            List<Property > properties = this.registryPropertiesManipulator.getProperties( guid, propertiesNode );\n            propertiesNode.setProperties( properties );\n            return propertiesNode;\n        }\n\n        throw new IllegalStateException(\n                String.format(\n                        \"'%s' should be `PropertiesNode` but `%s` found.\",\n                        
guid.toString(), configNodeWideData.getClass().getSimpleName()\n                )\n        );\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        super.purge(guid);\n        this.registryPropertiesManipulator.removeAll(guid);\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/RegistryNodeOperator.java",
    "content": "package com.pinecone.hydra.registry.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface RegistryNodeOperator extends TreeNodeOperator {\n    @Override\n    RegistryTreeNode get( GUID guid );\n\n    RegistryTreeNode get( GUID guid, int depth );\n\n    @Override\n    RegistryTreeNode getAsRootDepth( GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/RegistryOperatorFactory.java",
    "content": "package com.pinecone.hydra.registry.operator;\n\nimport com.pinecone.hydra.registry.Registry;\nimport com.pinecone.hydra.registry.entity.ConfigNode;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.TextFile;\nimport com.pinecone.hydra.registry.source.RegistryMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface RegistryOperatorFactory extends OperatorFactory {\n    String DefaultNamespaceNodeKey          =   Namespace.class.getSimpleName();\n    String DefaultConfigNodeKey             =   ConfigNode.class.getSimpleName();\n    String DefaultPropertyConfigNodeKey     =   Properties.class.getSimpleName();\n    String DefaultTextConfigNode            =   TextFile.class.getSimpleName();\n\n    void register( String typeName, TreeNodeOperator functionalNodeOperation );\n\n    void registerMetaType( Class<?> clazz, String metaType );\n\n    void registerMetaType( String classFullName, String metaType );\n\n    String getMetaType( String classFullName );\n\n    RegistryNodeOperator getOperator( String typeName );\n\n    Registry getRegistry();\n\n    RegistryMasterManipulator getMasterManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/operator/TextValueNodeOperator.java",
    "content": "package com.pinecone.hydra.registry.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.ConfigNode;\nimport com.pinecone.hydra.registry.entity.GenericTextFile;\nimport com.pinecone.hydra.registry.entity.TextFile;\nimport com.pinecone.hydra.registry.entity.TextValue;\nimport com.pinecone.hydra.registry.source.RegistryTextFileManipulator;\n\npublic class TextValueNodeOperator extends ArchConfigNodeOperator {\n    protected RegistryTextFileManipulator registryTextFileManipulator;\n\n    public TextValueNodeOperator(RegistryOperatorFactory factory) {\n        super(factory);\n        this.registryTextFileManipulator = factory.getMasterManipulator().getTextFileManipulator();\n    }\n\n    @Override\n    public TextFile get( GUID guid ) {\n        return (TextFile) super.get( guid );\n    }\n\n    @Override\n    public TextFile get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    protected TextFile getConfigNodeWideData( GUID guid ) {\n        ConfigNode configNodeWideData = super.getConfigNodeWideData( guid );\n        GenericTextFile textConfNode = new GenericTextFile();\n        TextValue textValue = this.registryTextFileManipulator.getTextValue( guid );\n\n        textConfNode.setTextValue       ( textValue );\n        textConfNode.setConfigNodeMeta  ( configNodeWideData.getConfigNodeMeta() );\n        textConfNode.setAttributes      ( configNodeWideData.getAttributes() );\n        textConfNode.setGuid            ( configNodeWideData.getGuid() );\n        textConfNode.setName            ( configNodeWideData.getName() );\n        textConfNode.setCreateTime      ( configNodeWideData.getCreateTime() );\n        textConfNode.setRegistry        ( configNodeWideData.parentRegistry() );\n        textConfNode.setUpdateTime      ( configNodeWideData.getUpdateTime() );\n        return textConfNode;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        
super.purge(guid);\n        this.registryTextFileManipulator.remove(guid);\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderNamespace.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.RenderDistributeRegistry;\nimport com.pinecone.hydra.registry.entity.GenericNamespace;\n\npublic class GenericRenderNamespace extends GenericNamespace implements RenderNamespace {\n    protected RenderDistributeRegistry registry;\n\n    public void apply( RenderDistributeRegistry registry ) {\n        this.registry = registry;\n    }\n\n    @Override\n    public RenderDistributeRegistry getRegistry() {\n        return this.registry;\n    }\n\n    @Override\n    public RenderConfigNode getConfigNode(String key) {\n        return (RenderConfigNode) this.children.get(key);\n    }\n\n    @Override\n    public RenderNamespace getNamespace(String key) {\n        return (RenderNamespace) this.children.get(key);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderProperties.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.RenderDistributeRegistry;\nimport com.pinecone.hydra.registry.entity.GenericProperties;\nimport com.pinecone.hydra.registry.entity.Property;\n\nimport java.util.Collection;\nimport java.util.Set;\n\npublic class GenericRenderProperties extends GenericProperties implements RenderProperties {\n    protected RenderDistributeRegistry       registry;\n\n    public void apply(RenderDistributeRegistry registry) {\n       this.registry = registry;\n    }\n\n    @Override\n    public RenderProperty get(String key) {\n        return (RenderProperty)super.get( key );\n    }\n\n    @Override\n    public Set<Property> entrySet() {\n        return super.entrySet();\n    }\n\n    @Override\n    public Collection<Property > getProperties() {\n        return super.getProperties();\n    }\n\n\n    @Override\n    public RenderDistributeRegistry getRegistry() {\n        return this.registry;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderProperty.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.entity.GenericProperty;\nimport com.pinecone.hydra.registry.entity.Properties;\n\npublic class GenericRenderProperty extends GenericProperty implements RenderProperty {\n    public GenericRenderProperty() {\n        super();\n    }\n\n    public GenericRenderProperty( Properties properties ) {\n        super( properties );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderTextFile.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.RenderDistributeRegistry;\nimport com.pinecone.hydra.registry.entity.GenericTextFile;\n\npublic class GenericRenderTextFile extends GenericTextFile implements RenderTextFile {\n    protected RenderDistributeRegistry registry;\n\n    public void apply(RenderDistributeRegistry registry) {\n        super.apply(registry);\n    }\n    @Override\n    public RenderTextValue get() {\n        return (RenderTextValue) this.mTextValue;\n    }\n\n    @Override\n    public RenderDistributeRegistry getRegistry() {\n        return (RenderDistributeRegistry) super.parentRegistry();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/GenericRenderTextValue.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.entity.GenericTextValue;\n\npublic class GenericRenderTextValue extends GenericTextValue implements RenderTextValue{\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderConfigNode.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.entity.ConfigNode;\n\npublic interface RenderConfigNode extends ConfigNode {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderNamespace.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.entity.Namespace;\n\npublic interface RenderNamespace extends Namespace,RenderRegistryTreeNode {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderProperties.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.framework.util.template.UniformTemplateRenderer;\nimport com.pinecone.hydra.registry.entity.Property;\nimport com.pinecone.hydra.registry.entity.Properties;\n\npublic interface RenderProperties extends Properties, RenderRegistryTreeNode {\n    default Object renderValue      ( String key, UniformTemplateRenderer renderer, Object tpl ) {\n        Property v = this.get( key );\n        if( v.isStringBasedType() ) {\n            return renderer.render( v.getValue().toString(), tpl );\n        }\n        return v.getValue().toString();\n    }\n\n    default Object renderValue      ( String key, Object tpl ) {\n        Property v = this.get( key );\n        if( v.isStringBasedType() ) {\n            return this.getRegistry().getRenderer().render( v.getValue().toString(), tpl );\n        }\n        return v.getValue().toString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderProperty.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.entity.Property;\n\npublic interface RenderProperty extends Property {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderRegistryTreeNode.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.RenderDistributeRegistry;\nimport com.pinecone.hydra.registry.entity.RegistryTreeNode;\n\npublic interface RenderRegistryTreeNode extends RegistryTreeNode {\n    RenderDistributeRegistry getRegistry();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderTextFile.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.framework.util.template.UniformTemplateRenderer;\nimport com.pinecone.hydra.registry.entity.TextFile;\nimport com.pinecone.hydra.registry.entity.TextValue;\n\npublic interface RenderTextFile extends TextFile,RenderRegistryTreeNode {\n    default Object renderValue      ( UniformTemplateRenderer renderer, Object tpl ) {\n        TextValue v = this.get( );\n        return renderer.render( v.toString() ,tpl );\n    }\n\n    default Object renderValue      ( Object tpl ) {\n        TextValue v = this.get();\n        return this.getRegistry().getRenderer().render( v.toString(), tpl );\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/render/RenderTextValue.java",
    "content": "package com.pinecone.hydra.registry.render;\n\nimport com.pinecone.hydra.registry.entity.TextValue;\n\npublic interface RenderTextValue extends TextValue {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryAttributesManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.Attributes;\nimport com.pinecone.hydra.registry.entity.ElementNode;\n\npublic interface RegistryAttributesManipulator extends Pinenut {\n    void insertAttribute( GUID guid, String key, String value );\n\n    List<Map<String, Object > > getAttributesByGuid(GUID guid );\n\n    void updateAttribute( GUID guid, String key, String value );\n\n    void remove( GUID guid );\n\n    Attributes getAttributes( GUID guid, ElementNode element );\n\n    default void insert( Attributes attributes) {\n        for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) {\n            this.insertAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() );\n        }\n    }\n\n    default void update( Attributes attributes) {\n        for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) {\n            this.updateAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() );\n        }\n    }\n\n\n    boolean containsKey ( GUID guid, String key );\n\n    void clearAttributes( GUID guid );\n\n    void removeAttributeWithValue( GUID guid, String key, String value );\n\n    void removeAttribute( GUID guid, String key );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryConfigNodeManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.ConfigNode;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface RegistryConfigNodeManipulator extends GUIDNameManipulator {\n    void insert( ConfigNode configNode );\n\n    void remove( GUID guid );\n\n    boolean isConfigNode( GUID guid );\n\n    ConfigNode getConfigNode(GUID guid );\n\n    void update( ConfigNode configNode );\n\n    List<GUID > getGuidsByName( String name );\n\n    List<GUID > getGuidsByNameID( String name, GUID guid );\n\n    List<GUID > dumpGuid();\n\n    void updateName(GUID guid ,String name);\n\n    GUID getDataAffinityGuid( GUID guid );\n\n    void setDataAffinityGuid( GUID guid, GUID affinityGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryMasterManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface RegistryMasterManipulator extends KOIMasterManipulator {\n\n    RegistryConfigNodeManipulator getConfigNodeManipulator();\n\n    RegistryNSNodeManipulator     getNSNodeManipulator();\n\n    RegistryPropertiesManipulator getPropertiesManipulator();\n\n    RegistryTextFileManipulator   getTextFileManipulator();\n\n    RegistryNodeMetaManipulator   getNodeMetaManipulator();\n\n    RegistryNSNodeMetaManipulator getNSNodeMetaManipulator();\n\n    RegistryAttributesManipulator getAttributesManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNSNodeManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface RegistryNSNodeManipulator extends GUIDNameManipulator {\n    void insert( Namespace namespace);\n\n    void remove( GUID guid );\n\n    boolean isNamespaceNode( GUID guid );\n\n    Namespace getNamespaceWithMeta( GUID guid );\n\n    void update( Namespace namespace);\n\n    List<GUID > getGuidsByName( String name );\n\n    List<GUID > getGuidsByNameID( String name, GUID guid );\n\n    List<GUID > dumpGuid();\n\n    void updateName( GUID guid, String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNSNodeMetaManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.NamespaceMeta;\n\npublic interface RegistryNSNodeMetaManipulator extends Pinenut {\n    void insert( NamespaceMeta namespaceMeta);\n\n    void remove( GUID guid );\n\n    NamespaceMeta getNamespaceNodeMeta(GUID guid );\n\n    void update( NamespaceMeta namespaceMeta);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNodeMetaManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.ConfigNodeMeta;\n\npublic interface RegistryNodeMetaManipulator {\n    void insert(ConfigNodeMeta configNodeMeta);\n\n    void remove(GUID guid);\n\n    ConfigNodeMeta getConfigNodeMeta(GUID guid);\n\n    void update(ConfigNodeMeta configNodeMeta);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNodeOwnerManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.List;\n\npublic interface RegistryNodeOwnerManipulator extends Pinenut {\n    void insert(GUID subordinateGuid,GUID ownerGuid);\n\n    void remove(GUID subordinateGuid,GUID ownerGuid);\n\n    void removeBySubordinate(GUID subordinateGuid);\n\n    void removeByOwner(GUID OwnerGuid);\n\n    GUID getOwner(GUID subordinateGuid);\n\n    List<GUID> getSubordinates(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryNodePathManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface RegistryNodePathManipulator extends Pinenut {\n    void insert( GUID guid, String path );\n\n    void remove( GUID guid );\n\n    String getPath( GUID guid );\n\n    GUID getNode( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryPropertiesManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.Property;\n\nimport java.util.List;\n\npublic interface RegistryPropertiesManipulator extends Pinenut {\n    void insert(Property property);\n\n    void remove( GUID guid, String key );\n\n    List<Property > getProperties( GUID guid, Properties parent );\n\n    void update( Property property );\n\n    void removeAll( GUID guid );\n\n    void copyPropertiesTo(GUID sourceGuid, GUID destinationGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryTextFileManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.TextValue;\n\npublic interface RegistryTextFileManipulator extends Pinenut {\n    void insert(TextValue textValue);\n\n    void remove(GUID guid);\n\n    TextValue getTextValue(GUID guid);\n\n    void update(TextValue textValue);\n\n    void copyTextValueTo(GUID sourceGuid, GUID destinationGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-config/src/main/java/com/pinecone/hydra/registry/source/RegistryTreeManipulator.java",
    "content": "package com.pinecone.hydra.registry.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\nimport java.util.List;\n\npublic interface RegistryTreeManipulator extends Pinenut {\n    void insert (ImperialTreeNode distributedConfTreeNode);\n\n    GUIDImperialTrieNode getNode(GUID guid);\n\n    void remove(GUID guid);\n\n    void removeInheritance(GUID childGuid, GUID parentGuid);\n\n    List<GUID> fetchParentGuids(GUID guid);\n\n    List<GUIDImperialTrieNode> getChild(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-framework-device</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n    
</dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/ArchDeployFamilyMeta.java",
    "content": "package com.pinecone.hydra.deploy;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\npublic abstract class ArchDeployFamilyMeta implements DeployFamilyMeta {\n    protected GUID   guid;\n\n    protected String name;\n\n    protected String extraInformation;\n\n    protected String ipAddress;\n\n    protected String szElementaryConfig;\n\n    protected Map<String, Object > elementaryConfig;\n\n\n    protected String description;\n\n    public ArchDeployFamilyMeta() {\n    }\n\n    public ArchDeployFamilyMeta(Map<String, Object > joEntity ) {\n        this.apply( joEntity );\n    }\n\n    protected ArchDeployFamilyMeta apply(Map<String, Object > joEntity ) {\n        String szGuid = (String) joEntity.get( \"guid\" );\n        if( szGuid != null ) {\n            this.guid = GUIDs.GUID128( (String) joEntity.get( \"guid\" ) );\n        }\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n\n        return this;\n    }\n\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public Identification getId() {\n        return this.getGuid();\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public String getExtraInformation() {\n        return this.extraInformation;\n    }\n    @Override\n    public String getIpAddress() {\n        return this.ipAddress;\n    }\n\n    @Override\n    public String getDescription() {\n        return this.description;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/DeployExtraMeta.java",
    "content": "package com.pinecone.hydra.deploy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface DeployExtraMeta extends Pinenut {\n\n    DeployFamilyMeta getKernelMeta();\n\n    GUID getGuid() ;\n\n    String getDeployName();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/DeployFamilyMeta.java",
    "content": "package com.pinecone.hydra.deploy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.Identification;\n\npublic interface DeployFamilyMeta extends Pinenut  {\n\n    //long getEnumId();\n\n    //GUID getGuid();\n\n    Identification getId() ;\n\n    String getName();\n\n    String getExtraInformation();\n\n    String getIpAddress();\n\n    String getDescription();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/entity/GenericContainer.java",
    "content": "package com.pinecone.hydra.deploy.entity;\n\nimport com.pinecone.hydra.deploy.Container;\n\npublic class GenericContainer implements Container {\n\n    protected String status;\n\n    @Override\n    public String getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public void setStatus( String status ) {\n        this.status = status;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/entity/GenericPhysicalHost.java",
    "content": "package com.pinecone.hydra.deploy.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.deploy.PhysicalHost;\nimport com.pinecone.hydra.deploy.kom.entity.ArchElementNode;\n\npublic class GenericPhysicalHost extends ArchElementNode implements PhysicalHost {\n\n    protected GUID guid;\n\n    protected String status;\n\n    protected String hardwareSpecs;\n\n\n    protected String name;\n\n    @Override\n    public String getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public void setStatus( String status ) {\n           this.status = status;\n    }\n\n    @Override\n    public String getHardwareSpecs() {\n        return this.hardwareSpecs;\n    }\n\n    @Override\n    public void setHardwareSpecs( String hardwareSpecs ) {\n            this.hardwareSpecs = hardwareSpecs;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n//    @Override\n//    public GUID getGuid() {\n//        return this.guid;\n//    }\n//\n//    @Override\n//    public void setGuid(GUID guid) {\n//        this.guid = guid;\n//    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public void setName( String name ) {\n            this.name = name;\n    }\n\n\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/entity/GenericQuick.java",
    "content": "package com.pinecone.hydra.deploy.entity;\n\nimport com.pinecone.hydra.deploy.Quick;\n\npublic class GenericQuick implements Quick {\n\n    protected String status;\n\n    @Override\n    public String getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public void setStatus(String status) {\n          this.status = status;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/entity/GenericVirtualMachine.java",
    "content": "package com.pinecone.hydra.deploy.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.deploy.PhysicalHost;\nimport com.pinecone.hydra.deploy.VirtualMachine;\n\npublic class GenericVirtualMachine implements VirtualMachine {\n\n    protected GUID affiliateHostGuid;\n\n    protected String name;\n\n    protected String ipAddress;\n\n    protected String status;\n\n    protected GUID guid;\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public String getIpAddress() {\n        return this.ipAddress;\n    }\n\n    @Override\n    public void setIpAddress(String ipAddress) {\n        this.ipAddress = ipAddress;\n    }\n\n    @Override\n    public String getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public void setStatus(String status) {\n        this.status = status;\n    }\n\n    @Override\n    public PhysicalHost getAffiliateHost() {\n        return null;\n        //return this.affiliateHostGuid;\n    }\n\n//    @Override\n//    public void setGuid(GUID guid) {\n//        this.guid = guid;\n//    }\n//\n//    @Override\n//    public void setAffiliateHostGuid(GUID guid) {\n//        this.affiliateHostGuid = guid;\n//    }\n\n//    @Override\n//    public void setGuid(GUID guid) {\n//        this.guid = guid;\n//    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n//    @Override\n//    public void setAffiliateHostGuid( GUID guid ) {\n//        this.affiliateHostGuid = guid;\n//    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/DeployConfig.java",
    "content": "package com.pinecone.hydra.deploy.kom;\n\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface DeployConfig extends KernelObjectConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/DeployFamilyNode.java",
    "content": "package com.pinecone.hydra.deploy.kom;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.deploy.DeployFamilyMeta;\n\npublic interface DeployFamilyNode extends DeployFamilyMeta {\n    long getEnumId();\n\n    void setEnumId(long id);\n\n    void setName(String name);\n\n    GUID getGuid();\n\n    void setGuid(GUID guid);\n\n    @Override\n    default Identification getId() {\n        return this.getGuid();\n    }\n\n    String getExtraInformation();\n\n    void setExtraInformation(String extraInformation);\n\n    String getDescription();\n\n    void setDescription(String description);\n\n     String getIpAddress();\n\n     void setIpAddress( String ipAddress );\n\n    DeployFamilyNode apply(Map<String, Object> joEntity) ;\n}"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/DeployInstrument.java",
    "content": "package com.pinecone.hydra.deploy.kom;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.entity.ContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.QuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.ServerElement;\nimport com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;\nimport com.pinecone.hydra.system.ko.kom.ReparseKOMTree;\nimport com.pinecone.hydra.deploy.kom.entity.ElementNode;\nimport com.pinecone.hydra.deploy.kom.entity.ClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\nimport com.pinecone.hydra.deploy.kom.entity.DeployElement;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface DeployInstrument extends ReparseKOMTree {\n\n    DeployConfig KERNEL_DEPLOY_CONFIG = new KernelDeployConfig();\n\n    ClusterElement          affirmCluster(String path );\n\n    Namespace               affirmNamespace( String path );\n\n    ServerElement           affirmServer( String path );\n\n    QuickElement            affirmQuick( String path );\n\n    VirtualMachineElement   affirmVirtualMachine( String path );\n\n    ContainerElement        affirmContainerElement(String path);\n\n    PhysicalHostElement     affirmPhysicalHost(String path );\n\n    ElementNode             queryElement( String path );\n\n    boolean                 containsChild( GUID parentGuid, String childName );\n\n    void                    update( TreeNode treeNode );\n\n    void                    createDeployServiceInsMapping( GUID deployGuid, GUID serviceInsGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/DeployPathSelector.java",
    "content": "package com.pinecone.hydra.deploy.kom;\n\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.kom.StandardPathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic class DeployPathSelector extends StandardPathSelector {\n    public DeployPathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, trieTree, dirMan, fileMans );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/KernelDeployConfig.java",
    "content": "package com.pinecone.hydra.deploy.kom;\n\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\npublic class KernelDeployConfig extends ArchKernelObjectConfig implements DeployConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/UniformDeployInstrument.java",
    "content": "package com.pinecone.hydra.deploy.kom;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.deploy.kom.entity.ClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.ContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericDeployInsMapping;\nimport com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.ArchServerElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.QuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.ServerElement;\nimport com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.source.DeployServiceInsMappingManipulator;\nimport com.pinecone.hydra.deploy.kom.source.PhysicalHostManipulator;\nimport com.pinecone.hydra.deploy.kom.source.QuickElementManipulator;\nimport com.pinecone.hydra.deploy.kom.source.VirtualMachineManipulator;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree;\nimport com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition;\nimport com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector;\nimport com.pinecone.hydra.deploy.kom.entity.ElementNode;\nimport 
com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\nimport com.pinecone.hydra.deploy.kom.entity.DeployTreeNode;\nimport com.pinecone.hydra.deploy.kom.operator.GenericElementOperatorFactory;\nimport com.pinecone.hydra.deploy.kom.source.ClusterNodeManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployNodeManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.RegimentedImperialTree;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\npublic class UniformDeployInstrument extends ArchReparseKOMTree implements DeployInstrument {\n    //GenericDistributedScopeTree\n    protected ImperialTree                          imperialTree;\n\n    protected DeployMasterManipulator               deployMasterManipulator;\n\n    protected DeployNamespaceManipulator            deployNamespaceManipulator;\n\n    protected ClusterNodeManipulator                clusterNodeManipulator;\n\n    protected DeployNodeManipulator                 deployNodeManipulator;\n\n    protected List<GUIDNameManipulator >            folderManipulators;\n\n    protected List<GUIDNameManipulator >            fileManipulators;\n\n    protected PhysicalHostManipulator               physicalHostManipulator;\n\n    protected VirtualMachineManipulator             virtualMachineManipulator;\n\n    protected QuickElementManipulator               quickElementManipulator;\n\n    protected DeployServiceInsMappingManipulator    deployServiceInsMappingManipulator;\n\n    public UniformDeployInstrument( 
Processum superiorProcess, KOIMasterManipulator masterManipulator, DeployInstrument parent, String name, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorProcess, masterManipulator, DeployInstrument.KERNEL_DEPLOY_CONFIG, parent, name, guidAllocator );\n\n        this.deployMasterManipulator = (DeployMasterManipulator) masterManipulator;\n        this.deployNamespaceManipulator = this.deployMasterManipulator.getNamespaceManipulator();\n        this.clusterNodeManipulator          = this.deployMasterManipulator.getJobNodeManipulator();\n        this.deployNodeManipulator = this.deployMasterManipulator.getDeployNodeManipulator();\n        KOISkeletonMasterManipulator skeletonMasterManipulator = this.deployMasterManipulator.getSkeletonMasterManipulator();\n        TreeMasterManipulator        treeMasterManipulator     = (TreeMasterManipulator) skeletonMasterManipulator;\n        this.imperialTree                = new RegimentedImperialTree(treeMasterManipulator);\n        this.operatorFactory             = new GenericElementOperatorFactory(this,(DeployMasterManipulator) masterManipulator);\n        this.physicalHostManipulator     = this.deployMasterManipulator.getPhysicalHostManipulator();\n        this.virtualMachineManipulator   = this.deployMasterManipulator.getVirtualMachineManipulator();\n        this.pathResolver                = new KOPathResolver( this.kernelObjectConfig );\n        this.quickElementManipulator     = this.deployMasterManipulator.getQuickElementManipulator();\n        this.deployServiceInsMappingManipulator = this.deployMasterManipulator.getDeployServiceInsMappingManipulator();\n        // TODO for customize service tree architecture.\n        this.folderManipulators          = new ArrayList<>( List.of( this.deployNamespaceManipulator, this.clusterNodeManipulator) );\n        this.fileManipulators            = new ArrayList<>( List.of( this.clusterNodeManipulator, this.physicalHostManipulator, this.virtualMachineManipulator, 
this.quickElementManipulator) );\n        this.pathSelector                = new MultiFolderPathSelector(\n                this.pathResolver, this.imperialTree, this.folderManipulators.toArray( new GUIDNameManipulator[]{} ), this.fileManipulators.toArray( new GUIDNameManipulator[]{} )\n        );\n\n        this.mReparseKOM                 =  new GenericReparseKOMTreeAddition( this );\n    }\n\n    public UniformDeployInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator ) {\n        this( superiorProcess, masterManipulator, null, DeployInstrument.class.getSimpleName(), null );\n    }\n\n//    public UniformTaskInstrument( Hydrogen hydrogen ) {\n//        this.hydrogen = hydrogen;\n//    }\n\n    public UniformDeployInstrument( KOIMappingDriver driver ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator()\n        );\n    }\n\n    public UniformDeployInstrument( KOIMappingDriver driver, DeployInstrument parent, String name ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator(),\n                parent,\n                name,\n                null\n        );\n    }\n\n    protected DeployTreeNode affirmTreeNodeByPath(String path, Class<? > cnSup, Class<? > nsSup ) {\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        String currentPath = \"\";\n        GUID parentGuid = GUIDs.Dummy128();\n\n        DeployTreeNode node = this.queryElement(path);\n        if ( node != null ){\n            return node;\n        }\n\n        DeployTreeNode ret = null;\n        for( int i = 0; i < parts.length; ++i ){\n            currentPath = currentPath + ( i > 0 ? 
this.getConfig().getPathNameSeparator() : \"\" ) + parts[ i ];\n            node = this.queryElement( currentPath );\n            if ( node == null){\n                if ( i == parts.length - 1 && cnSup != null ){\n                    ElementNode en = (ElementNode) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } );\n                    en.setName( parts[i] );\n                    GUID guid = this.put( en );\n                    this.affirmOwnedNode( parentGuid, guid );\n                    return en;\n                }\n                else {\n                    Namespace namespace = (Namespace) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } );\n                    namespace.setName( parts[i] );\n                    GUID guid = this.put( namespace );\n                    if ( i != 0 ){\n                        this.affirmOwnedNode( parentGuid, guid );\n                        parentGuid = guid;\n                    }\n                    else {\n                        parentGuid = guid;\n                    }\n\n                    ret = namespace;\n                }\n            }\n            else {\n                parentGuid = node.getGuid();\n            }\n        }\n\n        return ret;\n    }\n\n    @Override\n    public ClusterElement affirmCluster(String path ) {\n        return (ClusterElement) this.affirmTreeNodeByPath( path, GenericClusterElement.class, GenericNamespace.class );\n    }\n\n    @Override\n    public ServerElement affirmServer(String path) {\n        return (ServerElement) this.affirmTreeNodeByPath( path, ArchServerElement.class, GenericNamespace.class );\n    }\n\n    @Override\n    public QuickElement affirmQuick(String path) {\n        return (QuickElement) this.affirmTreeNodeByPath( path, GenericQuickElement.class, GenericNamespace.class );\n    }\n\n    @Override\n    public VirtualMachineElement affirmVirtualMachine(String path) {\n        return (VirtualMachineElement) this.affirmTreeNodeByPath( 
path, GenericVirtualMachineElement.class, GenericNamespace.class );\n    }\n\n    @Override\n    public ContainerElement affirmContainerElement(String path) {\n        return (ContainerElement) this.affirmTreeNodeByPath( path, GenericContainerElement.class, GenericNamespace.class );\n\n    }\n\n    @Override\n    public PhysicalHostElement affirmPhysicalHost(String path) {\n        return (PhysicalHostElement) this.affirmTreeNodeByPath( path, GenericPhysicalHostElement.class, GenericNamespace.class );\n    }\n\n    @Override\n    public ElementNode queryElement( String path ) {\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid != null ) {\n            return this.get( guid ).evinceElementNode();\n        }\n\n        return null;\n    }\n\n    @Override\n    public Namespace affirmNamespace( String path ) {\n        return ( Namespace ) this.affirmTreeNodeByPath( path, null, GenericNamespace.class );\n    }\n\n\n    protected boolean containsChild( GUIDNameManipulator manipulator, GUID parentGuid, String childName ) {\n        List<GUID > guids = manipulator.getGuidsByName( childName );\n        for( GUID guid : guids ) {\n            List<GUID > ps = this.imperialTree.fetchParentGuids( guid );\n            if( ps.contains( parentGuid ) ){\n                return true;\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public boolean containsChild( GUID parentGuid, String childName ) {\n        for( GUIDNameManipulator manipulator : this.fileManipulators ) {\n            if( this.containsChild( manipulator, parentGuid, childName ) ) {\n                return true;\n            }\n        }\n\n        for( GUIDNameManipulator manipulator : this.folderManipulators ) {\n            if( this.containsChild( manipulator, parentGuid, childName ) ) {\n                return true;\n            }\n        }\n        return false;\n    }\n\n\n    /**\n     * Affirm path exist in cache, if required.\n     * 确保路径存在于缓存，如果有明确实现必要的话。\n 
    * 对于GenericDistributedScopeTree::getPath, 默认会自动写入缓存，因此这里可以通过getPath保证路径缓存一定存在。\n     * @param guid, target guid.\n     * @return Path\n     */\n    protected void affirmPathExist( GUID guid ) {\n        this.imperialTree.getCachePath( guid );\n    }\n\n    @Override\n    public DeployTreeNode get( GUID guid ){\n        return (DeployTreeNode) super.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n        TreeNodeOperator operator = this.operatorFactory.getOperator( treeNode.getMetaType() );\n        operator.update( treeNode );\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        super.remove( guid );\n    }\n\n    @Override\n    public void createDeployServiceInsMapping(GUID deployGuid, GUID serviceInsGuid) {\n        GenericDeployInsMapping insMapping = new GenericDeployInsMapping();\n        insMapping.setServiceInsGuid( serviceInsGuid );\n        insMapping.setDeployGuid( deployGuid );\n        this.deployServiceInsMappingManipulator.insert( insMapping );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ArchElementNode.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.ArchDeployFamilyMeta;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic abstract class ArchElementNode extends ArchDeployFamilyMeta implements ElementNode {\n    protected long                       enumId;\n\n    protected GUID                       metaGuid;\n\n    protected GUIDImperialTrieNode       distributedTreeNode;\n\n    protected DeployInstrument           deployInstrument;\n\n    protected LocalDateTime              createTime;\n\n    protected LocalDateTime              updateTime;\n\n    public ArchElementNode() {\n        super();\n\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n    }\n\n    public ArchElementNode( Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n    }\n\n    public ArchElementNode( Map<String, Object > joEntity, DeployInstrument deployInstrument) {\n        super( joEntity );\n        this.apply(deployInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public ArchElementNode( DeployInstrument deployInstrument) {\n        this.apply(deployInstrument);\n    }\n\n    public void apply( DeployInstrument deployInstrument) {\n   
     this.deployInstrument = deployInstrument;\n        GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator();\n        this.setGuid( guidAllocator.nextGUID() );\n        if ( this.createTime == null ) {\n            this.createTime = LocalDateTime.now();\n            this.updateTime = LocalDateTime.now();\n        }\n    }\n\n    @Override\n    public ArchElementNode apply( Map<String, Object > joEntity ) {\n        super.apply( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n\n        return this;\n    }\n\n    @Override\n    public String getKomPath() {\n        return this.deployInstrument.getPath( this.getGuid() );\n    }\n\n    @Override\n    public GUID getMetaGuid() {\n        return this.metaGuid;\n    }\n\n    @Override\n    public void setMetaGuid( GUID metaGuid ) {\n        this.metaGuid = metaGuid;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId( long enumId ) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid( GUID guid ) {\n        this.guid = guid;\n    }\n\n    @Override\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    @Override\n    public void setExtraInformation( String extraInformation ) {\n        this.extraInformation = extraInformation;\n    }\n    @Override\n    public void setIpAddress( String ipAddress ) {\n        this.ipAddress = ipAddress;\n    }\n    @Override\n    public String getIpAddress() {\n        return this.ipAddress;\n    }\n\n    @Override\n    public void setDescription( String description ) {\n        this.description = description;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime( LocalDateTime createTime ) {\n        this.createTime = 
createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime( LocalDateTime updateTime ) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public GUIDImperialTrieNode getDistributedTreeNode() {\n        return this.distributedTreeNode;\n    }\n\n    @Override\n    public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) {\n        this.distributedTreeNode = distributedTreeNode;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n\n    protected Collection<ElementNode > fetchChildren() {\n        Collection<GUID > guids = this.fetchChildrenGuids();\n        List<ElementNode > elementNodes = new ArrayList<>();\n        for( GUID guid : guids ){\n            ElementNode elementNode = (ElementNode) this.deployInstrument.get( guid );\n            elementNodes.add( elementNode );\n        }\n        return elementNodes;\n    }\n\n    protected Collection<GUID > fetchChildrenGuids() {\n        return this.deployInstrument.fetchChildrenGuids( this.getGuid() );\n    }\n\n    protected void addChild( ElementNode child ) {\n        GUID childId;\n        boolean bContainsChild = this.containsChild( child.getName() );\n        if( bContainsChild ) {\n            return;\n        }\n        else {\n            childId = this.deployInstrument.put( child );\n        }\n\n\n        this.deployInstrument.affirmOwnedNode( this.guid, childId );\n    }\n\n    protected boolean containsChild( String childName ) {\n        return this.deployInstrument.containsChild( this.guid, childName );\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        return BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ArchServerElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.deploy.DeployExtraMeta;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\n\nimport java.util.Map;\n\npublic abstract class ArchServerElement extends ArchElementNode implements ServerElement {\n\n\n\n    protected String localDomain;\n\n    protected String wideDomain;\n\n    protected boolean  enable;\n\n    protected DeployExtraMeta extraMeta;\n\n    protected  DeployElement affiliateDeployment;\n\n    private void initSelf( Map<String, Object > joEntity ) {\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        if ( this.szElementaryConfig != null ) {\n            this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig );\n        }\n    }\n\n    public ArchServerElement(Map<String, Object > joEntity ) {\n        super( joEntity );\n        this.initSelf( joEntity );\n    }\n\n    public ArchServerElement(Map<String, Object > joEntity, DeployInstrument deployInstrument ) {\n        super( joEntity, deployInstrument);\n        this.initSelf( joEntity );\n    }\n\n    public ArchServerElement(DeployInstrument deployInstrument ) {\n        super(deployInstrument);\n    }\n\n\n    public ArchServerElement() {\n        super();\n    }\n\n\n    @Override\n    public String getLocalDomain() {\n        return this.localDomain;\n    }\n\n    @Override\n    public void setLocalDomain(String localDomain) {\n        this.localDomain = localDomain;\n    }\n\n    @Override\n    public String getWideDomain() {\n        return this.wideDomain;\n    }\n\n    @Override\n    public void setWideDomain(String wideDomain) {\n        this.wideDomain = wideDomain;\n    }\n\n    @Override\n    public boolean isEnable() {\n        return this.enable;\n    }\n\n    @Override\n    public void 
setEnable( boolean enable ) {\n        this.enable = enable;\n    }\n\n    @Override\n    public DeployExtraMeta getExtraMeta() {\n        return this.extraMeta;\n    }\n\n    @Override\n    public void setExtraMeta(DeployExtraMeta extraMeta) {\n          this.extraMeta = extraMeta;\n    }\n\n    @Override\n    public DeployElement getAffiliateDeployment() {\n        return this.affiliateDeployment;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ClusterElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.hydra.deploy.kom.DeployFamilyNode;\n\npublic interface ClusterElement extends FolderElement, DeployFamilyNode {\n    @Override\n    default ClusterElement evinceClusterElement() {\n        return this;\n    }\n\n    String getType();\n\n    void setType( String type );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/CommonMeta.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\npublic interface CommonMeta extends ElementNode {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ContainerElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\npublic interface ContainerElement extends DeployElement {\n\n    void setStatus( String status );\n\n    String getStatus();\n\n    @Override\n    default ContainerElement evinceContainerElement() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/DeployElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.hydra.deploy.DeployExtraMeta;\n\npublic interface DeployElement extends ElementNode {\n\n    @Override\n    default DeployElement evinceDeployElement() {\n        return this;\n    }\n\n    boolean isEnable() ;\n\n    void setEnable( boolean enable ) ;\n\n    DeployExtraMeta getExtraMeta();\n\n    void setExtraMeta(DeployExtraMeta extraMeta);\n\n    DeployElement getAffiliateDeployment();\n\n}"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/DeployInsMapping.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface DeployInsMapping extends Pinenut {\n    void setEnumId( long enumId );\n\n    long getEnumId();\n\n    void setDeployGuid( GUID deployGuid );\n\n    GUID getDeployGuid();\n\n    void setServiceInsGuid( GUID serviceInsGuid );\n\n    GUID getServiceInsGuid();\n\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getCreateTime();\n\n    void setUpdateTime( LocalDateTime updateTime );\n\n    LocalDateTime getUpdateTime();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/DeployTreeNode.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface DeployTreeNode extends TreeNode {\n    String getName();\n\n    default String getMetaType() {\n        return this.className().replace(\"Generic\",\"\");\n    }\n\n    default DeployTreeNode evinceTreeNode(){\n        return this;\n    }\n\n    default ElementNode evinceElementNode(){\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ElementNode.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport java.time.LocalDateTime;\nimport java.util.Set;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.deploy.kom.DeployFamilyNode;\nimport com.pinecone.hydra.system.ko.meta.ElementObject;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic interface ElementNode extends DeployTreeNode, DeployFamilyNode, ElementObject {\n\n    Set<String > UnbeanifiedKeys = Set.of( \"distributedTreeNode\" );\n\n    @Override\n    default String objectCategoryName() {\n        return \"Deploy\";\n    }\n\n    default Namespace evinceNamespace() {\n        return null;\n    }\n\n    default QuickElement evinceQuickElement() {\n        return null;\n    }\n\n    default ClusterElement evinceClusterElement() {\n        return null;\n    }\n\n    default DeployElement evinceDeployElement() {\n        return null;\n    }\n\n    default VirtualMachineElement evinceVirtualMachineElement() {\n        return null;\n    }\n\n    default PhysicalHostElement evincePhysicalHostElement() {\n        return null;\n    }\n\n    default ContainerElement evinceContainerElement() {\n        return null;\n    }\n\n\n\n    GUIDImperialTrieNode getDistributedTreeNode();\n\n    void setDistributedTreeNode(GUIDImperialTrieNode distributedTreeNode);\n\n    JSONObject toJSONObject();\n\n    @Override\n    default ElementNode evinceElementNode(){\n        return this;\n    }\n\n    GUID getMetaGuid();\n\n    void setMetaGuid(GUID metaGuid);\n\n    String getKomPath();\n\n\n\n    String getName();\n\n    void setName(String name);\n\n    LocalDateTime getCreateTime();\n\n    void setCreateTime(LocalDateTime createTime);\n\n    LocalDateTime getUpdateTime();\n\n    void setUpdateTime(LocalDateTime updateTime);\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/FolderElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FolderElement extends ElementNode {\n\n    Collection<ElementNode > fetchChildren();\n\n    Collection<GUID > fetchChildrenGuids();\n\n    void addChild( ElementNode child );\n\n    boolean containsChild( String childName );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericClusterElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\n\npublic class GenericClusterElement extends ArchElementNode implements ClusterElement {\n    protected String        taskType;\n\n    public GenericClusterElement() {\n        super();\n    }\n\n    public GenericClusterElement(Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericClusterElement(Map<String, Object > joEntity, DeployInstrument deployInstrument) {\n        super( joEntity, deployInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericClusterElement(DeployInstrument deployInstrument) {\n        super(deployInstrument);\n    }\n    @Override\n    public String getIpAddress() {\n        return this.ipAddress;\n    }\n\n    @Override\n    public void setIpAddress(String ipAddress) {\n        this.ipAddress = ipAddress;\n    }\n\n\n    @Override\n    public String getType() {\n        return this.taskType;\n    }\n\n    @Override\n    public void setType( String taskType ) {\n        this.taskType = taskType;\n    }\n\n    @Override\n    public Collection<ElementNode > fetchChildren() {\n        return super.fetchChildren();\n    }\n\n    @Override\n    public Collection<GUID > fetchChildrenGuids() {\n        return super.fetchChildrenGuids();\n    }\n\n    @Override\n    public void addChild( ElementNode child ) {\n        if( child instanceof FolderElement ) {\n            throw new IllegalArgumentException( \"Foisting `FolderElement` 
into application node is not accepted.\" );\n        }\n        super.addChild( child );\n    }\n\n    @Override\n    public boolean containsChild( String childName ) {\n        return super.containsChild( childName );\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        Collection<ElementNode > children = this.fetchChildren();\n        JSONObject jo         = BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys );\n        JSONObject joChildren = new JSONMaptron();\n\n        for( ElementNode node : children ) {\n            joChildren.put( node.getName(), node.toJSONObject() );\n        }\n        jo.put( \"deployments\", joChildren );\n        return jo;\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericCommonMeta.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\npublic class GenericCommonMeta extends ArchElementNode implements CommonMeta {\n\n    public GenericCommonMeta() {\n        super();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericContainerElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.deploy.DeployExtraMeta;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.source.ContainerElementManipulator;\n\nimport java.util.Map;\n\npublic class GenericContainerElement extends ArchElementNode implements ContainerElement{\n\n    protected String status;\n\n    protected boolean  enable;\n\n    protected DeployExtraMeta extraMeta;\n\n\n    protected ContainerElementManipulator ContainerElementManipulator;\n\n\n    private void initSelf( Map<String, Object > joEntity ) {\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        if ( this.szElementaryConfig != null ) {\n            this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig );\n        }\n    }\n\n    public GenericContainerElement(DeployInstrument deployInstrument, ContainerElementManipulator containerElementManipulator) {\n        super(deployInstrument);\n        this.ContainerElementManipulator = containerElementManipulator;\n    }\n\n    public GenericContainerElement( Map<String, Object > joEntity, DeployInstrument deployInstrument ) {\n        super( joEntity, deployInstrument);\n        this.initSelf( joEntity );\n    }\n\n    public GenericContainerElement( Map<String, Object > joEntity ) {\n        super( joEntity );\n        this.initSelf( joEntity );\n    }\n    public GenericContainerElement( DeployInstrument deployInstrument ) {\n        super(deployInstrument);\n    }\n\n\n    public GenericContainerElement() {\n        super();\n    }\n    \n    \n    \n    @Override\n    public void setStatus(String status) {\n        this.status = status;\n    }\n\n    @Override\n    public String getStatus() {\n        return this.status;\n    }\n\n    @Override\n    
public boolean isEnable() {\n        return this.enable;\n    }\n\n    @Override\n    public void setEnable(boolean enable) {\n            this.enable = enable;\n    }\n\n    @Override\n    public DeployExtraMeta getExtraMeta() {\n        return this.extraMeta;\n    }\n\n    @Override\n    public void setExtraMeta(DeployExtraMeta extraMeta) {\n                this.extraMeta = extraMeta;\n    }\n\n    @Override\n    public DeployElement getAffiliateDeployment() {\n        return null;\n    }\n    @Override\n    public String getIpAddress() {\n        return this.ipAddress;\n    }\n\n    @Override\n    public void setIpAddress(String ipAddress) {\n        this.ipAddress = ipAddress;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericDeployInsMapping.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic class GenericDeployInsMapping implements DeployInsMapping {\n    protected long                      mEnumId;\n\n    protected GUID                      mDeployGuid;\n\n    protected GUID                      mServiceInsGuid;\n\n    protected LocalDateTime             mCreateTime;\n\n    protected LocalDateTime             mUpdateTime;\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.mEnumId = enumId;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.mEnumId;\n    }\n\n    @Override\n    public void setDeployGuid(GUID deployGuid) {\n        this.mDeployGuid = deployGuid;\n    }\n\n    @Override\n    public GUID getDeployGuid() {\n        return this.mDeployGuid;\n    }\n\n    @Override\n    public void setServiceInsGuid(GUID serviceInsGuid) {\n        this.mServiceInsGuid = serviceInsGuid;\n    }\n\n    @Override\n    public GUID getServiceInsGuid() {\n        return this.mServiceInsGuid;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.mCreateTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.mCreateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.mUpdateTime = updateTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.mUpdateTime;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericNamespace.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic class GenericNamespace extends ArchElementNode implements Namespace {\n    protected GUID                        metaGuid;\n\n    protected GUIDImperialTrieNode        distributedTreeNode;\n\n    protected DeployNamespaceManipulator  namespaceManipulator;\n\n\n    public GenericNamespace() {\n        super();\n    }\n\n    public GenericNamespace( Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericNamespace( Map<String, Object > joEntity, DeployInstrument deployInstrument) {\n        super( joEntity, deployInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericNamespace( DeployInstrument deployInstrument) {\n        super(deployInstrument);\n    }\n\n    public GenericNamespace(DeployInstrument deployInstrument, DeployNamespaceManipulator namespaceManipulator ) {\n        this(deployInstrument);\n        this.namespaceManipulator = namespaceManipulator;\n    }\n\n    @Override\n    public GUIDImperialTrieNode getDistributedTreeNode() {\n        return this.distributedTreeNode;\n    }\n\n    @Override\n    public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) {\n        this.distributedTreeNode = 
distributedTreeNode;\n    }\n\n    @Override\n    public GUID getMetaGuid() {\n        return this.metaGuid;\n    }\n\n    @Override\n    public void setMetaGuid( GUID metaGuid ) {\n        this.metaGuid = metaGuid;\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        Collection<ElementNode > children = this.fetchChildren();\n        JSONObject jo = new JSONMaptron();\n\n        for( ElementNode node : children ) {\n            jo.put( node.getName(), node.toJSONObject() );\n        }\n        return jo;\n    }\n\n    @Override\n    public JSONObject toJSONDetails() {\n        return BeanColonist.DirectColonist.populate( this, ElementNode.UnbeanifiedKeys );\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"guid\"        , this.getGuid()            ),\n                new KeyValue<>( \"name\"        , this.getName()            )\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.name;\n    }\n\n    @Override\n    public Collection<ElementNode > fetchChildren() {\n        return super.fetchChildren();\n    }\n\n    @Override\n    public Collection<GUID > fetchChildrenGuids() {\n        return super.fetchChildrenGuids();\n    }\n\n    @Override\n    public void addChild( ElementNode child ) {\n        super.addChild( child );\n    }\n\n    @Override\n    public boolean containsChild( String childName ) {\n        return super.containsChild( childName );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericPhysicalHostElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\n\nimport java.util.Map;\n\npublic class GenericPhysicalHostElement extends ArchServerElement implements PhysicalHostElement {\n\n    protected String hardwareSpecs;\n\n    protected String status;\n\n    private void initSelf( Map<String, Object > joEntity ) {\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        if ( this.szElementaryConfig != null ) {\n            this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig );\n        }\n    }\n\n    public GenericPhysicalHostElement(Map<String, Object> joEntity) {\n        super(joEntity);\n    }\n\n    public GenericPhysicalHostElement(DeployInstrument deployInstrument) {\n        super(deployInstrument);\n    }\n\n    public GenericPhysicalHostElement() {\n        super();\n    }\n\n    public GenericPhysicalHostElement( Map<String, Object > joEntity, DeployInstrument deployInstrument ) {\n        super( joEntity, deployInstrument);\n        this.initSelf( joEntity );\n    }\n\n\n    @Override\n    public void setHardwareSpecs(String hardwareSpecs) {\n        this.hardwareSpecs = hardwareSpecs;\n    }\n\n    @Override\n    public String getHardwareSpecs() {\n        return this.hardwareSpecs;\n    }\n\n    @Override\n    public void setStatus(String status) {\n        this.status = status;\n    }\n\n    @Override\n    public String getStatus() {\n        return this.status;\n    }\n    @Override\n    public String getIpAddress() {\n        return this.ipAddress;\n    }\n\n    @Override\n    public void setIpAddress(String ipAddress) {\n        this.ipAddress = ipAddress;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericQuickElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.deploy.DeployExtraMeta;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.source.QuickElementManipulator;\n\nimport java.util.Map;\n\npublic class GenericQuickElement extends ArchElementNode implements QuickElement {\n\n    protected String typeName;\n\n    protected boolean enable;\n\n    protected  DeployExtraMeta extraMeta;\n\n    protected QuickElementManipulator quickElementManipulator;\n\n\n    private void initSelf( Map<String, Object > joEntity ) {\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        if ( this.szElementaryConfig != null ) {\n            this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig );\n        }\n    }\n\n    public GenericQuickElement(DeployInstrument deployInstrument, QuickElementManipulator quickElementManipulator) {\n        super(deployInstrument);\n        this.quickElementManipulator = quickElementManipulator;\n    }\n\n    public GenericQuickElement( Map<String, Object > joEntity, DeployInstrument deployInstrument ) {\n        super( joEntity, deployInstrument);\n        this.initSelf( joEntity );\n    }\n\n    public GenericQuickElement( Map<String, Object > joEntity ) {\n        super( joEntity );\n        this.initSelf( joEntity );\n    }\n    public GenericQuickElement( DeployInstrument deployInstrument ) {\n        super(deployInstrument);\n    }\n\n\n    public GenericQuickElement() {\n        super();\n    }\n\n    @Override\n    public String getTypeName() {\n        return this.typeName;\n    }\n\n    @Override\n    public void setTypeName(String typeName) {\n        this.typeName = typeName;\n    }\n\n    @Override\n    public boolean isEnable() {\n        return this.enable;\n    
}\n\n    @Override\n    public void setEnable(boolean enable) {\n           this.enable = enable;\n    }\n\n    @Override\n    public DeployExtraMeta getExtraMeta() {\n        return this.extraMeta;\n    }\n\n    @Override\n    public void setExtraMeta(DeployExtraMeta extraMeta) {\n              this.extraMeta = extraMeta;\n    }\n\n    @Override\n    public DeployElement getAffiliateDeployment() {\n        return null;\n    }\n\n    @Override\n    public void addChild(ElementNode child) {\n        super.addChild( child );\n    }\n\n    @Override\n    public String getIpAddress() {\n        return this.ipAddress;\n    }\n\n    @Override\n    public void setIpAddress(String ipAddress) {\n        this.ipAddress = ipAddress;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/GenericVirtualMachineElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.deploy.DeployExtraMeta;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\n\nimport java.util.Map;\n\npublic class GenericVirtualMachineElement extends ArchServerElement implements VirtualMachineElement {\n\n    protected GUID affiliateHostGuid;\n\n    protected String status;\n\n    protected DeployExtraMeta vmExtraMeta;\n\n    private void initSelf( Map<String, Object > joEntity ) {\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        if ( this.szElementaryConfig != null ) {\n            this.elementaryConfig = (JSONObject) JSON.parse( this.szElementaryConfig );\n        }\n    }\n\n    public GenericVirtualMachineElement( Map<String, Object> joEntity ) {\n        super(joEntity);\n    }\n\n    public GenericVirtualMachineElement( DeployInstrument deployInstrument ) {\n        super(deployInstrument);\n    }\n\n    public GenericVirtualMachineElement() {\n        super();\n    }\n\n    public GenericVirtualMachineElement( Map<String, Object > joEntity, DeployInstrument deployInstrument ) {\n        super( joEntity, deployInstrument);\n        this.initSelf( joEntity );\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public String getIpAddress() {\n        return this.ipAddress;\n    }\n\n    @Override\n    public void setIpAddress(String ipAddress) {\n        this.ipAddress = ipAddress;\n    }\n\n    @Override\n    public String getLocalDomain() {\n        return this.localDomain;\n    }\n\n    @Override\n    public void setLocalDomain(String localDomain) {\n\n    }\n\n    @Override\n    
public String getWideDomain() {\n        return this.wideDomain;\n    }\n\n    @Override\n    public void setWideDomain(String wideDomain) {\n            this.wideDomain =wideDomain ;\n    }\n\n    @Override\n    public String getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public void setStatus(String status) {\n        this.status = status;\n    }\n\n    @Override\n    public GUID getAffiliateHostGuid() {\n        return this.affiliateHostGuid;\n    }\n\n    @Override\n    public void setAffiliateHostGuid(GUID guid) {\n        this.affiliateHostGuid = guid;\n    }\n\n    @Override\n    public DeployExtraMeta getVmExtraMeta() {\n        return this.vmExtraMeta;\n    }\n\n    @Override\n    public DeployExtraMeta getExtraMeta() {\n        return this.extraMeta;\n    }\n\n    @Override\n    public void setExtraMeta(DeployExtraMeta extraMeta) {\n            this.extraMeta = extraMeta;\n    }\n\n    @Override\n    public DeployElement getAffiliateDeployment() {\n        return null;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/Namespace.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport java.util.Set;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic interface Namespace extends FolderElement {\n    Set<String > UnbeanifiedKeys = Set.of( \"distributedTreeNode\", \"classificationRules\" );\n\n    long getEnumId();\n\n    void setEnumId(long id);\n\n    GUID getGuid();\n\n    void setGuid(GUID guid);\n\n    GUID getMetaGuid();\n\n    void setMetaGuid(GUID metaGuid);\n\n    String getName();\n\n    void setName(String name);\n\n\n    GUIDImperialTrieNode getDistributedTreeNode();\n\n    void setDistributedTreeNode(GUIDImperialTrieNode distributedTreeNode);\n\n    @Override\n    default Namespace evinceNamespace() {\n        return this;\n    }\n\n    JSONObject toJSONDetails();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/PhysicalHostElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\npublic interface PhysicalHostElement extends ServerElement {\n\n    void setHardwareSpecs( String hardwareSpecs );\n    String getHardwareSpecs();\n\n    void setStatus( String status );\n    String getStatus();\n\n    default PhysicalHostElement evincePhysicalHostElement() {\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/QuickElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\npublic interface QuickElement extends DeployElement {\n\n    String getTypeName();// e.g. Script, POD\n\n    void setTypeName(String typeName);\n\n\n    @Override\n    default QuickElement evinceQuickElement() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/ServerElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\npublic interface ServerElement extends DeployElement {\n\n    String getLocalDomain();\n\n    void setLocalDomain( String localDomain );\n\n    String getWideDomain();\n\n    void setWideDomain( String wideDomain );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/entity/VirtualMachineElement.java",
    "content": "package com.pinecone.hydra.deploy.kom.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.DeployExtraMeta;\n\npublic interface VirtualMachineElement extends ServerElement {\n\n    @Override\n    default VirtualMachineElement evinceVirtualMachineElement() {\n        return this;\n    }\n\n\n/*    String getName();\n    void setName(String name);*/\n\n    String getIpAddress();\n    void setIpAddress( String ipAddress );\n\n    String getStatus();\n    void setStatus( String status );\n\n\n    GUID getAffiliateHostGuid();\n    void setAffiliateHostGuid( GUID guid );\n\n\n    DeployExtraMeta getVmExtraMeta();\n}"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/marshaling/DeployInstrumentDecoder.java",
    "content": "package com.pinecone.hydra.deploy.kom.marshaling;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.entity.ElementNode;\n\npublic interface DeployInstrumentDecoder extends Pinenut {\n    default ElementNode decode(Object val, GUID parentGUID) {\n        if ( val instanceof Map ) {\n            Map map = (Map) val;\n            if( map.isEmpty() ) {\n                return null;\n            }\n            else if( map.size() > 1 ) {\n                throw new IllegalArgumentException( \"Root element should has at last 1.\" );\n            }\n\n            Map.Entry kv = (Map.Entry) map.entrySet().iterator().next();\n            return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );\n        }\n\n        return null;\n    }\n\n    ElementNode decode(String key, Object val, GUID parentGUID);\n\n    default ElementNode decode(Map.Entry kv, GUID parentGUID) {\n        return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );\n    }\n\n    default ElementNode decode(Object val) {\n        return this.decode( val, null );\n    }\n\n    default ElementNode decode(String key, Object val) {\n        return this.decode( key, val, null );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/marshaling/DeployInstrumentEncoder.java",
    "content": "package com.pinecone.hydra.deploy.kom.marshaling;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.deploy.kom.entity.ElementNode;\n\npublic interface DeployInstrumentEncoder extends Pinenut {\n    Object encode(ElementNode node);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/marshaling/DeployJSONDecoder.java",
    "content": "package com.pinecone.hydra.deploy.kom.marshaling;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.ContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.ElementNode;\nimport com.pinecone.hydra.deploy.kom.entity.FolderElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.deploy.kom.entity.ClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\nimport com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.QuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;\n\npublic class DeployJSONDecoder implements DeployInstrumentDecoder {\n    protected DeployInstrument instrument;\n\n    public DeployJSONDecoder(DeployInstrument instrument ) {\n        this.instrument = instrument;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public ElementNode    decode( String szName, Object o, GUID parentGuid ) {\n        if ( o instanceof Map ) {\n            return (ElementNode) this.instrument.get( this.decodeJSONObject( szName, (Map<String, Object>) o, parentGuid ).getGuid() );\n        }\n\n        throw new IllegalArgumentException( \"Elements of `DeployInstrument` should all be object.\" );\n    }\n\n    protected Namespace   newNamespace( String szName, Map<String, Object > jo ) {\n        Namespace ns = new GenericNamespace( jo, this.instrument );\n        ns.setName( szName );\n\n        return ns;\n  
  }\n\n    protected Object[]    affirmNSExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        Namespace ns = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceNamespace() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be namespace.\", szName )\n                    );\n                }\n\n                ns = rootE.evinceNamespace();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            if( parentNode instanceof Namespace ) {\n                Collection<ElementNode> destChildren = parentNode.evinceNamespace().fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof Namespace ) {\n                            ns = (Namespace) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"<DeployInstrument> Existed child-destination [%s] should be namespace.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n        }\n\n\n        GUID currentGuid;\n        if( ns == null ) {\n            ns = this.newNamespace( szName, jo );\n            currentGuid  = this.instrument.put( ns );\n            this.instrument.affirmOwnedNode( parentGuid, currentGuid );\n        }\n        else {\n            currentGuid = ns.getGuid();\n        }\n        return new Object[] { ns, currentGuid };\n    }\n\n    protected Object[]    affirmClusterExisted( String szName, GUID parentGuid, 
Map<String, Object > jo ) {\n        ClusterElement cluster = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceClusterElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be `AppElement`.\", szName )\n                    );\n                }\n\n                cluster = rootE.evinceClusterElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            if( parentNode instanceof Namespace ) {\n                Collection<ElementNode> destChildren = parentNode.evinceNamespace().fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof ClusterElement) {\n                            cluster = (ClusterElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `AppElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n        }\n\n\n\n        ClusterElement neo ;\n        if( cluster == null ) {\n            neo = new GenericClusterElement( jo, this.instrument );\n            neo.setName( szName );\n        }\n        else {\n            neo = cluster;\n        }\n        return new Object[] { cluster, neo };\n    }\n\n    protected Object[]    affirmPhyExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        PhysicalHostElement dep = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = 
this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evincePhysicalHostElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be `TaskElement`.\", szName )\n                    );\n                }\n\n                dep = rootE.evincePhysicalHostElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            Collection<ElementNode> destChildren;\n            if( parentNode instanceof FolderElement ) {\n                destChildren = ( (FolderElement) parentNode ).fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof PhysicalHostElement ) {\n                            dep = (PhysicalHostElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `PhysicalHostElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n            else {\n                throw new IllegalStateException(\n                        String.format( \"Parent of `TaskElement` [%s] should be `FolderElement`.\", szName )\n                );\n            }\n        }\n\n\n\n        PhysicalHostElement neo ;\n        if( dep == null ) {\n            neo = new GenericPhysicalHostElement( jo, this.instrument );\n            neo.setName( szName );\n        }\n        else {\n            neo = dep;\n        }\n        return new Object[] { dep, neo };\n    }\n\n    protected Object[]    affirmVMExisted( String szName, GUID parentGuid, Map<String, 
Object > jo ) {\n        VirtualMachineElement dep = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceVirtualMachineElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be `TaskElement`.\", szName )\n                    );\n                }\n\n                dep = rootE.evinceVirtualMachineElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            Collection<ElementNode> destChildren;\n            if( parentNode instanceof FolderElement ) {\n                destChildren = ( (FolderElement) parentNode ).fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof VirtualMachineElement ) {\n                            dep = (VirtualMachineElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `VirtualMachineElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n            else {\n                throw new IllegalStateException(\n                        String.format( \"Parent of `TaskElement` [%s] should be `FolderElement`.\", szName )\n                );\n            }\n        }\n\n\n\n        VirtualMachineElement neo ;\n        if( dep == null ) {\n            neo = new GenericVirtualMachineElement( jo, this.instrument );\n            neo.setName( szName );\n        }\n        else {\n            neo = dep;\n        
}\n        return new Object[] { dep, neo };\n    }\n\n    protected Object[]    affirmQuickExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        QuickElement dep = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceQuickElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be `TaskElement`.\", szName )\n                    );\n                }\n\n                dep = rootE.evinceQuickElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            Collection<ElementNode> destChildren;\n            if( parentNode instanceof FolderElement ) {\n                destChildren = ( (FolderElement) parentNode ).fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof QuickElement ) {\n                            dep = (QuickElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `QuickElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n            else {\n                throw new IllegalStateException(\n                        String.format( \"Parent of `TaskElement` [%s] should be `FolderElement`.\", szName )\n                );\n            }\n        }\n\n\n\n        QuickElement neo ;\n        if( dep == null ) {\n            neo = new GenericQuickElement( jo, this.instrument );\n            
neo.setName( szName );\n        }\n        else {\n            neo = dep;\n        }\n        return new Object[] { dep, neo };\n    }\n\n    protected Object[]    affirmContainerExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        ContainerElement dep = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceContainerElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be `TaskElement`.\", szName )\n                    );\n                }\n\n                dep = rootE.evinceContainerElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            Collection<ElementNode> destChildren;\n            if( parentNode instanceof FolderElement ) {\n                destChildren = ( (FolderElement) parentNode ).fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof ContainerElement ) {\n                            dep = (ContainerElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `ContainerElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n            else {\n                throw new IllegalStateException(\n                        String.format( \"Parent of `TaskElement` [%s] should be `FolderElement`.\", szName )\n                );\n            }\n        }\n\n\n\n        ContainerElement neo ;\n    
    if( dep == null ) {\n            neo = new GenericContainerElement( jo, this.instrument );\n            neo.setName( szName );\n        }\n        else {\n            neo = dep;\n        }\n        return new Object[] { dep, neo };\n    }\n\n\n    protected Object[]    decodeExternalElements( String szMetaType, String szName, GUID parentGuid, Map<String, Object > jo ) throws IllegalArgumentException {\n        throw new IllegalArgumentException( \"Unknown metaType '\" + szMetaType + \"'.\" );\n    }\n\n    protected void        decodeChildren ( Map jo, GUID currentGuid ) {\n        for ( Object o : jo.entrySet() ) {\n            Map.Entry kv = (Map.Entry) o;\n            Object   val = kv.getValue();\n            if( val instanceof Map ) {\n                this.decode( kv.getKey().toString(), val, currentGuid );\n            }\n        }\n    }\n\n    protected ElementNode decodeJSONObject( String szName, Map<String, Object > jo, GUID parentGuid ) {\n        String szMetaType = (String) jo.get( \"metaType\" );\n        boolean isNamespace = szMetaType == null || szMetaType.equals( Namespace.class.getSimpleName() );\n        ElementNode elementNode;\n        GUID currentGuid;\n\n        if ( isNamespace ) {\n            Object[] pair = this.affirmNSExisted( szName, parentGuid, jo );\n            Namespace     ns = (Namespace) pair[ 0 ];\n            currentGuid      = (GUID)      pair[ 1 ];\n\n            this.decodeChildren( jo, currentGuid );\n\n            elementNode = ns;\n        }\n        else {\n            Object[] pair;\n            boolean bIsFolderElement = false;\n            if( szMetaType.equals( ClusterElement.class.getSimpleName() ) ) {\n                pair = this.affirmClusterExisted( szName, parentGuid, jo );\n                bIsFolderElement = true;\n            }\n            else if( szMetaType.equals( PhysicalHostElement.class.getSimpleName() ) ) {\n                pair = this.affirmPhyExisted( szName, parentGuid, jo );\n            }\n  
          else if( szMetaType.equals( VirtualMachineElement.class.getSimpleName() ) ) {\n                pair = this.affirmVMExisted( szName, parentGuid, jo );\n            }\n            else if( szMetaType.equals( QuickElement.class.getSimpleName() ) ) {\n                pair = this.affirmQuickExisted( szName, parentGuid, jo );\n            }\n            else if( szMetaType.equals( ContainerElement.class.getSimpleName() ) ) {\n                pair = this.affirmContainerExisted( szName, parentGuid, jo );\n            }\n            else {\n                try{\n                    pair = this.decodeExternalElements( szMetaType, szName, parentGuid, jo );\n                }\n                catch ( RuntimeException e ) {\n                    throw new IllegalArgumentException( e );\n                }\n            }\n\n            ElementNode          arc = (ElementNode) pair[ 0 ];\n            ElementNode          neo = (ElementNode) pair[ 1 ];\n\n            if( arc == null ) {\n                currentGuid = this.instrument.put( neo );\n                this.instrument.affirmOwnedNode( parentGuid, currentGuid );\n            }\n            else {\n                currentGuid = arc.getGuid();\n                this.instrument.update( neo );\n            }\n\n            if( bIsFolderElement ) {\n                Object services = jo.get( \"deployments\" );\n                if( services instanceof Map ) {\n                    Map joSer = (Map) services;\n                    this.decodeChildren( joSer, currentGuid );\n                }\n            }\n\n            elementNode = neo;\n        }\n\n        return elementNode;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/marshaling/DeployJSONEncoder.java",
    "content": "package com.pinecone.hydra.deploy.kom.marshaling;\n\n\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.ElementNode;\n\npublic class DeployJSONEncoder implements DeployInstrumentEncoder {\n    protected DeployInstrument instrument;\n\n    public DeployJSONEncoder(DeployInstrument instrument ) {\n        this.instrument = instrument;\n    }\n\n    @Override\n    public Object encode( ElementNode node ) {\n        return node.toJSONObject();\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ArchElementOperator.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.CommonMeta;\nimport com.pinecone.hydra.deploy.kom.entity.ElementNode;\nimport com.pinecone.hydra.deploy.kom.source.NodeMetaManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic abstract class ArchElementOperator implements ElementOperator {\n    protected DeployInstrument              deployInstrument;\n    protected ImperialTree                  imperialTree;\n    protected NodeMetaManipulator           nodeMetaManipulator;\n    protected DeployMasterManipulator       deployMasterManipulator;\n    protected ElementOperatorFactory        factory;\n\n    public ArchElementOperator( ElementOperatorFactory factory ){\n        this( factory.getTaskMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public ArchElementOperator(DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument){\n        this.imperialTree = deployInstrument.getMasterTrieTree();\n        this.deployInstrument = deployInstrument;\n        this.nodeMetaManipulator = masterManipulator.getNodeMetaManipulator();\n        this.deployMasterManipulator = masterManipulator;\n        //this.factory = new GenericServiceOperatorFactory(servicesTree,masterManipulator);\n    }\n\n    public ElementOperatorFactory getOperatorFactory() {\n        return this.factory;\n    }\n\n    protected void applyCommonMeta( ElementNode ele, CommonMeta commonMeta ){\n        if( commonMeta != null ) {\n            ele.setGuid                     ( commonMeta.getGuid()                     );\n            ele.setExtraInformation         ( commonMeta.getExtraInformation()         );\n            ele.setDescription              ( commonMeta.getDescription()              );\n            ele.setIpAddress            
    ( commonMeta.getIpAddress()                );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ClusterElementOperator.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.ClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.deploy.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.deploy.kom.source.ClusterNodeManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class ClusterElementOperator extends ArchElementOperator implements ElementOperator {\n    protected ClusterNodeManipulator jobNodeManipulator;\n\n    public ClusterElementOperator(ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public ClusterElementOperator(DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument){\n        super( masterManipulator, deployInstrument);\n        this.jobNodeManipulator = masterManipulator.getJobNodeManipulator();\n    }\n\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericClusterElement jobElement = (GenericClusterElement) treeNode;\n\n        GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator();\n        GUID jobNodeGUID = guidAllocator.nextGUID();\n        jobElement.setGuid( jobNodeGUID );\n        this.jobNodeManipulator.insert( jobElement );\n\n        //将应用元信息存入元信息表\n        this.nodeMetaManipulator.insert( jobElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setNodeMetadataGUID(jobNodeGUID);\n        
node.setGuid(jobNodeGUID);\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return jobNodeGUID;\n    }\n\n\n    @Override\n    public void purge( GUID guid ) {\n        //namespace节点需要递归删除其拥有节点若其引用节点，没有其他引用则进行清理\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren(guid);\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        if ( !childNodes.isEmpty() ){\n            List<GUID > subordinates = this.imperialTree.getSubordinates(guid);\n            if ( !subordinates.isEmpty() ){\n                for ( GUID subordinateGuid : subordinates ){\n                    this.purge( subordinateGuid );\n                }\n            }\n            childNodes = this.imperialTree.getChildren( guid );\n            for( GUIDImperialTrieNode childNode : childNodes ){\n                List<GUID > parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid());\n                if ( parentNodes.size() > 1 ){\n                    this.imperialTree.removeInheritance(childNode.getGuid(),guid);\n                }\n                else {\n                    this.purge( childNode.getGuid() );\n                }\n            }\n        }\n\n        if ( node.getType().getObjectName().equals( GenericNamespace.class.getName() ) ){\n            this.removeNode(guid);\n        }\n        else {\n            UOI uoi = node.getType();\n            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );\n            if( metaType == null ) {\n                TreeNode newInstance = (TreeNode)uoi.newInstance( new Class<? 
>[]{ DeployInstrument.class }, this.deployInstrument);\n                metaType = newInstance.getMetaType();\n            }\n\n            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );\n            operator.purge( guid );\n        }\n    }\n\n    @Override\n    public ClusterElement get(GUID guid ) {\n        ClusterElement clusterElement;\n        clusterElement = this.jobNodeManipulator.getClusterElement( guid, this.deployInstrument);\n        this.applyCommonMeta(clusterElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) );\n\n        clusterElement.setGuid(clusterElement.getGuid());\n        return clusterElement;\n    }\n\n    @Override\n    public ClusterElement get(GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public ClusterElement getAsRootDepth(GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n        GenericClusterElement applicationElement = (GenericClusterElement) treeNode;\n        this.jobNodeManipulator.update( applicationElement );\n        this.nodeMetaManipulator.update( applicationElement );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    protected void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath(guid);\n        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );\n        this.jobNodeManipulator.remove( node.getGuid( ));\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ContainerElementOperator.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.ContainerElement;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.deploy.kom.source.ContainerElementManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class ContainerElementOperator extends ArchElementOperator implements ElementOperator{\n\n    protected ContainerElementManipulator containerElementManipulator;\n\n\n\n    public ContainerElementOperator(ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public ContainerElementOperator(DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument){\n        super( masterManipulator, deployInstrument);\n\n        this.containerElementManipulator = masterManipulator.getContainerElementManipulator();\n    }\n\n\n    @Override\n    public GUID insert(TreeNode treeNode ) {\n        GenericContainerElement containerElement = ( GenericContainerElement ) treeNode;\n\n        //将信息写入数据库\n        //将节点信息存入应用节点表\n        GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator();\n        GUID taskNodeGUID = guidAllocator.nextGUID();\n        containerElement.setGuid(taskNodeGUID);\n\n        this.containerElementManipulator.insert( containerElement );\n        //将应用元信息存入元信息表\n        this.nodeMetaManipulator.insert( containerElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setNodeMetadataGUID( taskNodeGUID ); // 
Since 20250419, the meta has been merged into the `node`.\n        node.setGuid( taskNodeGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return taskNodeGUID;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        this.removeNode( guid );\n    }\n\n    @Override\n    public ContainerElement get( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        ContainerElement ContainerElement   = this.containerElementManipulator.getContainerElement( guid, this.deployInstrument);\n        //TODO\n/*\n        this.applyCommonMeta( ContainerElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) );\n*/\n\n        ContainerElement.setDistributedTreeNode(node);\n\n        ContainerElement.setGuid( guid );\n\n        return ContainerElement;\n    }\n\n    @Override\n    public ContainerElement get(GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public ContainerElement getAsRootDepth(GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        ContainerElement serviceElement = (ContainerElement) nodeWideData;\n        this.containerElementManipulator.update( serviceElement );\n        this.nodeMetaManipulator.update( serviceElement );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.containerElementManipulator.remove( node.getGuid() );\n        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ElementOperator.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.entity.ElementNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface ElementOperator extends TreeNodeOperator {\n    @Override\n    ElementNode get(GUID guid);\n\n    @Override\n    ElementNode get(GUID guid, int depth);\n\n    @Override\n    ElementNode getAsRootDepth(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/ElementOperatorFactory.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.ClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.ContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\nimport com.pinecone.hydra.deploy.kom.entity.DeployElement;\nimport com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.QuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface ElementOperatorFactory extends OperatorFactory {\n    String DefaultServiceNode     =  DeployElement.class.getSimpleName();\n    String DefaultNamespace       =  Namespace.class.getSimpleName();\n    String DefaultApplicationNode =  ClusterElement.class.getSimpleName();\n    String DefaultVirtualMachine  =  VirtualMachineElement.class.getSimpleName();\n    String DefaultPhysicalHost    =  PhysicalHostElement.class.getSimpleName();\n    String DefaultQuickElement    =  QuickElement.class.getSimpleName();\n    String DefaultContainerElement =  ContainerElement.class.getSimpleName();\n\n\n    void register(String typeName, TreeNodeOperator functionalNodeOperation);\n\n    void registerMetaType(Class<?> clazz, String metaType);\n\n    void registerMetaType(String classFullName, String metaType);\n\n    String getMetaType(String classFullName);\n\n    ElementOperator getOperator(String typeName);\n\n    DeployInstrument getServicesTree();\n\n    DeployMasterManipulator getTaskMasterManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/GenericElementOperatorFactory.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.TreeMap;\n\nimport com.pinecone.hydra.deploy.kom.entity.GenericContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.unit.iqueue.entity.GenericQueueElement;\n\npublic class GenericElementOperatorFactory implements ElementOperatorFactory {\n    protected DeployMasterManipulator deployMasterManipulator;\n    protected DeployInstrument deployInstrument;\n    protected Map<String, TreeNodeOperator> registerer = new HashMap<>();\n\n    protected Map<String, String >             metaTypeMap = new TreeMap<>();\n\n    protected void registerDefaultMetaType( Class<?> genericType ) {\n        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace(\"Generic\",\"\") );\n    }\n\n    protected void registerDefaultMetaTypes() {\n        this.registerDefaultMetaType( GenericNamespace.class );\n        this.registerDefaultMetaType( GenericClusterElement.class );\n        this.registerDefaultMetaType( GenericVirtualMachineElement.class );\n        this.registerDefaultMetaType( GenericPhysicalHostElement.class );\n        this.registerDefaultMetaType( GenericQueueElement.class);\n        this.registerDefaultMetaType( GenericContainerElement.class);\n    }\n\n    public GenericElementOperatorFactory(DeployInstrument deployInstrument, DeployMasterManipulator deployMasterManipulator){\n        this.deployInstrument = deployInstrument;\n        this.deployMasterManipulator = 
deployMasterManipulator;\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultApplicationNode,\n                new ClusterElementOperator(this)\n        );\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultNamespace,\n                new NamespaceOperator(this)\n        );\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultVirtualMachine,\n                new VirtualMachineElementOperator(this)\n        );\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultPhysicalHost,\n                new PhysicalHostElementOperator(this)\n        );\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultQuickElement,\n                new QuickElementOperator(this)\n        );\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultContainerElement,\n                new ContainerElementOperator(this)\n        );\n        this.registerDefaultMetaTypes();\n    }\n    @Override\n    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {\n        this.registerer.put( typeName, functionalNodeOperation );\n    }\n\n    @Override\n    public void registerMetaType( Class<?> clazz, String metaType ){\n        this.registerMetaType( clazz.getName(), metaType );\n    }\n\n    @Override\n    public void registerMetaType( String classFullName, String metaType ){\n        this.metaTypeMap.put( classFullName, metaType );\n    }\n\n    @Override\n    public DeployInstrument getServicesTree() {\n        return this.deployInstrument;\n    }\n\n    @Override\n    public DeployMasterManipulator getTaskMasterManipulator() {\n        return this.deployMasterManipulator;\n    }\n\n    @Override\n    public String getMetaType( String classFullName ) {\n        return this.metaTypeMap.get( classFullName );\n    }\n\n    @Override\n    public ElementOperator getOperator(String typeName ) {\n        //Debug.trace( 
this.registerer.toString() );\n        return (ElementOperator) this.registerer.get( typeName );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/NamespaceOperator.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.deploy.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class NamespaceOperator extends ArchElementOperator implements ElementOperator {\n    protected DeployNamespaceManipulator namespaceManipulator;\n\n    public NamespaceOperator( ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public NamespaceOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ){\n        super( masterManipulator, deployInstrument);\n        this.namespaceManipulator = masterManipulator.getNamespaceManipulator();\n    }\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericNamespace ns = ( GenericNamespace ) treeNode;\n\n        //存节点基础信息\n        GuidAllocator          guidAllocator = this.deployInstrument.getGuidAllocator();\n        GUID              namespaceRulesGuid = ns.getGuid();\n\n        GUID namespaceGuid = guidAllocator.nextGUID();\n        ns.setGuid( namespaceGuid );\n        this.namespaceManipulator.insert( ns );\n\n        //存元信息\n        GUID metadataGUID = guidAllocator.nextGUID();\n        ns.setMetaGuid( metadataGUID );\n        this.nodeMetaManipulator.insertNS( ns 
);\n\n\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setBaseDataGUID( namespaceRulesGuid );\n        node.setGuid( namespaceGuid );\n        node.setNodeMetadataGUID( metadataGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return namespaceGuid;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        //namespace节点需要递归删除其拥有节点若其引用节点，没有其他引用则进行清理\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren(guid);\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        if ( !childNodes.isEmpty() ){\n            List<GUID > subordinates = this.imperialTree.getSubordinates(guid);\n            if ( !subordinates.isEmpty() ){\n                for ( GUID subordinateGuid : subordinates ){\n                    this.purge( subordinateGuid );\n                }\n            }\n            childNodes = this.imperialTree.getChildren( guid );\n            for( GUIDImperialTrieNode childNode : childNodes ){\n                List<GUID > parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid());\n                if ( parentNodes.size() > 1 ){\n                    this.imperialTree.removeInheritance(childNode.getGuid(),guid);\n                }\n                else {\n                    this.purge( childNode.getGuid() );\n                }\n            }\n        }\n\n        if ( node.getType().getObjectName().equals(GenericNamespace.class.getName()) ||  node.getType().getObjectName().equals(GenericClusterElement.class.getName())){\n            this.removeNode(guid);\n        }\n        else {\n            UOI uoi = node.getType();\n            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );\n            if( metaType == null ) {\n                TreeNode newInstance = (TreeNode)uoi.newInstance( new Class<? 
>[]{ DeployInstrument.class }, this.deployInstrument);\n                metaType = newInstance.getMetaType();\n            }\n\n            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );\n            operator.purge( guid );\n        }\n    }\n\n    @Override\n    public Namespace get( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        GenericNamespace                      namespace = new GenericNamespace( this.deployInstrument);\n        GUIDImperialTrieNode guidDistributedTrieNode = this.imperialTree.getNode( node.getGuid() );\n\n        GUID metaGuid = guidDistributedTrieNode.getNodeMetadataGUID();\n        namespace.setDistributedTreeNode( guidDistributedTrieNode );\n        namespace.setName( this.namespaceManipulator.getNamespace( guid ).getName() );\n        this.applyCommonMeta( namespace, this.nodeMetaManipulator.getNodeCommonMeta( metaGuid ) ); // GUID / MetaGUID difference.\n        namespace.setGuid( guid );\n        namespace.setMetaGuid( metaGuid );\n\n        return namespace;\n    }\n\n    @Override\n    public Namespace get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public Namespace getAsRootDepth( GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        GenericNamespace ns = ( GenericNamespace ) nodeWideData;\n        this.namespaceManipulator.update( ns );\n        this.nodeMetaManipulator.update( ns );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    protected void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.namespaceManipulator.remove( node.getGuid() );\n        this.nodeMetaManipulator.remove( node.getAttributesGUID() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/PhysicalHostElementOperator.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.deploy.kom.source.PhysicalHostManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class PhysicalHostElementOperator extends ArchElementOperator implements ElementOperator{\n    protected PhysicalHostManipulator physicalHostManipulator;\n\n\n\n    public PhysicalHostElementOperator( ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public PhysicalHostElementOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ){\n        super( masterManipulator, deployInstrument);\n\n        this.physicalHostManipulator = masterManipulator.getPhysicalHostManipulator();\n    }\n\n\n    @Override\n    public GUID insert(TreeNode treeNode ) {\n        GenericPhysicalHostElement physicalHostElement = ( GenericPhysicalHostElement ) treeNode;\n\n        //将信息写入数据库\n        //将节点信息存入应用节点表\n        GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator();\n        GUID taskNodeGUID = guidAllocator.nextGUID();\n        physicalHostElement.setGuid(taskNodeGUID);\n\n        this.physicalHostManipulator.insert( physicalHostElement );\n        //将应用元信息存入元信息表\n        this.nodeMetaManipulator.insert( physicalHostElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setNodeMetadataGUID( 
taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.\n        node.setGuid( taskNodeGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return taskNodeGUID;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        this.removeNode( guid );\n    }\n\n    @Override\n    public PhysicalHostElement get(GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        PhysicalHostElement physicalHostElement   = this.physicalHostManipulator.getPhysicalHostElement( guid, this.deployInstrument);\n        //TODO\n/*\n        this.applyCommonMeta( physicalHostElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) );\n*/\n\n        physicalHostElement.setDistributedTreeNode(node);\n\n        physicalHostElement.setGuid( guid );\n\n        return physicalHostElement;\n    }\n\n    @Override\n    public PhysicalHostElement get(GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public PhysicalHostElement getAsRootDepth(GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        PhysicalHostElement serviceElement = (PhysicalHostElement) nodeWideData;\n        this.physicalHostManipulator.update( serviceElement );\n        this.nodeMetaManipulator.update( serviceElement );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.physicalHostManipulator.remove( node.getGuid() );\n        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );\n    }\n    \n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/QuickElementOperator.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.QuickElement;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.deploy.kom.source.QuickElementManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class QuickElementOperator extends ArchElementOperator implements ElementOperator{\n    protected QuickElementManipulator quickElementManipulator;\n\n\n\n    public QuickElementOperator( ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public QuickElementOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ){\n        super( masterManipulator, deployInstrument);\n\n        this.quickElementManipulator = masterManipulator.getQuickElementManipulator();\n    }\n\n\n    @Override\n    public GUID insert(TreeNode treeNode ) {\n        GenericQuickElement quickElement = ( GenericQuickElement ) treeNode;\n\n        //将信息写入数据库\n        //将节点信息存入应用节点表\n        GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator();\n        GUID taskNodeGUID = guidAllocator.nextGUID();\n        quickElement.setGuid(taskNodeGUID);\n\n        this.quickElementManipulator.insert( quickElement );\n        //将应用元信息存入元信息表\n        this.nodeMetaManipulator.insert( quickElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setNodeMetadataGUID( taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.\n        
node.setGuid( taskNodeGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return taskNodeGUID;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        this.removeNode( guid );\n    }\n\n    @Override\n    public QuickElement get(GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        QuickElement quickElement   = this.quickElementManipulator.getQuickElement( guid, this.deployInstrument);\n    //TODO\n/*\n        this.applyCommonMeta( quickElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) );\n*/\n\n        quickElement.setDistributedTreeNode(node);\n\n        quickElement.setGuid( guid );\n\n        return quickElement;\n    }\n\n    @Override\n    public QuickElement get(GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public QuickElement getAsRootDepth(GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        QuickElement quickElement = (QuickElement) nodeWideData;\n        this.quickElementManipulator.update( quickElement );\n        this.nodeMetaManipulator.update( quickElement );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.quickElementManipulator.remove( node.getGuid() );\n        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/operator/VirtualMachineElementOperator.java",
    "content": "package com.pinecone.hydra.deploy.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;\n\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.deploy.kom.source.VirtualMachineManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class VirtualMachineElementOperator extends ArchElementOperator implements ElementOperator{\n\n    protected VirtualMachineManipulator virtualMachineManipulator;\n\n\n\n    public VirtualMachineElementOperator( ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public VirtualMachineElementOperator( DeployMasterManipulator masterManipulator, DeployInstrument deployInstrument ){\n        super( masterManipulator, deployInstrument);\n\n        this.virtualMachineManipulator = masterManipulator.getVirtualMachineManipulator();\n    }\n\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericVirtualMachineElement virtualMachineElement = ( GenericVirtualMachineElement ) treeNode;\n\n        //将信息写入数据库\n        //将节点信息存入应用节点表\n        GuidAllocator guidAllocator = this.deployInstrument.getGuidAllocator();\n        GUID taskNodeGUID = guidAllocator.nextGUID();\n        virtualMachineElement.setGuid(taskNodeGUID);\n\n        this.virtualMachineManipulator.insert( virtualMachineElement );\n        //将应用元信息存入元信息表\n        this.nodeMetaManipulator.insert( virtualMachineElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n     
   node.setNodeMetadataGUID( taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.\n        node.setGuid( taskNodeGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return taskNodeGUID;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        this.removeNode( guid );\n    }\n\n    @Override\n    public VirtualMachineElement get(GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        VirtualMachineElement virtualMachineElement   = this.virtualMachineManipulator.getDeployNode( guid, this.deployInstrument);\n        //TODO\n/*\n        this.applyCommonMeta( virtualMachineElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) );\n*/\n\n        virtualMachineElement.setDistributedTreeNode(node);\n\n        virtualMachineElement.setGuid( guid );\n\n        return virtualMachineElement;\n    }\n\n    @Override\n    public VirtualMachineElement get(GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public VirtualMachineElement getAsRootDepth(GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        VirtualMachineElement serviceElement = (VirtualMachineElement) nodeWideData;\n        this.virtualMachineManipulator.update( serviceElement );\n        this.nodeMetaManipulator.update( serviceElement );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.virtualMachineManipulator.remove( node.getGuid() );\n        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/ClusterNodeManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.entity.ClusterElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\n\npublic interface ClusterNodeManipulator extends GUIDNameManipulator {\n\n    void insert( ClusterElement clusterElement );\n\n    void remove( GUID guid );\n\n    ClusterElement getClusterElement( GUID guid, DeployInstrument instrument );\n\n    void update( ClusterElement clusterElement );\n\n    List<ClusterElement> fetchJobNodeByName( String name );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/ContainerElementManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.ContainerElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface ContainerElementManipulator extends GUIDNameManipulator {\n\n    void insert(  ContainerElement quickElement );\n\n    ContainerElement getContainerElement( GUID guid, DeployInstrument deployInstrument );\n\n    void update( ContainerElement serviceElement);\n\n\n    void remove( GUID guid );\n\n\n    List< ContainerElement> fetchContainerElementByName( String name );\n\n    @Override\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/DeployMasterManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\n\npublic interface DeployMasterManipulator extends KOIMasterManipulator {\n    TrieTreeManipulator getTrieTreeManipulator() ;\n\n    NodeMetaManipulator getNodeMetaManipulator();\n\n    ClusterNodeManipulator getJobNodeManipulator();\n\n    DeployNodeManipulator getDeployNodeManipulator();\n\n    DeployNamespaceManipulator getNamespaceManipulator();\n\n    TireOwnerManipulator getTireOwnerManipulator();\n\n    PhysicalHostManipulator getPhysicalHostManipulator();\n\n    VirtualMachineManipulator getVirtualMachineManipulator();\n\n    QuickElementManipulator getQuickElementManipulator();\n\n    ContainerElementManipulator getContainerElementManipulator();\n\n    DeployServiceInsMappingManipulator getDeployServiceInsMappingManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/DeployNamespaceManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\n\npublic interface DeployNamespaceManipulator extends GUIDNameManipulator {\n    void insert( Namespace ns );\n\n    void remove( GUID guid );\n\n    Namespace getNamespace( GUID guid );\n\n    void update( Namespace ns );\n\n    List<Namespace > fetchNamespaceNodeByName( String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/DeployNodeManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.DeployElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\npublic interface DeployNodeManipulator extends GUIDNameManipulator {\n\n    void insert( DeployElement deployElement );\n\n    void remove( GUID UUID );\n\n    void update( DeployElement taskElement );\n\n    List<DeployElement> fetchDeployNodeByName( String name );\n\n    @Override\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/DeployServiceInsMappingManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.entity.DeployInsMapping;\n\npublic interface DeployServiceInsMappingManipulator extends Pinenut {\n    void insert( DeployInsMapping deployInsMapping );\n\n    DeployInsMapping queryDeployInsMappingByInsGuid( GUID insGuid );\n\n    DeployInsMapping queryDeployInsMappingByDeployGuid( GUID deployGuid );\n\n    void removeByInsGuid( GUID insGuid );\n\n    void removeByDeployGuid( GUID deployGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/NodeMetaManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployFamilyNode;\nimport com.pinecone.hydra.deploy.kom.entity.CommonMeta;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\n\npublic interface NodeMetaManipulator extends Pinenut {\n\n    void insert( DeployFamilyNode node );\n\n    void insertNS( Namespace node );\n\n    void remove( GUID guid );\n\n    CommonMeta getNodeCommonMeta( GUID guid );\n\n    void update( DeployFamilyNode node );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/PhysicalHostManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface PhysicalHostManipulator extends GUIDNameManipulator {\n\n   /* void insert(PhysicalHost physicalHost);*/\n\n    void insert( PhysicalHostElement physicalHostElement );\n\n    PhysicalHostElement getPhysicalHostElement( GUID guid, DeployInstrument deployInstrument );\n\n    void update( PhysicalHostElement serviceElement );\n\n    void remove( GUID guid );\n\n    @Override\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/QuickElementManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.DeployElement;\nimport com.pinecone.hydra.deploy.kom.entity.QuickElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface QuickElementManipulator extends GUIDNameManipulator {\n\n    void insert( QuickElement quickElement );\n\n    QuickElement getQuickElement( GUID guid, DeployInstrument deployInstrument );\n\n    void update( QuickElement serviceElement );\n\n    void remove( GUID guid );\n\n\n    List<QuickElement> fetchQuickElementByName( String name );\n\n    @Override\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/deploy/kom/source/VirtualMachineManipulator.java",
    "content": "package com.pinecone.hydra.deploy.kom.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\npublic interface VirtualMachineManipulator extends GUIDNameManipulator {\n\n    void insert( VirtualMachineElement virtualMachineElement );\n\n    VirtualMachineElement getDeployNode( GUID guid, DeployInstrument instrument );\n\n    void update( VirtualMachineElement serviceElement );\n\n    void remove( GUID guid );\n\n    @Override\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/server/ArchServer.java",
    "content": "package com.pinecone.hydra.server;\n\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic abstract class ArchServer implements Server {\n    protected String       name;\n    protected String       nickName;\n    protected boolean      enable;\n    protected String       localDomain;\n    protected String       wideDomain;\n    protected JSONObject   extras;\n\n\n    public String getName() {\n        return this.name;\n    }\n\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    public String getNickName() {\n        return this.nickName;\n    }\n\n    public void setNickName( String niceName ) {\n        this.nickName = niceName;\n    }\n\n    public boolean isEnable() {\n        return this.enable;\n    }\n\n    public void setEnable( boolean enable ) {\n        this.enable = enable;\n    }\n\n    public String getLocalDomain() {\n        return this.localDomain;\n    }\n\n    public void setLocalDomain( String localDomain ) {\n        this.localDomain = localDomain;\n    }\n\n    public String getWideDomain() {\n        return this.wideDomain;\n    }\n\n    public void setWideDomain( String wideDomain ) {\n        this.wideDomain = wideDomain;\n    }\n\n    public JSONObject getExtras() {\n        return this.extras;\n    }\n\n    public void setExtras( JSONObject extras ) {\n        this.extras = extras;\n    }\n\n    public Object get( Object key ) {\n        return this.extras.getMap().get( key );\n    }\n\n    @Override\n    public String toJSONString() {\n        return this.getExtras().toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/server/ArchServersCenter.java",
    "content": "package com.pinecone.hydra.server;\n\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.JSONMaptron;\n\nimport java.util.Map;\n\npublic abstract class ArchServersCenter extends ArchSystemCascadeComponent implements ServersCenter {\n    protected JSONObject     serversConfig;\n\n    protected JSONObject     nameMap;\n    protected JSONObject     nickNameMap;\n\n    public ArchServersCenter(Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, system.getComponentManager(), parent );\n\n        this.nameMap     = new JSONMaptron();\n        this.nickNameMap = new JSONMaptron();\n        this.loadConfig();\n    }\n\n    public ArchServersCenter(Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public ArchServersCenter( Hydrogen system ) {\n        this( system, null );\n    }\n\n    protected abstract void   loadConfig() ;\n\n    protected abstract Server newServer( JSONObject prototype ) ;\n\n    protected void fetchAll() {\n        for( Map.Entry<String, Object > skv : this.serversConfig.entrySet() ){\n            JSONObject seg = (JSONObject) skv.getValue();\n\n            for( Map.Entry<String, Object > seg_kv : seg.entrySet() ){\n                Object v = seg_kv.getValue();\n                if( v instanceof JSONObject ) {\n                    JSONObject archy = (JSONObject) seg_kv.getValue();\n                    archy.put( \"Hierarchy\", seg_kv.getKey() );\n                    this.addServer( this.newServer( archy ) );\n                }\n                else if( v instanceof JSONArray) {\n                    JSONArray archy = (JSONArray) seg_kv.getValue();\n                    
for ( int i = 0; i < archy.size(); i++ ) {\n                        JSONObject each = archy.optJSONObject(i);\n                        each.put( \"Hierarchy\", seg_kv.getKey() );\n                        this.addServer( this.newServer( each ) );\n                    }\n                }\n            }\n        }\n    }\n\n\n    @Override\n    public ServersCenter addServer( Server server ) {\n        this.getNameMap().put( server.getName(), server );\n        this.getNickNameMap().put( server.getNickName(), server );\n        return this;\n    }\n\n    @Override\n    public ServersCenter removeServer( Server server ) {\n        this.getNameMap().remove( server.getName() );\n        this.getNickNameMap().remove( server.getNickName() );\n        return this;\n    }\n\n    @Override\n    public JSONObject getNameMap() {\n        return this.nameMap;\n    }\n\n    @Override\n    public JSONObject getNickNameMap() {\n        return this.nickNameMap;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/server/Server.java",
    "content": "package com.pinecone.hydra.server;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic interface Server extends Pinenut {\n    String getName();\n    void setName( String name );\n\n    String getNickName();\n    void setNickName( String niceName );\n\n    boolean isEnable();\n    void setEnable( boolean enable );\n\n    String getLocalDomain();\n    void setLocalDomain( String localDomain );\n\n    String getWideDomain();\n    void setWideDomain(String wideDomain);\n\n    JSONObject getExtras();\n    void setExtras( JSONObject extras );\n\n    Object get( Object key );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-device/src/main/java/com/pinecone/hydra/server/ServersCenter.java",
    "content": "package com.pinecone.hydra.server;\n\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic interface ServersCenter extends Pinenut, HyComponent {\n\n    JSONObject getNameMap() ;\n\n    JSONObject getNickNameMap() ;\n\n    ServersCenter addServer   ( Server server );\n\n    ServersCenter removeServer( Server server );\n\n    Hydrogen getSystem();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-framework-runtime</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture-conduct</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        
</dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>mysql</groupId>\n            <artifactId>mysql-connector-java</artifactId>\n            <version>8.0.26</version>\n        </dependency>\n        <dependency>\n            <groupId>org.javassist</groupId>\n            <artifactId>javassist</artifactId>\n            <version>3.29.0-GA</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/AbortException.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class AbortException extends PineRuntimeException {\n    public AbortException    () {\n        super();\n    }\n\n    public AbortException    ( String message ) {\n        super(message);\n    }\n\n    public AbortException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public AbortException    ( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchAutomatron.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.GenericMasterTaskManager;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Processum;\n\npublic abstract class ArchAutomatron extends ArchProcessum implements Automatron {\n    private   ExceptionHandler            mExceptionHandler;\n    protected Exception                   mLastException;\n\n    protected ArchAutomatron( String szName, Processum parent, ExceptionHandler handler ) {\n        super( szName, parent );\n\n        if( handler == null ) {\n            handler = new DeathExceptionHandler( this );\n        }\n\n        this.mTaskManager      = new GenericMasterTaskManager( this );\n        this.mExceptionHandler = handler;\n    }\n\n    protected ArchAutomatron( String szName, Processum parent ) {\n        this( szName, parent, null );\n    }\n\n    protected void handleException( Exception e ) throws ProxyProvokeHandleException, InstantKillException, AbortException, ContinueException {\n        this.mLastException = e;\n\n        try{\n            this.getExceptionHandler().handle( e );\n        }\n        catch ( ContinueException c ) {\n            throw c;\n        }\n        catch ( RuntimeException e1 ) {\n            this.intoEnded();\n            throw e1;\n        }\n    }\n\n    protected abstract void intoEnded() ;\n\n    @Override\n    public Exception getLastException() {\n        return this.mLastException;\n    }\n\n    @Override\n    public ExceptionHandler getExceptionHandler() {\n        return this.mExceptionHandler;\n    }\n\n    @Override\n    public Automatron setExceptionHandler( ExceptionHandler handler ) {\n        this.mExceptionHandler = handler;\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchInstructation.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic abstract class ArchInstructation implements Instructation {\n    protected Exception  mLastException;\n\n    protected ArchInstructation() {\n\n    }\n\n    public Exception lastException() {\n        return this.mLastException;\n    }\n\n    public void setLastException( Exception e ) {\n        this.mLastException = e;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchParallelInstructation.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.executum.ArchThreadum;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.framework.system.executum.Processum;\n\npublic abstract class ArchParallelInstructation extends ArchInstructation implements ParallelInstructation {\n\n    protected volatile boolean mbEnded                ;\n    protected long             mStartNano             ;\n    protected long             mnMaxJoinMillis        ;\n    protected Processum        mParentPro             ;\n\n    protected Executum         mMasterExecutum    = new ArchThreadum( null, this.mParentPro ) {\n        @Override\n        public void apoptosis() {\n            this.interrupt();\n        }\n    };\n\n    protected Runnable         mMasterRun         = new Runnable() {\n        protected ArchParallelInstructation ion = ArchParallelInstructation.this;\n\n        @Override\n        public void run() {\n            try{\n                if( Thread.currentThread().isInterrupted() ) {\n                    if( this.ion instanceof Suggestation ) {\n                        ((Suggestation) this.ion).setIgnoredReason( IgnoredReason.Interrupt );\n                    }\n                    return;\n                }\n                this.ion.doExecute();\n            }\n            catch ( Exception e ) {\n                this.ion.setLastException( e );\n                if( this.ion instanceof Suggestation ) {\n                    if( e instanceof InterruptedException ) {\n                        ((Suggestation) this.ion).setIgnoredReason( IgnoredReason.Interrupt );\n                    }\n                    else if( e instanceof AbortException || e instanceof ContinueException ) {\n                        ((Suggestation) this.ion).setIgnoredReason( IgnoredReason.Abort );\n                    }\n                }\n            }\n            finally {\n                this.ion.mbEnded = true;\n            }\n      
  }\n    };\n\n    protected Thread           mMasterThread      = new Thread( this.mMasterRun );\n\n    protected ArchParallelInstructation( Processum parent, long nMaxJoinMillis ) {\n        super();\n\n        this.mbEnded         = false               ;\n        this.mnMaxJoinMillis = nMaxJoinMillis      ;\n        this.mParentPro      = parent              ;\n        this.mStartNano      = System.nanoTime()   ;\n    }\n\n    protected ArchParallelInstructation( Processum parent ) {\n        this( parent, -1 );\n    }\n\n    @Override\n    public void terminate() {\n        this.interrupt();\n    }\n\n    @Override\n    public void interrupt(){\n        this.mMasterExecutum.interrupt();\n    }\n\n    @Override\n    public void kill(){\n        this.mMasterExecutum.kill();\n    }\n\n    @Override\n    public boolean isEnded() {\n        return this.mbEnded;\n    }\n\n    @Override\n    public long getStartNano() {\n        return this.mStartNano;\n    }\n\n    protected abstract void doExecute() throws Exception;\n\n    @Override\n    public void execute() throws Exception {\n        if( this.mbEnded && this.mMasterThread.getState() == Thread.State.TERMINATED ) {\n            this.mMasterThread = new Thread( this.mMasterRun );\n            this.mbEnded       = false;\n        }\n\n        this.mMasterThread.start();\n        if( this.mnMaxJoinMillis == 0 ) {\n            this.mMasterThread.join();\n        }\n        else if( this.mnMaxJoinMillis > 0 ) {\n            this.mMasterThread.join( this.mnMaxJoinMillis );\n        }\n    }\n\n    @Override\n    public boolean isDetached() {\n        return this.mnMaxJoinMillis < 0;\n    }\n\n    @Override\n    public boolean isJoined() {\n        return this.mnMaxJoinMillis >= 0;\n    }\n\n    @Override\n    public ArchParallelInstructation setDetach() {\n        this.mnMaxJoinMillis = -1;\n        return this;\n    }\n\n    @Override\n    public ArchParallelInstructation setJoin() {\n        this.mnMaxJoinMillis = 
0;\n        return this;\n    }\n\n    @Override\n    public long getMaxJoinMillis() {\n        return this.mnMaxJoinMillis;\n    }\n\n    @Override\n    public ParallelInstructation setMaxJoinMillis( long join ) {\n        this.mnMaxJoinMillis = join;\n        return this;\n    }\n\n    @Override\n    public Thread getMasterThread() {\n        return this.mMasterThread;\n    }\n\n    @Override\n    public Executum tryGetMasterExecutum() {\n        return this.mMasterExecutum;\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchParallelSuggestation.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.executum.Processum;\n\npublic abstract class ArchParallelSuggestation extends ArchParallelInstructation implements ParallelSuggestation {\n    protected IgnoredReason mIgnoredReason;\n\n    protected ArchParallelSuggestation( Processum parent ){\n        super( parent );\n    }\n\n    @Override\n    public IgnoredReason getIgnoredReason() {\n        return this.mIgnoredReason;\n    }\n\n    @Override\n    public void setIgnoredReason( IgnoredReason ignoredReason ) {\n        this.mIgnoredReason = ignoredReason;\n    }\n\n    @Override\n    public abstract void execute();\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchSequentialMarshalling.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Deque;\nimport java.util.concurrent.TimeoutException;\n\npublic abstract class ArchSequentialMarshalling implements Marshalling {\n    protected List<Instructation >   mInstructations;\n    protected Deque<Instructation >  mPriorInstructations;\n    protected List<Instructation >   mParallelInstructations;\n    protected MationInvoker          mMationInvoker;\n\n    protected ArchSequentialMarshalling( List<Instructation> instructations, List<Instructation> parallelInstructations, Deque<Instructation> priorInstructations, MationInvoker invoker ) {\n        this.mParallelInstructations = parallelInstructations;\n        this.mPriorInstructations    = priorInstructations;\n        this.mInstructations         = instructations;\n        this.mMationInvoker          = invoker;\n    }\n\n\n\n    protected ArchSequentialMarshalling( MationInvoker invoker ) {\n        this( new ArrayList<>(), new ArrayList<>(), new LinkedList<>(), invoker );\n    }\n\n    protected ArchSequentialMarshalling() {\n        this( new GenericMationInvoker() );\n    }\n\n    @Override\n    public Collection<Instructation> getInstructations() {\n        return this.mInstructations;\n    }\n\n    @Override\n    public Collection<Instructation> getParallelInstructations() {\n        return this.mParallelInstructations;\n    }\n\n    @Override\n    public Collection<Instructation> getPriorInstructations() {\n        return this.mPriorInstructations;\n    }\n\n    @Override\n    public void addLast( Instructation instructation ) {\n        if( instructation instanceof InstantInstructation ) {\n            this.prompt( instructation );\n        }\n        else {\n            this.mInstructations.add( instructation );\n            if ( instructation instanceof ParallelInstructation ) {\n                
this.mParallelInstructations.add( instructation );\n            }\n        }\n    }\n\n    @Override\n    public void addFirst( Instructation instructation ) {\n        if( instructation instanceof InstantInstructation ) {\n            this.prompt( instructation );\n        }\n        else {\n            this.mInstructations.add( 0, instructation );\n            if ( instructation instanceof ParallelInstructation ) {\n                this.mParallelInstructations.add( 0, instructation );\n            }\n        }\n    }\n\n    @Override\n    public void erase( Instructation instructation ) {\n        if( instructation instanceof InstantInstructation ) {\n            this.mPriorInstructations.remove( instructation );\n        }\n        else {\n            this.mInstructations.remove( instructation );\n            if ( instructation instanceof ParallelInstructation ) {\n                this.mParallelInstructations.remove( instructation );\n            }\n        }\n    }\n\n    @Override\n    public void prompt( Instructation instructation ) {\n        this.mPriorInstructations.addFirst( instructation );\n    }\n\n    protected boolean executeKernelInstructations( Instructation instruction ) throws Exception {\n        if( instruction == KernelInstructation.DIE ) {\n            instruction.execute();\n            return true;\n        }\n        return false;\n    }\n\n    protected void executePriorInstructations( boolean bOnlyTryCallKernel ) throws Exception {\n        for( Instructation instruction : this.mPriorInstructations ) {\n            if( !this.executeKernelInstructations( instruction ) && !bOnlyTryCallKernel ){\n                this.mMationInvoker.invoke( instruction );\n            }\n        }\n    }\n\n    @Override\n    public void execute() throws Exception {\n        List<Instructation> children = this.mInstructations;\n\n        try{\n            this.executePriorInstructations( false );\n            for( Instructation instruction : children ) {\n  
              this.executePriorInstructations( true );\n                if( Thread.currentThread().isInterrupted() ) {\n                    throw new InterruptedException( \"Interrupt termination.\" );\n                }\n                this.mMationInvoker.invoke( instruction );\n            }\n        }\n        catch ( InstantKillException e ) {\n            this.terminate();\n            throw e;\n        }\n\n        this.waitForParallelInstructations();\n    }\n\n    @Override\n    public void terminate() {\n        for ( Instructation instruction : this.mParallelInstructations ) {\n            ParallelInstructation parallelInstruction = (ParallelInstructation) instruction;\n            parallelInstruction.terminate();\n        }\n    }\n\n    protected void waitForParallelInstructations() throws InterruptedException, TimeoutException {\n        boolean allEnded;\n        do {\n            allEnded = true;\n            for ( Instructation instruction : this.mParallelInstructations ) {\n                if( Thread.currentThread().isInterrupted() ) {\n                    this.terminate();\n                    throw new InterruptedException( \"Interrupt termination.\" );\n                }\n\n                ParallelInstructation parallelInstruction = (ParallelInstructation) instruction;\n                if ( !parallelInstruction.isEnded() ) {\n                    allEnded = false;\n                    try{\n                        this.mMationInvoker.checkTimeout( parallelInstruction );\n                    }\n                    catch ( TimeoutException e ) {\n                        this.mMationInvoker.terminate( parallelInstruction );\n                        parallelInstruction.setLastException( e );\n\n                        if( parallelInstruction instanceof ParallelSuggestation ) {\n                            ((ParallelSuggestation) parallelInstruction).setIgnoredReason( IgnoredReason.Overtime );\n                        }\n                        else 
{\n                            throw e;\n                        }\n                    }\n\n                    Thread.sleep( 50 );\n                }\n            }\n        }\n        while ( !allEnded );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ArchSuggestation.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic abstract class ArchSuggestation extends ArchInstructation implements Suggestation {\n    protected IgnoredReason mIgnoredReason;\n\n    protected ArchSuggestation() {\n        super();\n    }\n\n    @Override\n    public IgnoredReason getIgnoredReason() {\n        return this.mIgnoredReason;\n    }\n\n    @Override\n    public void setIgnoredReason( IgnoredReason ignoredReason ) {\n        this.mIgnoredReason = ignoredReason;\n    }\n\n    @Override\n    public abstract void execute();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Automaton.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.executum.Processum;\n\nimport java.util.concurrent.BlockingDeque;\nimport java.util.concurrent.LinkedBlockingDeque;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.atomic.AtomicLong;\n\npublic class Automaton extends ArchAutomatron implements LifecycleAutomaton {\n    private static final AtomicInteger nextSerialNumber = new AtomicInteger( 0 );\n\n    private static int serialNumber() {\n        return Automaton.nextSerialNumber.getAndIncrement();\n    }\n\n    private static String name( String name ) {\n        if( name == null ) {\n            return Automaton.class.getSimpleName() + \"-\" + Automaton.serialNumber();\n        }\n        return name;\n    }\n\n    private Thread                                mMasterThread       ;\n    private AtomicLong                            mMaxLifetimeMillis  ;\n    private AtomicLong                            mHeartbeatTimeoutMillis;\n\n    protected volatile boolean                    mRunning            ;\n    protected long                                mnCurrentPipelineWaitingMillis;\n    protected final BlockingDeque<Instructation > mInstructationQueue ;\n\n    protected Automaton( String szName, Processum parent, BlockingDeque<Instructation > deque, boolean bIsDaemon, long nCurrentPipelineWaitingMillis ) {\n        super( Automaton.name( szName ), parent );\n\n        this.mMaxLifetimeMillis             = new AtomicLong( 0 );\n        this.mHeartbeatTimeoutMillis        = new AtomicLong( 0 );\n        this.mnCurrentPipelineWaitingMillis = nCurrentPipelineWaitingMillis;\n        this.mInstructationQueue            = deque;\n        this.mMasterThread                  = new Thread( this::mainLoop );\n\n        this.mMasterThread.setDaemon( bIsDaemon );\n        this.mMasterThread.setName( this.mszName + this.mMasterThread.getName() );\n        
this.setThreadAffinity( this.mMasterThread );\n    }\n\n    public Automaton( String szName, Processum parent, boolean bIsDaemon, long nCurrentPipelineWaitingMillis ) {\n        this( szName, parent, new LinkedBlockingDeque<>(), bIsDaemon, nCurrentPipelineWaitingMillis  );\n    }\n\n    public Automaton( Processum parent, boolean bIsDaemon, long nCurrentPipelineWaitingMillis ) {\n        this( null, parent, bIsDaemon, nCurrentPipelineWaitingMillis );\n    }\n\n    public Automaton( String szName, Processum parent, long nCurrentPipelineWaitingMillis  ) {\n        this( szName, parent, false, nCurrentPipelineWaitingMillis );\n    }\n\n    public Automaton( Processum parent, long nCurrentPipelineWaitingMillis ) {\n        this( null, parent, nCurrentPipelineWaitingMillis );\n    }\n\n\n    public Automaton( String szName, Processum parent, boolean bIsDaemon ) {\n        this( szName, parent, bIsDaemon, 50  );\n    }\n\n    public Automaton( Processum parent, boolean bIsDaemon ) {\n        this( null, parent, bIsDaemon );\n    }\n\n    public Automaton( String szName, Processum parent  ) {\n        this( szName, parent, false );\n    }\n\n    public Automaton( Processum parent ) {\n        this( null, parent );\n    }\n\n    @Override\n    public void start() {\n        this.mRunning      = true;\n        this.mMasterThread.start();\n    }\n\n    @Override\n    public void join() throws InterruptedException {\n        this.mMasterThread.join();\n    }\n\n    @Override\n    public void join( long millis ) throws InterruptedException {\n        this.mMasterThread.join( millis );\n    }\n\n    @Override\n    public void command( Instructation instructation ) {\n        this.mInstructationQueue.addLast( instructation );\n    }\n\n    @Override\n    public void prompt( Instructation instructation ) {\n        this.mInstructationQueue.addFirst( instructation );\n    }\n\n    @Override\n    public void withdraw( Instructation instructation ) {\n        
this.mInstructationQueue.remove( instructation );\n    }\n\n    @Override\n    public boolean isEnded() {\n        return !this.mRunning;\n    }\n\n    @Override\n    protected void intoEnded() {\n        this.mRunning = false;\n    }\n\n    @Override\n    public long getMaxLifetimeMillis() {\n        return this.mMaxLifetimeMillis.get();\n    }\n\n    @Override\n    public LifecycleAutomaton setMaxLifetimeMillis( long maxLifetimeMillis ) {\n        this.mMaxLifetimeMillis.getAndSet( maxLifetimeMillis );\n        return this;\n    }\n\n    @Override\n    public long getHeartbeatTimeoutMillis() {\n        return this.mHeartbeatTimeoutMillis.get();\n    }\n\n    @Override\n    public LifecycleAutomaton setHeartbeatTimeoutMillis( long heartbeatTimeoutMillis ) {\n        this.mHeartbeatTimeoutMillis.getAndSet( heartbeatTimeoutMillis );\n        return this;\n    }\n\n    protected void mainLoop() {\n        long startTime       = System.currentTimeMillis();\n        long lastCommandTime = System.currentTimeMillis();\n\n        while ( this.mRunning ) {\n            try{\n                if( Thread.currentThread().isInterrupted() ) {\n                    throw new AbortException();\n                }\n\n                long currentTime = System.currentTimeMillis();\n                if ( this.getMaxLifetimeMillis() > 0 && ( currentTime - startTime ) > this.getMaxLifetimeMillis() ) {\n                    this.intoEnded();  // Suicide\n                    break;\n                }\n\n                if ( this.getHeartbeatTimeoutMillis() > 0 && ( currentTime - lastCommandTime ) > this.getHeartbeatTimeoutMillis() ) {\n                    this.intoEnded(); // Suicide\n                    break;\n                }\n\n                Instructation instructation = this.mInstructationQueue.poll( this.mnCurrentPipelineWaitingMillis, TimeUnit.MILLISECONDS );\n                if ( instructation != null ) {\n                    try {\n                        
instructation.execute();\n                    }\n                    catch ( ContinueException c ) {\n                        // Do nothing\n                    }\n\n                    lastCommandTime = System.currentTimeMillis(); // Reset heartbeat timeout\n                }\n            }\n            catch ( Exception e ) {\n                try{\n                    this.handleException( e );\n                }\n                catch ( ContinueException c ) {\n                    // Do nothing\n                }\n                catch ( Exception ke ) {\n                    break;\n                }\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Automatron.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.regime.Automatus;\n\npublic interface Automatron extends Processum, Automatus {\n    void start();\n\n    void join() throws InterruptedException;\n\n    void join( long millis ) throws InterruptedException;\n\n    // Add to pipeline tail\n    void command ( Instructation instructation );\n\n    // Add to pipeline front\n    void prompt  ( Instructation instructation );\n\n    void withdraw  ( Instructation instructation );\n\n    default void terminate() {\n        this.prompt( KernelInstructation.DIE );\n    }\n\n    boolean isEnded();\n\n    Exception getLastException();\n\n    ExceptionHandler getExceptionHandler();\n\n    Automatron setExceptionHandler( ExceptionHandler handler );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/AutomatronMationInvoker.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.executum.Executum;\n\npublic class AutomatronMationInvoker extends GenericMationInvoker {\n    protected Automatron mAutomatron;\n\n    public AutomatronMationInvoker( long maxExecutionMillis, long maxInterruptMillis, Automatron automatron ) {\n        super( maxExecutionMillis, maxInterruptMillis );\n\n        this.mAutomatron = automatron;\n    }\n\n    public AutomatronMationInvoker( Automatron automatron ) {\n        this( Long.MAX_VALUE, -1, automatron );\n    }\n\n    @Override\n    public void invoke   ( Instructation instructation ) throws Exception {\n        Executum executum = null;\n        if( instructation instanceof ParallelInstructation ) {\n            executum = ((ParallelInstructation) instructation).tryGetMasterExecutum();\n            if( executum != null ) {\n                this.mAutomatron.getTaskManager().add( executum );\n            }\n        }\n\n        try{\n            super.invoke( instructation );\n        }\n        catch ( Exception e ) {\n            if( executum != null ) {\n                this.mAutomatron.getTaskManager().erase( executum );\n            }\n\n            throw e;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Continue.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic final class Continue extends ArchInstructation {\n    Continue () { }\n\n    @Override\n    public void execute() throws Exception {\n        throw new ContinueException();\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ContinueException.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class ContinueException extends PineRuntimeException {\n    public ContinueException    () {\n        super();\n    }\n\n    public ContinueException    ( String message ) {\n        super(message);\n    }\n\n    public ContinueException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ContinueException    ( Throwable cause ) {\n        super(cause);\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/DeathExceptionHandler.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.Debug;\n\npublic class DeathExceptionHandler implements ExceptionHandler {\n    protected Automatron mAutomatron;\n\n    public DeathExceptionHandler( Automatron automatron ) {\n        this.mAutomatron = automatron;\n    }\n\n    @Override\n    public void handle( Exception e ) throws ProxyProvokeHandleException, InstantKillException, AbortException, ContinueException {\n        if( e instanceof InstantKillException ) {\n            Debug.info( \"[NOTICE] <Caused by InstantKillException>\" );\n            //e.printStackTrace();\n            throw new InstantKillException( e ) ;\n        }\n        else if( e instanceof ContinueException ) {\n            throw (ContinueException) e;\n        }\n        else if( e instanceof AbortException ) {\n            throw (AbortException) e;\n        }\n        else {\n            e.printStackTrace();\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Die.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic final class Die extends ArchInstructation implements InstantInstructation {\n    Die () { }\n\n    @Override\n    public void execute() throws Exception {\n        throw new InstantKillException();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ExceptionHandler.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ExceptionHandler extends Pinenut {\n    void handle( Exception e ) throws ProxyProvokeHandleException, InstantKillException, AbortException, ContinueException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/GenericMarshalling.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic class GenericMarshalling extends ArchSequentialMarshalling {\n    protected Automatron   mAutomatron;\n\n    public GenericMarshalling( Automatron automatron, MationInvoker invoker ) {\n        super( invoker );\n\n        this.mAutomatron = automatron;\n    }\n\n    public GenericMarshalling( Automatron automatron ) {\n        this( automatron, new AutomatronMationInvoker( automatron ) );\n    }\n\n    public GenericMarshalling() {\n        this( null, new GenericMationInvoker() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/GenericMationInvoker.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic class GenericMationInvoker implements MationInvoker {\n    private long mMaxExecutionMillis;\n    private long mMaxInterruptMillis;\n\n    public GenericMationInvoker( long maxExecutionMillis, long maxInterruptMillis ) {\n        this.mMaxExecutionMillis = maxExecutionMillis;\n        this.mMaxInterruptMillis = maxInterruptMillis;\n    }\n\n    public GenericMationInvoker() {\n        this( Long.MAX_VALUE, -1 );\n    }\n\n\n    @Override\n    public long getMaxExecutionMillis() {\n        return this.mMaxExecutionMillis;\n    }\n\n    @Override\n    public long getMaxInterruptMillis() {\n        return this.mMaxInterruptMillis;\n    }\n\n    @Override\n    public void setMaxExecutionMillis( long maxExecutionMillis ) {\n        this.mMaxExecutionMillis = maxExecutionMillis;\n    }\n\n    @Override\n    public void setMaxInterruptMillis( long maxInterruptMillis ) {\n        this.mMaxInterruptMillis = maxInterruptMillis;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Heartbeat.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic final class Heartbeat extends ArchInstructation implements InstantInstructation {\n    Heartbeat () { }\n\n    @Override\n    public void execute() throws Exception {\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/IgnoredReason.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic enum IgnoredReason {\n    Exception    ( \"Exception\" ),\n    Violation    ( \"Violation\" ),\n    Overtime     ( \"Overtime\"  ),\n    Interrupt    ( \"Interrupt\"  ),\n    Abort        ( \"Abort\"     );\n\n    private final String value;\n    IgnoredReason( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/InstantInstructation.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic interface InstantInstructation extends Instructation {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/InstantKillException.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class InstantKillException extends PineRuntimeException {\n    public InstantKillException    () {\n        super();\n    }\n\n    public InstantKillException    ( String message ) {\n        super(message);\n    }\n\n    public InstantKillException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public InstantKillException    ( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Instructation.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.functions.Executor;\n\n/**\n * Instruction -mation\n */\npublic interface Instructation extends Executor {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/KernelInstructation.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic final class KernelInstructation {\n    public static final Die        DIE         = new Die();\n    public static final Continue   CONTINUE    = new Continue();\n    public static final Heartbeat  HEARTBEAT   = new Heartbeat();\n    //public static final Terminate  TERMINATE   = new Terminate();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/LifecycleAutomaton.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic interface LifecycleAutomaton extends Automatron {\n    long getMaxLifetimeMillis();\n\n    LifecycleAutomaton setMaxLifetimeMillis( long maxLifetimeMillis );\n\n    long getHeartbeatTimeoutMillis();\n\n    LifecycleAutomaton setHeartbeatTimeoutMillis( long heartbeatTimeoutMillis );\n\n    default LifecycleAutomaton sendHeartbeat() {\n        this.command( KernelInstructation.HEARTBEAT );\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Marshalling.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport java.util.Collection;\n\npublic interface Marshalling extends Instructation {\n    default void add( Instructation instructation ) {\n        this.addLast( instructation );\n    }\n\n    void addLast( Instructation instructation );\n\n    void addFirst( Instructation instructation );\n\n    void erase( Instructation instructation );\n\n    void prompt( Instructation instructation );\n\n    Collection<Instructation> getInstructations();\n\n    Collection<Instructation> getParallelInstructations();\n\n    Collection<Instructation> getPriorInstructations();\n\n    void terminate();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/MationInvoker.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.concurrent.TimeoutException;\n\npublic interface MationInvoker extends Pinenut {\n    default boolean isInstructationViolation( Suggestation suggestation ) {\n        return false;\n    }\n\n    default void invoke   ( Instructation instructation ) throws Exception {\n        try {\n            if( instructation instanceof Suggestation ) {\n                if( this.isInstructationViolation( (Suggestation)instructation ) ){\n                    ((Suggestation) instructation).setIgnoredReason( IgnoredReason.Violation );\n                    return;\n                }\n            }\n            instructation.execute();\n        }\n        catch ( ContinueException c ) { // Just continue.\n            if( instructation instanceof Suggestation ) {\n                Suggestation suggestation = ((Suggestation) instructation);\n                suggestation.setIgnoredReason( IgnoredReason.Abort );\n                suggestation.setLastException( c );\n            }\n            else {\n                throw c;\n            }\n        }\n        catch ( Exception e ) {\n            if( instructation instanceof Suggestation ) {\n                Suggestation suggestation = ((Suggestation) instructation);\n                suggestation.setIgnoredReason( IgnoredReason.Exception );\n                suggestation.setLastException( e );\n            }\n            else if( instructation instanceof ParallelInstructation ) {\n                ((ParallelInstructation) instructation).setLastException( e );\n            }\n            else {\n                throw e;\n            }\n        }\n    }\n\n    default void terminate( ParallelInstructation instructation ) throws InterruptedException {\n        instructation.interrupt();\n\n        if( this.getMaxInterruptMillis() > 0 ) {\n            long startApoptosisMillis = System.currentTimeMillis();\n            
long maxApoptosisMillis   = this.getMaxInterruptMillis();\n            while ( System.currentTimeMillis() - startApoptosisMillis < maxApoptosisMillis ) {\n                if (instructation.isEnded()) {\n                    return;\n                }\n\n                Thread.sleep( 50 );\n            }\n        }\n\n        // If the instructation is still not ended, kill it\n        if ( !instructation.isEnded() ) {\n            instructation.kill();\n        }\n    }\n\n    long getMaxExecutionMillis();\n\n    long getMaxInterruptMillis();\n\n    void setMaxExecutionMillis( long maxExecutionMillis );\n\n    void setMaxInterruptMillis( long maxInterruptMillis );\n\n    default void checkTimeout( ParallelInstructation instructation ) throws TimeoutException {\n        if ( instructation.getExecutedMillis() > this.getMaxExecutionMillis() ) {\n            throw new TimeoutException( \"Execution exceeded max time limit of \" + this.getMaxExecutionMillis() + \" milliseconds.\" );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ParallelInstructation.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.executum.Chronum;\nimport com.pinecone.framework.system.executum.Executum;\n\npublic interface ParallelInstructation extends Instructation, Chronum {\n    boolean isEnded();\n\n    Exception lastException();\n\n    void setLastException( Exception e );\n\n    void terminate() ;\n\n    void interrupt();\n\n    void kill();\n\n    boolean isDetached();\n\n    boolean isJoined();\n\n    ParallelInstructation setDetach();\n\n    ParallelInstructation setJoin();\n\n    long getMaxJoinMillis();\n\n    ParallelInstructation setMaxJoinMillis( long join );\n\n    Thread getMasterThread();\n\n    default Executum tryGetMasterExecutum() {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/ParallelSuggestation.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic interface ParallelSuggestation extends ParallelInstructation, Suggestation {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/PeriodicAutomaton.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport com.pinecone.framework.system.executum.Processum;\n\n\nimport java.util.ArrayList;\nimport java.util.LinkedList;\nimport java.util.Collection;\nimport java.util.Deque;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.atomic.AtomicLong;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\npublic class PeriodicAutomaton extends ArchAutomatron implements PeriodicAutomatron {\n    private static final AtomicInteger nextSerialNumber = new AtomicInteger( 0 );\n\n    private static int serialNumber() {\n        return PeriodicAutomaton.nextSerialNumber.getAndIncrement();\n    }\n\n    private static String name( String name ) {\n        if( name == null ) {\n            return PeriodicAutomaton.class.getSimpleName() + \"-\" + PeriodicAutomaton.serialNumber();\n        }\n        return name;\n    }\n\n    private Marshalling                   mMarshalling;\n    private final AtomicLong              mPeriodMillis;\n    private Thread                        mMasterThread;\n\n    protected Deque<InstructLine >        mInstructationBuffer;\n    protected ReentrantReadWriteLock      mBufferLock;\n    protected final Object                mNextPeriodLock = new Object();\n    protected volatile boolean            mRunning;\n\n\n    protected PeriodicAutomaton( String szName, Processum parent, Marshalling marshalling, Deque<InstructLine > buffer, long nPeriodMillis, boolean bIsDaemon, ExceptionHandler handler ) {\n        super( PeriodicAutomaton.name( szName ), parent, handler );\n\n        this.mBufferLock          = new ReentrantReadWriteLock();\n        this.mPeriodMillis        = new AtomicLong( nPeriodMillis );\n        this.mMarshalling         = marshalling;\n        this.mInstructationBuffer = buffer;\n        this.mMasterThread        = new Thread( this::mainLoop );\n\n        this.mMasterThread.setDaemon( bIsDaemon );\n        this.mMasterThread.setName( this.mszName + 
this.mMasterThread.getName() );\n        this.setThreadAffinity( this.mMasterThread );\n    }\n\n    public PeriodicAutomaton( String szName, Processum parent, Marshalling marshalling, long nPeriodMillis, boolean bIsDaemon, ExceptionHandler handler ) {\n        this( szName, parent, marshalling, new LinkedList<>(),nPeriodMillis, bIsDaemon, handler  );\n    }\n\n    public PeriodicAutomaton( String szName, Processum parent, Marshalling marshalling, long nPeriodMillis, boolean bIsDaemon ) {\n        this( szName, parent, marshalling,nPeriodMillis, bIsDaemon, null  );\n    }\n\n    public PeriodicAutomaton( String szName, Processum parent, ExceptionHandler handler, long nPeriodMillis, boolean bIsDaemon ) {\n        this( szName, parent, new GenericMarshalling(), nPeriodMillis, bIsDaemon, handler  );\n    }\n\n    public PeriodicAutomaton( String szName, Processum parent, long nPeriodMillis, boolean bIsDaemon ) {\n        this( szName, parent, (ExceptionHandler) null, nPeriodMillis, bIsDaemon );\n    }\n\n    public PeriodicAutomaton( Processum parent, long nPeriodMillis, boolean bIsDaemon ) {\n        this( null, parent, (ExceptionHandler) null, nPeriodMillis, bIsDaemon );\n    }\n\n    public PeriodicAutomaton( Processum parent, long nPeriodMillis ) {\n        this( null, parent, (ExceptionHandler) null, nPeriodMillis, false );\n    }\n\n    @Override\n    public void start() {\n        this.mRunning      = true;\n        this.mMasterThread.start();\n    }\n\n    @Override\n    public void join() throws InterruptedException {\n        this.mMasterThread.join();\n    }\n\n    @Override\n    public void join( long millis ) throws InterruptedException {\n        this.mMasterThread.join( millis );\n    }\n\n\n    @Override\n    public boolean isEnded() {\n        return !this.mRunning;\n    }\n\n    protected void invokeIfKernelInstructation( Instructation instructation ) throws Exception {\n        if( instructation == KernelInstructation.DIE ) {\n            
instructation.execute();\n        }\n    }\n\n    @Override\n    protected void intoEnded() {\n        this.mRunning = false;\n    }\n\n    protected void fetchCacheIntoMarshalling( boolean bLoopMode ) {\n        // <Ref::Marshalling>, locked writing operations.\n        this.mBufferLock.writeLock().lock();\n        try {\n            if ( this.mInstructationBuffer.isEmpty() ) {\n                return;\n            }\n\n            for( InstructLine line : this.mInstructationBuffer ) {\n                if( line.setRemove ) {\n                    if( bLoopMode ) {\n                        this.invokeIfKernelInstructation( line.instructation );\n                    }\n\n                    if( line.instructation instanceof InstantInstructation ) {\n                        this.mMarshalling.prompt( line.instructation );\n                    }\n                    else {\n                        if( line.piror ) {\n                            this.mMarshalling.addFirst( line.instructation );\n                        }\n                        else {\n                            this.mMarshalling.addLast( line.instructation );\n                        }\n                    }\n                }\n                else {\n                    this.mMarshalling.erase( line.instructation );\n                }\n            }\n            this.mInstructationBuffer.clear();\n        }\n        catch ( Exception e ) {\n            this.handleException( e );\n        }\n        finally {\n            this.mBufferLock.writeLock().unlock();\n        }\n    }\n\n    protected void mainLoop() {\n        try{\n            while ( this.mRunning ) {\n                try {\n                    long startTime = System.currentTimeMillis();\n\n                    if( Thread.currentThread().isInterrupted() ) {\n                        throw new AbortException();\n                    }\n\n                    this.fetchCacheIntoMarshalling( true );\n                    if( !this.mRunning ) { 
// Check if given `death` instruction.\n                        break;\n                    }\n\n                    // <Ref::Marshalling>, locked writing operations.\n                    // this(Consumer, Who are trying to consume all commands) and others (Producer, Who are trying to add new command)\n                    this.mBufferLock.readLock().lock();\n                    try{\n                        try{\n                            this.mMarshalling.execute();\n                        }\n                        catch ( ContinueException c ) {\n                            // Do nothing\n                        }\n                    }\n                    finally {\n                        this.mBufferLock.readLock().unlock();\n                    }\n\n\n                    //Debug.echo( \"\\n\" );\n\n                    long endTime = System.currentTimeMillis();\n                    long elapsed = endTime - startTime;\n                    long sleepTime = this.mPeriodMillis.get() - elapsed;\n\n                    if ( sleepTime > 0 ) {\n                        synchronized ( this.mNextPeriodLock ) {\n                            this.mNextPeriodLock.wait( sleepTime );\n                        }\n                    }\n                }\n                catch ( Exception e ) {\n                    try{\n                        this.handleException( e );\n                    }\n                    catch ( ContinueException c ) {\n                        // Do nothing\n                    }\n                    catch ( Exception ke ) {\n                        break;\n                    }\n                }\n            }\n        }\n        finally {\n            synchronized ( this.mNextPeriodLock ) {\n                this.mNextPeriodLock.notify();\n            }\n        }\n    }\n\n    protected boolean tryLockBuffer() {\n        boolean bHeldByCurrentThread = this.mBufferLock.writeLock().isHeldByCurrentThread();\n        if( !bHeldByCurrentThread ) {\n   
         bHeldByCurrentThread = Thread.currentThread() == this.mMasterThread;\n        }\n        boolean bOptLocked = true;\n        if( bHeldByCurrentThread ) {\n            bOptLocked = this.mBufferLock.writeLock().tryLock();\n        }\n        else {\n            this.mBufferLock.writeLock().lock();\n        }\n        return bOptLocked;\n    }\n\n    protected void add ( Instructation instructation, boolean bPrior ) {\n        boolean bOptLocked = this.tryLockBuffer();\n\n        try{\n            if ( !this.mRunning ) {\n                // <Ref::Marshalling>, locked writing operations.\n                this.mMarshalling.add( instructation );\n            }\n            else {\n                if( Thread.currentThread() != this.mMasterThread ) {  // Nested operation\n                    this.fetchCacheIntoMarshalling( false );\n                }\n\n                InstructLine line = new InstructLine( instructation, bPrior, true );\n                if( bPrior ) {\n                    this.mInstructationBuffer.addFirst( line );\n                }\n                else {\n                    this.mInstructationBuffer.addLast( line );\n                }\n            }\n        }\n        finally {\n            if( bOptLocked && this.mBufferLock.writeLock().getHoldCount() > 0 ) {\n                this.mBufferLock.writeLock().unlock();\n            }\n        }\n    }\n\n    @Override\n    public void command( Instructation instructation ) {\n        this.add( instructation, false );\n    }\n\n    @Override\n    public void prompt( Instructation instructation ) {\n        this.add( instructation, true );\n    }\n\n    @Override\n    public void withdraw( Instructation instructation ) {\n        boolean bOptLocked = this.tryLockBuffer();\n\n        try {\n            InstructLine target = null;\n            for( InstructLine line : this.mInstructationBuffer ) {\n                if( line.instructation.equals( instructation ) ) {\n                    target = line;\n 
                   break;\n                }\n            }\n\n            if( target != null ) {\n                this.mInstructationBuffer.remove( target );\n            }\n\n            if ( !this.mRunning ) {\n                // <Ref::Marshalling>, locked writing operations.\n                this.mMarshalling.erase( instructation );\n            }\n            else {\n                if( Thread.currentThread() != this.mMasterThread ) {  // Nested operation\n                    this.fetchCacheIntoMarshalling( false );\n                }\n\n                InstructLine line = new InstructLine( instructation, false, false );\n                this.mInstructationBuffer.addLast( line );\n            }\n        }\n        finally {\n            if( bOptLocked && this.mBufferLock.writeLock().getHoldCount() > 0 ) {\n                this.mBufferLock.writeLock().unlock();\n            }\n        }\n    }\n\n    @Override\n    public long getPeriodMillis() {\n        return this.mPeriodMillis.get();\n    }\n\n    @Override\n    public void setPeriodMillis( long periodMillis ) {\n        this.mPeriodMillis.getAndSet( periodMillis );\n    }\n\n    @Override\n    public Collection<Instructation > getBuffer() {\n        ArrayList<Instructation > list = new ArrayList<>();\n        for( InstructLine line : this.mInstructationBuffer ) {\n            list.add( line.instructation );\n        }\n        return list;\n    }\n\n    @Override\n    public int bufferSize() {\n        return this.mInstructationBuffer.size();\n    }\n\n    @Override\n    public Marshalling getMarshalling() {\n        return this.mMarshalling;\n    }\n\n    @Override\n    public Thread getMasterThread() {\n        return this.mMasterThread;\n    }\n\n    protected class InstructLine {\n        protected Instructation instructation;\n        protected boolean       piror;\n        protected boolean       setRemove; // 0 => remove, 1 => set\n\n        protected InstructLine ( Instructation instructation, 
boolean piror, boolean setRemove ) {\n            this.instructation = instructation;\n            this.piror         = piror;\n            this.setRemove     = setRemove;\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/PeriodicAutomatron.java",
    "content": "package com.pinecone.hydra.auto;\n\nimport java.util.Collection;\n\npublic interface PeriodicAutomatron extends Automatron {\n    long getPeriodMillis() ;\n\n    void setPeriodMillis( long periodMillis ) ;\n\n    Thread getMasterThread();\n\n    int bufferSize();\n\n    Collection<Instructation > getBuffer();\n\n    Marshalling getMarshalling();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Suggestation.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic interface Suggestation extends Instructation {\n    @Override\n    void execute() ; // No exceptions\n\n    default boolean hasIgnored() {\n        return this.getIgnoredReason() != null;\n    }\n\n    default boolean hasAccepted() {\n        return this.getIgnoredReason() == null;\n    }\n\n    IgnoredReason getIgnoredReason();\n\n    void setIgnoredReason( IgnoredReason ignoredReason );\n\n    Exception lastException();\n\n    void setLastException( Exception e );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/auto/Terminate.java",
    "content": "package com.pinecone.hydra.auto;\n\npublic final class Terminate extends ArchInstructation {\n    Terminate () { }\n\n    @Override\n    public void execute() throws Exception {\n        throw new InstantKillException();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchExertion.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.framework.util.Debug;\n\npublic abstract class ArchExertion extends ArchGraphNode implements Exertion {\n    protected String                 mszName;\n    protected boolean                mbDefaultRollback = false;\n    protected IntegrityLevel         mIntegrityLevel   = IntegrityLevel.Strict;\n    protected ExertionStatus         mStatus           = ExertionStatus.NEW;\n    protected Exception              mLastError        = null;\n    protected long                   mnStartNano             ;\n\n    protected ArchExertion() {\n        this.mnStartNano = System.nanoTime();\n    }\n\n    @Override\n    public void reset() {\n        this.mStatus = ExertionStatus.NEW;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public void setName( String name ) {\n        this.mszName = name;\n    }\n\n    @Override\n    public IntegrityLevel getIntegrityLevel(){\n        return this.mIntegrityLevel;\n    }\n\n    @Override\n    public void setIntegrityLevel( IntegrityLevel level ){\n        this.mIntegrityLevel = level;\n    }\n\n    @Override\n    public long getStartNano() {\n        return this.mnStartNano;\n    }\n\n    @Override\n    public void setDefaultRollback( boolean b ){\n        this.mbDefaultRollback = b;\n    }\n\n    @Override\n    public boolean isDefaultRollback() {\n        return this.mbDefaultRollback;\n    }\n\n    @Override\n    public ExertionStatus getStatus() {\n        return this.mStatus;\n    }\n\n    protected void intoStart() {\n        this.mStatus = ExertionStatus.RUNNING;\n    }\n\n    protected void intoFinished() {\n        this.mStatus = ExertionStatus.FINISHED;\n    }\n\n    protected void intoTerminated() {\n        this.mStatus = ExertionStatus.TERMINATED;\n    }\n\n    protected void intoRollback() {\n        this.mStatus = ExertionStatus.ROLLING;\n    }\n\n    protected void intoError( 
Exception e ) {\n        this.mStatus    = ExertionStatus.ERROR;\n        this.mLastError = e;\n    }\n\n    @Override\n    public Exception getLastError() {\n        return this.mLastError;\n    }\n\n    protected abstract void doStart();\n\n    protected abstract void doTerminate() ;\n\n    protected abstract void doRollback();\n\n    protected boolean handleErrorCondition( Exception e ) {\n        if( this.mIntegrityLevel != IntegrityLevel.Strict ) {\n            if( this.mIntegrityLevel == IntegrityLevel.Warning ) {\n                Debug.warn( \"TODO\", e, e.getMessage() ); // TODO\n                e.printStackTrace();\n            }\n            return true;\n        }\n        return false;\n    }\n\n    @Override\n    public void start() {\n        this.intoStart();\n        try{\n            this.doStart();\n            this.intoFinished();\n        }\n        catch ( Exception e ) {\n            if( this.handleErrorCondition( e ) ) {\n                this.intoFinished();\n            }\n            else {\n                this.intoError( e );\n            }\n        }\n    }\n\n    @Override\n    public void terminate() {\n        try{\n            this.doTerminate();\n            this.intoTerminated();\n        }\n        catch ( Exception e ) {\n            if( this.handleErrorCondition( e ) ) {\n                this.intoTerminated();\n            }\n            else {\n                this.intoError( e );\n            }\n        }\n    }\n\n    @Override\n    public void rollback() {\n        this.intoRollback();\n        try{\n            this.doRollback();\n            this.intoFinished();\n        }\n        catch ( Exception e ) {\n            if( this.handleErrorCondition( e ) ) {\n                this.intoFinished();\n            }\n            else {\n                this.intoError( e );\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchGraphNode.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic abstract class ArchGraphNode implements GraphNode {\n    protected int             mnStratumId;\n    protected GraphNode       mParent;\n\n    protected void setParent( GraphNode parent ) {\n        this.mParent = parent;\n    }\n\n    @Override\n    public int getStratumId() {\n        return this.mnStratumId;\n    }\n\n    @Override\n    public GraphNode parent() {\n        return this.mParent;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchIrrevocableController.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic abstract class ArchIrrevocableController extends ArchExertion implements ProcessController {\n    public ArchIrrevocableController() {\n        super();\n    }\n\n    @Override\n    public void doStart() {\n        // Marking state for DFA.\n    }\n\n    @Override\n    public void doTerminate() {\n    }\n\n    @Override\n    public void doRollback() {\n        // Do nothing, 'Irrevocable Controller (e.g. Break, Continue)' cannot directly withdraw.\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchLoop.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic abstract class ArchLoop extends ArchSequential implements Loop {\n    @Override\n    protected BranchContext init_branch_context() {\n        BranchContext    context  = new BranchContext();\n        context.doBreak           = false;\n\n        return context;\n    }\n\n    @Override\n    protected boolean do_process_controller  ( ProcessController controller, BranchContext context ) {\n        context.doBreak = this.invoke_process_controller( controller );\n        if( context.doBreak ) {\n            return true;\n        }\n\n        if( controller instanceof BreakController ) {\n            context.doBreak = true;\n            return true;\n        }\n        else if( controller instanceof ContinueController ) {\n            return true;\n        }\n        else if( controller instanceof JumpController ) {\n            try{\n                JumpController jmp = ((JumpController) controller);\n                context.jmpPoint   = this.eval_jump_point( jmp.getJumpPoint() );\n                return true;\n            }\n            catch ( InstantJumpOutBranchException e ){\n                context.doBreak = true;\n                return true;\n            }\n        }\n\n        throw new IllegalArgumentException( \"ProcessController for Loop can ONLY be [break, continue, jump]\" );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchParallel.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.hydra.orchestration.parallel.ParallelExertion;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.concurrent.Phaser;\n\npublic abstract class ArchParallel extends ArchSequential implements Parallel {\n    protected Phaser activePhaser           = new Phaser(1);\n\n    ArchParallel() {\n        super();\n    }\n\n    @Override\n    public void reset() {\n        this.getExertium().reset();\n    }\n\n    @Override\n    protected void waiting_exertions_pool_synchronized() {\n        this.activePhaser.arriveAndAwaitAdvance();\n\n        List<GraphNode > children = this.getChildren();\n        Iterator<GraphNode > iter = children.iterator();\n\n        while ( iter.hasNext() ) {\n            Exertion exertion = (Exertion) iter.next();\n\n            if( !exertion.isFinished() ){\n                throw new UnfulfilledActionException( \"Illegal transaction status, unfulfilled exertion founded.\" , exertion );\n            }\n        }\n    }\n\n    @Override\n    protected void waiting_for_single_exertion( Exertion exertion ) {\n        if( exertion instanceof ParallelExertion ) {\n            ParallelExertion pe = ( ParallelExertion ) exertion;\n            if( pe.isForceSynchronized() ) {\n                synchronized ( pe.getFinaleLock() ) {\n                    if( pe.getMasterExecutum().getAffiliateThread().isAlive() ) {\n                        try {\n                            long nMax = pe.getMaximumExecutionTime();\n                            if( nMax > 0 ) {\n                                pe.getFinaleLock().wait( nMax );\n                            }\n                            else {\n                                pe.getFinaleLock().wait( );\n                            }\n                        }\n                        catch ( InterruptedException e ) {\n                            throw new UnfulfilledActionException( e );\n                        
}\n                    }\n                }\n            }\n        }\n    }\n\n    @Override\n    protected boolean is_dfa_status_finished_check_required( Exertion exertion ) {\n        if( exertion instanceof ParallelExertion ) {\n            return ((ParallelExertion) exertion).isForceSynchronized();\n        }\n        return true;\n    }\n\n    @Override\n    public void notifyFinished( Exertion exertion ){\n        this.activePhaser.arriveAndDeregister();\n    }\n\n    @Override\n    public void notifyExecuting ( Exertion exertion ) {\n        this.activePhaser.register();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchSequential.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport java.util.Iterator;\nimport java.util.List;\n\npublic abstract class ArchSequential extends ArchTransaction implements Sequential {\n    ArchSequential() {\n        super();\n    }\n\n    @Override\n    public void reset() {\n        this.getExertium().reset();\n    }\n\n    protected void execute_exertion( Exertion exertion ) {\n        try {\n            if( this.mExertionStartCB != null ) {\n                this.mExertionStartCB.callback( exertion );\n            }\n            exertion.start();\n            this.waiting_for_single_exertion( exertion );\n            if( this.is_dfa_status_finished_check_required( exertion ) && !exertion.isFinished() ) {\n                throw new UnfulfilledActionException( exertion );\n            }\n\n            if( this.mExertionEndCB != null ) {\n                this.mExertionEndCB.callback( exertion );\n            }\n        }\n        catch ( RuntimeException e ) {\n            if( !this.getSeqExceptionNeglector().isNeglectException( e ) ){\n                if( exertion.getIntegrityLevel() != IntegrityLevel.Strict ) {\n                    if( exertion.isDefaultRollback() ) {\n                        exertion.rollback();\n                        // TODO: Notice for warning.\n                    }\n                    else {\n                        throw e;\n                    }\n                }\n            }\n        }\n    }\n\n    protected void noticeAll( BranchNoticeException e ) {\n        List<GraphNode > children = this.getChildren();\n        for( GraphNode node : children ) {\n            if( node instanceof Notifiable ) {\n                ((Notifiable) node).notice( e );\n            }\n        }\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    protected Iterator<GraphNode > eval_jump_point( Object at ) throws InstantJumpOutBranchException {\n        if( at instanceof Iterator<? 
> ) {\n            return ( Iterator<GraphNode >) at;\n        }\n        else if( at instanceof Integer ) {\n            int id = (int) at;\n            int  i = 0;\n            List<GraphNode > children = this.getChildren();\n            Iterator<GraphNode >   it = children.iterator();\n\n            if( id > children.size() ) {\n                throw new IndexOutOfBoundsException( \"Jump [Segment:\" + id + \"] is out of range.\" );\n            }\n            else if( id == children.size() ) { // Instant jump-out.\n                throw new InstantJumpOutBranchException();\n            }\n\n            while ( it.hasNext() ) {\n                if( i == id ) {\n                    return it;\n                }\n                it.next();\n                ++i;\n            }\n        }\n\n        return null;\n    }\n\n    protected boolean invoke_process_controller( ProcessController controller ) {\n        try{\n            controller.call();\n        }\n        catch ( BranchNoticeException e ) {\n            if( e.isNoticeAll() ) { // TODO: Notice designed.\n                this.noticeAll( e );\n            }\n        }\n        catch ( InstantJumpOutBranchException e ){\n            return true;\n        }\n        catch ( BranchControlException e ){\n            e.printStackTrace(); // TODO: BranchControlException for more precisely control granularity.\n        }\n\n        return false;\n    }\n\n    protected boolean do_process_controller  ( ProcessController controller, BranchContext context ) {\n        context.doBreak = this.invoke_process_controller( controller );\n        if( context.doBreak ) {\n            return true;\n        }\n\n        if( controller instanceof BreakController ) {\n            context.doBreak = true;\n            return true;\n        }\n        else if( controller instanceof JumpController ) {\n            try{\n                JumpController jmp = ((JumpController) controller);\n                context.jmpPoint   = 
this.eval_jump_point( jmp.getJumpPoint() );\n                context.doBreak    = false;\n                return true;\n            }\n            catch ( InstantJumpOutBranchException e ){\n                context.doBreak = true;\n                return true;\n            }\n        }\n\n        throw new IllegalArgumentException( \"ProcessController for Sequential can ONLY be [break, jump]\" );\n    }\n\n    protected BranchContext init_branch_context() {\n        BranchContext    context  = new BranchContext( true );\n\n        return context;\n    }\n\n    /**\n     * Waiting synchronized for all exertions which in pool .\n     * Should overridden by Parallel.\n     */\n    protected void waiting_exertions_pool_synchronized() {\n\n    }\n\n    /**\n     * Waiting for single exertion synchronized.\n     * Should overridden by Parallel.\n     */\n    protected void waiting_for_single_exertion( Exertion exertion ) {\n\n    }\n\n    protected boolean is_dfa_status_finished_check_required( Exertion exertion ) {\n        return true;\n    }\n\n    @Override\n    public void start() {\n        this.getExertium().intoStart();\n\n        List<GraphNode > children = this.getChildren();\n        BranchContext    context  = this.init_branch_context();\n\n        while ( true ) {\n            Iterator<GraphNode > iter;\n            if( context.jmpPoint != null ) {\n                iter              = context.jmpPoint;\n                context.jmpPoint  = null;\n            }\n            else {\n                iter              = children.iterator();\n            }\n\n            while ( iter.hasNext() ) {\n                Exertion exertion = (Exertion) iter.next();\n\n                if( exertion instanceof ProcessController ) {\n                    if( this.do_process_controller( (ProcessController) exertion, context ) ){\n                        break;\n                    }\n                }\n                else {\n                    this.execute_exertion( exertion 
);\n                }\n\n                //Debug.sleep( 100 );\n                ++context.nIP;\n            }\n\n            if( context.doBreak ) {\n                break;\n            }\n        }\n\n        this.waiting_exertions_pool_synchronized();\n\n        this.getExertium().intoFinished();\n    }\n\n    @Override\n    public void terminate() {\n\n    }\n\n    @Override\n    public void rollback() {\n\n    }\n\n    protected class BranchContext {\n        public Iterator<GraphNode >   jmpPoint;\n        public boolean                doBreak;\n        public int                    nIP;\n\n        BranchContext( boolean doBreak ) {\n            this.doBreak = doBreak;\n        }\n\n        BranchContext() {\n            this( true );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchStratum.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic abstract class ArchStratum implements GraphStratum {\n    protected List<GraphNode >   mChildren;\n    protected ArchGraphNode      mParent;\n\n    ArchStratum() {\n        this.mChildren = new ArrayList<>();\n    }\n\n    @Override\n    public ArchGraphNode parent() {\n        return this.mParent;\n    }\n\n    @Override\n    public List<GraphNode > getChildren() {\n        return this.mChildren;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ArchTransaction.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.hydra.orchestration.regulation.NeglectRegulation;\nimport com.pinecone.hydra.orchestration.regulation.RuntimeNeglector;\n\npublic abstract class ArchTransaction extends ArchStratum implements Transaction {\n    protected Exertium               mExertium; // To implement GraphNode and Exertion.\n    protected NeglectRegulation      mSequentialRuntimeNeglector;\n    protected ExertionEventCallback  mExertionStartCB;\n    protected ExertionEventCallback  mExertionEndCB;\n\n    protected ArchTransaction() {\n        super();\n        this.mExertium                     = new Exertium();\n        this.mSequentialRuntimeNeglector   = new RuntimeNeglector( this );\n    }\n\n    @Override\n    public NeglectRegulation getSeqExceptionNeglector() {\n        return this.mSequentialRuntimeNeglector;\n    }\n\n    @Override\n    public void setSeqExceptionNeglector( NeglectRegulation neglector ) {\n        this.mSequentialRuntimeNeglector = neglector;\n    }\n\n    protected Exertium getExertium() {\n        return this.mExertium;\n    }\n\n    @Override\n    public void registerExertionStartCallback( ExertionEventCallback callback ) {\n        this.mExertionStartCB = callback;\n    }\n\n    @Override\n    public void registerExertionEndCallback( ExertionEventCallback callback ) {\n        this.mExertionEndCB = callback;\n    }\n\n    @Override\n    public void setDefaultRollback( boolean b ) {\n        this.getExertium().setDefaultRollback( b );\n    }\n\n    @Override\n    public boolean isDefaultRollback() {\n        return this.getExertium().isDefaultRollback();\n    }\n\n    @Override\n    public String getName(){\n        return this.getExertium().getName();\n    }\n\n    @Override\n    public void setName( String name ) {\n        this.getExertium().setName( name );\n    }\n\n    @Override\n    public IntegrityLevel getIntegrityLevel(){\n        return 
this.getExertium().getIntegrityLevel();\n    }\n\n    @Override\n    public void setIntegrityLevel( IntegrityLevel level ){\n        this.getExertium().setIntegrityLevel( level );\n    }\n\n    @Override\n    public long getStartNano() {\n        return this.getExertium().getStartNano();\n    }\n\n    @Override\n    public int getStratumId() {\n        return this.getExertium().getStratumId();\n    }\n\n    protected void beforeAdd( Exertion exertion ) {\n        if( exertion instanceof ArchGraphNode ) {\n            ((ArchGraphNode) exertion).setParent( this );\n        }\n    }\n\n    @Override\n    public void add( Exertion exertion ) {\n        this.beforeAdd( exertion );\n        this.getChildren().add( exertion );\n    }\n\n    @Override\n    public void addFirst( Exertion exertion ) {\n        this.beforeAdd( exertion );\n        this.getChildren().add( 0, exertion );\n    }\n\n    @Override\n    public ExertionStatus getStatus() {\n        return this.getExertium().getStatus();\n    }\n\n    @Override\n    public Exception getLastError() {\n        return this.mExertium.getLastError();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BooleanCondition.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface BooleanCondition extends Condition {\n    boolean result();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BranchControlException.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class BranchControlException extends Exception implements Pinenut {\n    public BranchControlException() {\n        super();\n    }\n\n    public BranchControlException( String message ) {\n        super( message );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BranchNoticeException.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic class BranchNoticeException extends BranchControlException {\n    protected Object  noticeMsg;\n    protected boolean noticeAll;\n\n    public BranchNoticeException() {\n        super();\n    }\n\n    public BranchNoticeException( String message ) {\n        super( message );\n    }\n\n    public BranchNoticeException( Object noticeMsg, boolean noticeAll, String message ) {\n        super( message );\n        this.noticeMsg = noticeMsg;\n        this.noticeAll = noticeAll;\n    }\n\n    public BranchNoticeException( Object noticeMsg ) {\n        super();\n        this.noticeMsg = noticeMsg;\n    }\n\n    public Object getNoticeMsg() {\n        return this.noticeMsg;\n    }\n\n    public boolean isNoticeAll() {\n        return this.noticeAll;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BreakController.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface BreakController extends ProcessController {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/BreakPoint.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic class BreakPoint extends ArchIrrevocableController implements BreakController  {\n    public BreakPoint() {\n        super();\n    }\n\n    @Override\n    public void call() throws BranchControlException {\n        this.start();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/CausalBranch.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface CausalBranch extends Transaction {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Condition.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface Condition {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ContinueController.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface ContinueController extends ProcessController {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ContinuePoint.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic class ContinuePoint extends ArchIrrevocableController implements ContinueController {\n    public ContinuePoint() {\n        super();\n    }\n\n    @Override\n    public void call() throws BranchControlException {\n        this.start();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Exertion.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.framework.system.executum.Chronum;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Exertion extends Pinenut, GraphNode, Chronum {\n    String getName();\n\n    void setName( String name );\n\n    IntegrityLevel getIntegrityLevel();\n\n    void setIntegrityLevel( IntegrityLevel level );\n\n    void reset();\n\n    void start();\n\n    void terminate();\n\n    void rollback();\n\n    void setDefaultRollback( boolean b );\n\n    boolean isDefaultRollback();\n\n    ExertionStatus getStatus();\n\n    default boolean isFinished(){\n        return this.getStatus() == ExertionStatus.FINISHED;\n    }\n\n    default boolean isIntrrupted(){\n        return this.getStatus() == ExertionStatus.INTERRUPTED;\n    }\n\n    default boolean isTerminated(){\n        return this.getStatus() == ExertionStatus.TERMINATED;\n    }\n\n    // The running is end.\n    default boolean isEnded() {\n        return this.getStatus().isEnded();\n    }\n\n    Exception getLastError();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ExertionEventCallback.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ExertionEventCallback extends Pinenut {\n    void callback( Exertion exertion );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ExertionStatus.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic enum ExertionStatus {\n    NEW            ( \"New\"         ),\n    RUNNING        ( \"Running\"     ),\n    FINISHED       ( \"Finished\"    ), // Finished termination.\n    TERMINATED     ( \"Terminated\"  ), // Forced termination.\n    ROLLING        ( \"Rolling\"     ),\n    INTERRUPTED    ( \"Interrupted\" ),\n    ERROR          ( \"Error\"       );\n\n    private final String value;\n    ExertionStatus( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n\n    public static ExertionStatus queryStatus( String sz ) {\n        return ExertionStatus.valueOf( sz.toUpperCase() );\n    }\n\n    public boolean isEnded() {\n        return this == ExertionStatus.FINISHED || this == ExertionStatus.TERMINATED || this == ExertionStatus.ERROR;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Exertium.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.framework.system.NotImplementedException;\n\npublic class Exertium extends ArchExertion {\n    @Override\n    protected void doStart() {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    protected void doTerminate() {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    protected void doRollback() {\n        throw new NotImplementedException();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/GraphNode.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface GraphNode extends Pinenut {\n    int getStratumId();\n\n    GraphNode parent();\n\n    default GraphNode root() {\n        GraphNode p = this.parent();\n        if( p == null ) {\n            return this;\n        }\n\n        return p.root();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/GraphStratum.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport java.util.List;\n\npublic interface GraphStratum extends GraphNode {\n    List<GraphNode > getChildren();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/InstantJumpOutBranchException.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic class InstantJumpOutBranchException extends BranchControlException {\n    public InstantJumpOutBranchException() {\n        super();\n    }\n\n    public InstantJumpOutBranchException( String message ) {\n        super( message );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/IntegrityLevel.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic enum IntegrityLevel {\n    // Absolute and successfully transaction executed.\n    // If errors happened, it will stop the whole transaction graph.\n    Strict          (\"Strict\"),\n\n    // Irresponsibly invoked the transaction.\n    // If errors happened, it will ignored all errors(Tracing warning), and keeps continuity for next transactions.\n    Warning         (\"Warning\"),\n\n    // Irresponsibly invoked the transaction.\n    // If errors happened, it will ignored all errors(No warning), and keeps continuity for next transactions.\n    Irresponsible   (\"Irresponsible\");\n\n    private final String value;\n\n    IntegrityLevel( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n\n    public static String queryName( IntegrityLevel type ) {\n        return type.getName();\n    }\n\n    public static IntegrityLevel queryIntegrityLevel( String sz ) {\n        return IntegrityLevel.valueOf( sz );\n    }\n\n    public static final String ConfIntegrityLevelKey = \"IntegrityLevel\";\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/JumpController.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface JumpController extends ProcessController {\n    JumpController setJumpPoint( Object iter );\n\n    Object getJumpPoint();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/JumpPoint.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic class JumpPoint extends ArchIrrevocableController implements JumpController {\n    protected Object mJumpPoint;\n\n    public JumpPoint( Object jumpPoint ) {\n        super();\n        this.mJumpPoint = jumpPoint;\n    }\n\n    @Override\n    public JumpPoint setJumpPoint(Object mJumpPoint) {\n        this.mJumpPoint = mJumpPoint;\n        return this;\n    }\n\n    @Override\n    public Object getJumpPoint() {\n        return this.mJumpPoint;\n    }\n\n    @Override\n    public void call() throws BranchControlException {\n        this.start();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Loop.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface Loop extends Transaction {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/LoopAction.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic class LoopAction extends ArchLoop {\n    public LoopAction() {\n        super();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Notifiable.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Notifiable extends Pinenut {\n    void notice( BranchNoticeException e );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Parallel.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface Parallel extends Transaction {\n    void notifyFinished  ( Exertion exertion );\n\n    void notifyExecuting ( Exertion exertion );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ParallelAction.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.hydra.orchestration.parallel.ParallelExertion;\nimport com.pinecone.hydra.orchestration.parallel.WrappedMasterParallelium;\n\npublic class ParallelAction extends ArchParallel {\n    public ParallelAction() {\n        super();\n    }\n\n    public static ParallelExertion wrap( Exertion exertion ) {\n        return new WrappedMasterParallelium( exertion );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/ProcessController.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface ProcessController extends Exertion {\n    ProcessController BREAK    = new BreakPoint();\n    ProcessController CONTINUE = new ContinuePoint();\n\n    void call() throws BranchControlException ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Sequential.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic interface Sequential extends Transaction {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/SequentialAction.java",
    "content": "package com.pinecone.hydra.orchestration;\n\npublic class SequentialAction extends ArchSequential {\n    public SequentialAction() {\n        super();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/Transaction.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.hydra.orchestration.regulation.NeglectRegulation;\nimport com.pinecone.hydra.system.flow.Stage;\n\npublic interface Transaction extends Exertion, Stage {\n    void add( Exertion exertion );\n\n    void addFirst( Exertion exertion );\n\n    NeglectRegulation getSeqExceptionNeglector();\n\n    void setSeqExceptionNeglector( NeglectRegulation neglector ) ;\n\n    void registerExertionStartCallback( ExertionEventCallback callback );\n\n    void registerExertionEndCallback( ExertionEventCallback callback );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/UnfulfilledActionException.java",
    "content": "package com.pinecone.hydra.orchestration;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class UnfulfilledActionException extends PineRuntimeException {\n    protected Exertion exertion;\n\n    public UnfulfilledActionException( Exertion exertion ) {\n        this( null, \"\", exertion );\n    }\n\n    public UnfulfilledActionException( Throwable cause, String message, Exertion exertion ) {\n        super( message, cause );\n        this.exertion = exertion;\n    }\n\n    public UnfulfilledActionException( String message, Exertion exertion ) {\n        this( null, message, exertion );\n    }\n\n    public UnfulfilledActionException ( Throwable cause ) {\n        super(cause);\n    }\n\n    public Exertion getExertion() {\n        return this.exertion;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/parallel/ArchMasterParallelium.java",
    "content": "package com.pinecone.hydra.orchestration.parallel;\n\nimport com.pinecone.framework.system.executum.ArchThreadum;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.orchestration.ArchExertion;\nimport com.pinecone.hydra.orchestration.GraphNode;\nimport com.pinecone.hydra.orchestration.Parallel;\nimport com.pinecone.hydra.orchestration.UnfulfilledActionException;\n\npublic abstract class ArchMasterParallelium extends ArchExertion implements ParallelExertion {\n    protected boolean       mbIsDetachedParallelium     = true;\n\n    protected long          mnMaximumExecutionMillis    = -1;\n\n    protected boolean       mbIsForceSynchronized       = false;\n\n    protected final Object  mFinaleLock                 = new Object();\n\n\n    protected Processum    mParentProcessum             = null;\n\n    protected Executum     mMasterExecutum              = new ArchThreadum( null, this.mParentProcessum ) {\n        @Override\n        public void apoptosis() {\n            super.apoptosis(); //TODO\n        }\n    };\n\n    protected Thread       mMasterStartWrapThread       = new Thread( new Runnable() {\n        protected ArchMasterParallelium ium = ArchMasterParallelium.this;\n\n        @Override\n        public void run() {\n            this.ium.invokeMasterSeqStart();\n        }\n    });\n\n    protected Thread       mMasterRollbackWrapThread    = new Thread( new Runnable() {\n        protected ArchMasterParallelium ium = ArchMasterParallelium.this;\n\n        @Override\n        public void run() {\n            this.ium.invokeMasterSeqRollback();\n        }\n    });\n\n\n    public ArchMasterParallelium() {\n\n    }\n\n\n    protected void invokeMasterSeqStart(){\n        try{\n            super.start();\n        }\n        finally {\n            this.after_master_thread_finished();\n        }\n    }\n\n    protected void invokeMasterSeqRollback(){\n        
try{\n            super.rollback();\n        }\n        finally {\n            this.after_master_thread_finished();\n        }\n    }\n\n\n    protected void before_master_thread_executing() {\n        GraphNode parent = this.parent();\n        if( parent instanceof Parallel) {\n            ((Parallel) parent).notifyExecuting( this );\n        }\n    }\n\n    protected void join_master_thread_if_is() {\n        if( this.isJoined() ) {\n            try{\n                if( this.mnMaximumExecutionMillis <= 0 ) {\n                    this.getMasterExecutum().getAffiliateThread().join();\n                }\n                else {\n                    this.getMasterExecutum().getAffiliateThread().join( this.mnMaximumExecutionMillis );\n                }\n            }\n            catch ( InterruptedException e ) {\n                throw new UnfulfilledActionException( e );\n            }\n        }\n    }\n\n    protected void after_master_thread_finished() {\n        this.releaseFinaleLock();\n        GraphNode parent = this.parent();\n        if( parent instanceof Parallel) {\n            ((Parallel) parent).notifyFinished( this );\n        }\n    }\n\n    @Override\n    public Executum getMasterExecutum(){\n        return this.mMasterExecutum;\n    }\n\n    @Override\n    public void releaseFinaleLock(){\n        if( this.isForceSynchronized() ) {\n            synchronized ( this.mFinaleLock ) {\n                this.mFinaleLock.notify();\n            }\n        }\n    }\n\n    @Override\n    public void start() {\n        this.before_master_thread_executing();\n        this.mMasterExecutum.setThreadAffinity( this.mMasterStartWrapThread );\n        this.mMasterExecutum.getAffiliateThread().start();\n\n        this.join_master_thread_if_is();\n    }\n\n    @Override\n    public boolean isForceSynchronized() {\n        return this.mbIsForceSynchronized;\n    }\n\n    @Override\n    public void terminate() {\n        super.terminate();\n        
this.mMasterExecutum.kill();\n    }\n\n    @Override\n    public void rollback() {\n        this.before_master_thread_executing();\n        this.mMasterExecutum.setThreadAffinity( this.mMasterRollbackWrapThread );\n        this.mMasterExecutum.getAffiliateThread().start();\n    }\n\n    @Override\n    public Object getFinaleLock(){\n        return this.mFinaleLock;\n    }\n\n    @Override\n    public ParallelExertion setForceSynchronized() {\n        this.mbIsForceSynchronized = true;\n        return this;\n    }\n\n    @Override\n    public ParallelExertion setNoneSynchronized() {\n        this.mbIsForceSynchronized = false;\n        return this;\n    }\n\n    @Override\n    public boolean isDetached() {\n        return this.mbIsDetachedParallelium;\n    }\n\n    @Override\n    public boolean isJoined() {\n        return !this.mbIsDetachedParallelium;\n    }\n\n    @Override\n    public ParallelExertion setDetach() {\n        this.mbIsDetachedParallelium = true;\n        return this;\n    }\n\n    @Override\n    public ParallelExertion setJoin() {\n        this.mbIsDetachedParallelium = false;\n        return this;\n    }\n\n    @Override\n    public ParallelExertion setMaximumExecutionTime( long millis ) {\n        this.mnMaximumExecutionMillis = millis;\n        return this;\n    }\n\n    @Override\n    public long getMaximumExecutionTime() {\n        return this.mnMaximumExecutionMillis;\n    }\n\n    @Override\n    public String nomenclature( Thread that ) {\n        return String.format( \"action-%s-%s\", this.getName(), that.getName() ).toLowerCase();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/parallel/ParallelExertion.java",
    "content": "package com.pinecone.hydra.orchestration.parallel;\n\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.hydra.orchestration.Exertion;\n\npublic interface ParallelExertion extends Exertion {\n    Object getFinaleLock();\n\n    boolean isForceSynchronized();\n\n    ParallelExertion setForceSynchronized();\n\n    ParallelExertion setNoneSynchronized();\n\n    /**\n     * FinaleLock is used to control the final-synchronized of the parent sequential action-list\n     * Explicitly release this lock during the runtime, which can directly removes the buff of the 'ForceSynchronized'.\n     */\n    void releaseFinaleLock();\n\n    boolean isDetached();\n\n    boolean isJoined();\n\n    ParallelExertion setDetach();\n\n    ParallelExertion setJoin();\n\n    Executum getMasterExecutum();\n\n    ParallelExertion setMaximumExecutionTime( long millis );\n\n    long getMaximumExecutionTime();\n\n    String   nomenclature   ( Thread   that );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/parallel/WrappedMasterParallelium.java",
    "content": "package com.pinecone.hydra.orchestration.parallel;\n\nimport com.pinecone.hydra.orchestration.Exertion;\n\npublic class WrappedMasterParallelium extends ArchMasterParallelium {\n    protected Exertion mWrapped;\n\n    public WrappedMasterParallelium( Exertion exertion ) {\n        this.mWrapped = exertion;\n    }\n\n    @Override\n    protected void doStart() {\n        this.mWrapped.start();\n    }\n\n    @Override\n    protected void doTerminate() {\n        this.mWrapped.terminate();\n    }\n\n    @Override\n    protected void doRollback() {\n        this.mWrapped.rollback();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/recorder/ActionTape.java",
    "content": "package com.pinecone.hydra.orchestration.recorder;\n\npublic interface ActionTape {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/regulation/NeglectRegulation.java",
    "content": "package com.pinecone.hydra.orchestration.regulation;\n\npublic interface NeglectRegulation extends Regulation {\n    boolean isNeglectException( Exception e );\n\n    void add( Class<?> stereotype );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/regulation/Regulation.java",
    "content": "package com.pinecone.hydra.orchestration.regulation;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Regulation extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/orchestration/regulation/RuntimeNeglector.java",
    "content": "package com.pinecone.hydra.orchestration.regulation;\n\nimport com.pinecone.hydra.orchestration.Exertion;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class RuntimeNeglector implements NeglectRegulation {\n    protected List<Class<?> > mNeglectExceptions;\n    protected Exertion mParentExertion;\n\n    public RuntimeNeglector( Exertion parent ) {\n        this.mParentExertion    = parent;\n        this.mNeglectExceptions = new ArrayList<>();\n    }\n\n    public List<Class<?> > getNeglectExceptions() {\n        return this.mNeglectExceptions;\n    }\n\n    public Exertion getParentExertion() {\n        return this.mParentExertion;\n    }\n\n    @Override\n    public void add( Class<? > stereotype ) {\n        this.getNeglectExceptions().add( stereotype );\n    }\n\n    @Override\n    public boolean isNeglectException( Exception e ){\n        List<Class<?> > neglectExceptions = this.getNeglectExceptions();\n\n        for( Class<?> c : neglectExceptions ) {\n            if( c.isInstance( e ) ) {\n                return true;\n            }\n        }\n\n        return false;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ArchProcessManager.java",
    "content": "package com.pinecone.hydra.proc;\n\npublic abstract class ArchProcessManager implements ProcessManager {\n\n    protected abstract void expunge( UProcess that );\n\n    public static void invokeExpunge( ArchProcessManager manager, UProcess that ) {\n        manager.expunge( that );\n    }\n\n    public static void invokeExpunge( ProcessManager pm, UProcess that ) {\n        if ( pm instanceof ArchProcessManager ) {\n            ((ArchProcessManager) pm).expunge( that );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ArchUProcess.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.ApoptosisRejectSignalException;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.framework.system.executum.Lifecycle;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.executum.TaskManager;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.entity.ElementNode;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.ns.ProcSpace;\nimport com.pinecone.hydra.proc.tomb.ResurgentTombstone;\nimport com.pinecone.hydra.proc.tomb.RuntimeTombstone;\nimport com.pinecone.hydra.system.ko.entity.ObjectTable;\n\npublic abstract class ArchUProcess implements UProcess {\n    protected Processum              mLocalProcess;\n\n    protected GUID                   mProcessID;\n    protected GUID                   mParentPID;\n    protected ObjectTable            mObjectTable;\n    protected ProcSpace              mProcSpace;         // TODO, Namespace, Hydra V3\n    protected RuntimeTombstone       mRuntimeTombstone;  // TODO, Tombstone, Hydra V2.7\n    protected ProcessActionTape      mActionTape;\n\n    protected ProcessManager         mProcessManager;\n\n    protected ExecutionImage         mExecutionImage;\n\n    protected Map<String, String[]>  mStartupArgs;\n    protected Map<String, String[]>  mEnvironmentVars;\n\n    protected ControllableLevel      mControllableLevel;\n    protected LocalDateTime          mEndTime;\n    protected LocalDateTime          mLastUpdateTime;\n\n    public ArchUProcess(\n            @Nullable Processum localProcess, GUID guid, String szName,\n            @Nullable UProcess parent, ProcessManager processManager, 
ExecutionImage image, ProcSpace procSpace,\n            Map<String, String[]> startupArgs, Map<String, String[]> environmentVars\n    ) {\n        this.mLocalProcess      = localProcess;\n        this.mProcessManager    = processManager;\n        this.mProcessID         = guid;\n        this.mExecutionImage    = image;\n        this.mProcSpace         = procSpace;\n        this.mRuntimeTombstone  = new ResurgentTombstone();\n        this.mStartupArgs       = startupArgs;\n        this.mEnvironmentVars   = environmentVars;\n        this.mControllableLevel = image.getControllableLevel();\n        this.mActionTape        = new GenericProcessActionTape();\n\n        if ( this.mLocalProcess == null ) {\n            this.mLocalProcess = new LocalSystemProcess( szName, parent );\n        }\n\n        if ( parent != null ) {\n            this.mParentPID     = parent.getPID();\n        }\n    }\n\n    public ArchUProcess(\n            @Nullable Processum localSystemProc, String szName,\n            @Nullable UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace,\n            Map<String, String[]> startupArgs, Map<String, String[]> environmentVars\n    ) {\n        this( localSystemProc, processManager.getGuidAllocator().nextGUID(), szName, parent, processManager, image, procSpace, startupArgs, environmentVars );\n    }\n\n    public ArchUProcess(\n            Processum localSystemProc,\n            @Nullable UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace,\n            Map<String, String[]> startupArgs, Map<String, String[]> environmentVars\n    ) {\n        this( localSystemProc, processManager.getGuidAllocator().nextGUID(), localSystemProc.getName(), parent, processManager, image, procSpace, startupArgs, environmentVars );\n    }\n\n    @Override\n    public Processum affinityLocalProcess() {\n        return this.mLocalProcess;\n    }\n\n    @Override\n    public ProcessActionTape actionTape() 
{\n        return this.mActionTape;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mProcessID;\n    }\n\n    @Override\n    public long getLocalPID() {\n        return this.getExecutumId();\n    }\n\n    @Override\n    public ElementNode getAccount() {\n        return null;\n    }\n\n    @Override\n    public UProcess parentProcess() {\n        return (UProcess) this.parentExecutum();\n    }\n\n    @Override\n    public GUID actualParentPID() {\n        return this.mParentPID;\n    }\n\n    @Override\n    public void applyActualParentPID( GUID pid ) {\n        this.mParentPID = pid;\n    }\n\n    @Override\n    public ProcessManager getOwnedProcessManager() {\n        return this.mProcessManager;\n    }\n\n    @Override\n    public GUID getParentProcessId() {\n        if ( this.parentProcess() != null ) {\n            return this.parentProcess().getGuid();\n        }\n\n        return null;\n    }\n\n    @Override\n    public long getParentLocalPID() {\n        if ( this.parentProcess() != null ) {\n            return this.parentProcess().getLocalPID();\n        }\n        return -1;\n    }\n\n    @Override\n    public ProcSpace getProcNamespace() {\n        return this.mProcSpace;\n    }\n\n    @Override\n    public RuntimeTombstone getRuntimeTombstone() {\n        return this.mRuntimeTombstone;\n    }\n\n    @Override\n    public Map<String, String[]> getStartupArguments() {\n        return this.mStartupArgs;\n    }\n\n    @Override\n    public Map<String, String[]> getEnvironmentVariables() {\n        return this.mEnvironmentVars;\n    }\n\n    @Override\n    public ObjectTable getObjectTable() {\n        return this.mObjectTable;\n    }\n\n    @Override\n    public ExecutionImage getExecutionImage() {\n        return this.mExecutionImage;\n    }\n\n    @Override\n    public ControllableLevel getControllableLevel() {\n        return this.mControllableLevel;\n    }\n\n    @Override\n    public LocalDateTime getEndTime() {\n        
return this.mEndTime;\n    }\n\n    @Override\n    public LocalDateTime getLastUpdateTime() {\n        return this.mLastUpdateTime;\n    }\n\n    @Override\n    public void triggerUpdateTerminationStatus() {\n        if ( this.getState() != Thread.State.TERMINATED ) {\n            throw new IllegalStateException( \"Bad time to trigger, I am still alive!\" );\n        }\n\n        this.triggerAfterRunnableTerminationStatus();\n    }\n\n    @Override\n    public void triggerAfterRunnableTerminationStatus() {\n        this.mLastUpdateTime = LocalDateTime.now();\n        this.mEndTime        = LocalDateTime.now();\n    }\n\n    /** Proxied Processum **/\n\n    @Override\n    public Map<Long, Executum> getOwnThreadGroup() {\n        return this.mLocalProcess.getOwnThreadGroup();\n    }\n\n    @Override\n    public TaskManager getTaskManager() {\n        return this.mLocalProcess.getTaskManager();\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.mLocalProcess.getCreateTime();\n    }\n\n    @Override\n    public LocalDateTime getStartTime() {\n        return this.mLocalProcess.getStartTime();\n    }\n\n    @Override\n    public String getName() {\n        return this.mLocalProcess.getName();\n    }\n\n    @Override\n    public void setName( String szName ) {\n        this.mLocalProcess.setName( szName );\n    }\n\n    @Override\n    public long getExecutumId() {\n        return this.mLocalProcess.getExecutumId();\n    }\n\n    @Override\n    public RuntimeSystem parentSystem() {\n        return this.mLocalProcess.parentSystem();\n    }\n\n    @Override\n    public RuntimeSystem revealNearestSystem() {\n        return this.mLocalProcess.revealNearestSystem();\n    }\n\n    @Override\n    public Executum parentExecutum() {\n        return this.mLocalProcess.parentExecutum();\n    }\n\n    @Override\n    public Executum setThreadAffinity( Thread affinity ) {\n        return this.mLocalProcess.setThreadAffinity( affinity );\n    }\n\n   
 @Override\n    public Thread getAffiliateThread() {\n        return this.mLocalProcess.getAffiliateThread();\n    }\n\n    @Override\n    public boolean isTerminated() {\n        return this.mLocalProcess.isTerminated();\n    }\n\n    @Override\n    public void start() {\n        this.mLocalProcess.start();\n    }\n\n    @Override\n    public void apoptosis() throws ApoptosisRejectSignalException {\n        this.mLocalProcess.apoptosis();\n    }\n\n    @Override\n    public void kill() {\n        this.mLocalProcess.kill();\n    }\n\n    @Override\n    public void interrupt() {\n        this.mLocalProcess.interrupt();\n    }\n\n    @Override\n    public void suspend() {\n        this.mLocalProcess.suspend();\n    }\n\n    @Override\n    public void resume() {\n        this.mLocalProcess.resume();\n    }\n\n    @Override\n    public void entreatLive() {\n        this.mLocalProcess.entreatLive();\n    }\n\n    @Override\n    public Thread.State getState() {\n        return this.mLocalProcess.getState();\n    }\n\n    @Override\n    public int getExceptionRestartTime() {\n        return this.mLocalProcess.getExceptionRestartTime();\n    }\n\n    @Override\n    public Lifecycle applyExceptionRestartTime( int time ) {\n        return this.mLocalProcess.applyExceptionRestartTime( time );\n    }\n\n    /** Proxied Processum End **/\n\n\n\n    static class LocalSystemProcess extends ArchProcessum {\n        LocalSystemProcess ( String szName, Processum parent ) {\n            super( szName, parent );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ControllableLevel.java",
    "content": "package com.pinecone.hydra.proc;\n\npublic enum ControllableLevel {\n    None     ( 0x00, \"None\" ),\n\n    Monitor  ( 0x01, \"Monitor\" ),\n\n    Weak     ( 0x02, \"Weak\" ),\n\n    Absolute ( 0x03, \"Absolute\" ),\n    \n    ;\n\n    private final int code;\n\n    private final String name;\n\n    ControllableLevel( int code, String name ) {\n        this.code = code;\n        this.name = name;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/GenericProcessActionTape.java",
    "content": "package com.pinecone.hydra.proc;\n\npublic class GenericProcessActionTape implements ProcessActionTape {\n\n    protected Throwable mLastError;\n\n    protected int mExitCode;\n\n    public GenericProcessActionTape() {\n\n    }\n\n    @Override\n    public Throwable getLastError() {\n        return this.mLastError;\n    }\n\n    @Override\n    public void setLastError( Throwable lastError ) {\n        this.mLastError = lastError;\n    }\n\n    @Override\n    public int getExitCode() {\n        return this.mExitCode;\n    }\n\n    @Override\n    public void setExitCode( int exitCode ) {\n        this.mExitCode = exitCode;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/InstitutionalProcess.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\n\n/**\n *  Pinecone Ursus For Java, InstitutionalProcess\n *  Author: Harald.E (Dragon King)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Institutional Uniform Process\n *  体制化统一进程\n *  *****************************************************************************************\n *  1). Processum => Local Process, managed under local-first autonomy and jurisdictional control.\n *  2). UProcess => Uniform Process,\n *      centrally constituted and managed by unified authority, with reserved central control rights.\n *  *****************************************************************************************\n *  1). Processum => Local Process, 地方本地进程，由地方优先自治、管制\n *  2). UProcess => Uniform Process, 中央编制的统一进程，由中央统一权威管制，拥有保留的中央控制权\n *  *****************************************************************************************\n */\npublic interface InstitutionalProcess extends Processum {\n\n    default Processum ownedLocalProcess() {\n        return this;\n    }\n\n    UProcess ownedUniformProcess();\n\n    default GUID getPID() {\n        return this.ownedUniformProcess().getPID();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/KernelProcess.java",
    "content": "package com.pinecone.hydra.proc;\n\npublic interface KernelProcess extends LocalUProcess {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/LineageProcessEnvironmentSection.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class LineageProcessEnvironmentSection implements ProcessEnvironmentSection {\n    protected Map<String, String[]> mSystemEnvironments;\n\n    public LineageProcessEnvironmentSection(Map<String, String[]> systemEnvironmentVars ) {\n        this.mSystemEnvironments = systemEnvironmentVars;\n    }\n\n    @Override\n    public Map<String, String[]> getSystemEnvironments() {\n        return this.mSystemEnvironments;\n    }\n\n    @Override\n    public Map<String, String[]> extendsFrom( final Map<String, String[]> superiorEnvironmentVars, final Map<String, String[]> contextEnvVars ) {\n        Map<String, String[]> neo = new HashMap<>( this.mSystemEnvironments );\n        neo.putAll( superiorEnvironmentVars );\n        if ( contextEnvVars != null ) {\n            neo.putAll( contextEnvVars );\n        }\n        return neo;\n    }\n\n    @Override\n    public Map<String, String[]> extendsFrom( UProcess superiorProcess, final Map<String, String[]> contextEnvVars ) {\n        return this.extendsFrom( superiorProcess.getEnvironmentVariables(), contextEnvVars );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/LocalHostedProcess.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.ns.ProcSpace;\n\npublic class LocalHostedProcess extends ArchUProcess implements LocalUProcess {\n    public LocalHostedProcess(\n            @Nullable Processum localSystemProc, GUID guid, String szName,\n            UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace,\n            Map<String, String[]> startupArgs, Map<String, String[]> environmentVars\n    ) {\n        super( localSystemProc, guid, szName, parent, processManager, image, procSpace, startupArgs, environmentVars );\n    }\n\n    public LocalHostedProcess(\n            @Nullable Processum localSystemProc, String szName,\n            UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace,\n            Map<String, String[]> startupArgs, Map<String, String[]> environmentVars\n    ) {\n        this( localSystemProc, processManager.getGuidAllocator().nextGUID(), szName, parent, processManager, image, procSpace, startupArgs, environmentVars );\n    }\n\n    public LocalHostedProcess(\n            @Nullable Processum localSystemProc,\n            UProcess parent, ProcessManager processManager, ExecutionImage image, ProcSpace procSpace,\n            Map<String, String[]> startupArgs, Map<String, String[]> environmentVars\n    ) {\n        this( localSystemProc, processManager.getGuidAllocator().nextGUID(), image.getName(), parent, processManager, image, procSpace, startupArgs, environmentVars );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/LocalUProcess.java",
    "content": "package com.pinecone.hydra.proc;\n\npublic interface LocalUProcess extends UProcess {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessActionTape.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ProcessActionTape extends Pinenut {\n\n    Throwable getLastError();\n\n    void setLastError( Throwable lastError );\n\n    int getExitCode();\n\n    void setExitCode( int exitCode );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessEnvironmentSection.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ProcessEnvironmentSection extends Pinenut {\n\n    Map<String, String[]> getSystemEnvironments();\n\n    Map<String, String[]> extendsFrom( final Map<String, String[]> superiorEnvironmentVars, final Map<String, String[]> contextEnvVars );\n\n    default Map<String, String[]> extendsFrom( final Map<String, String[]> superiorEnvironmentVars ) {\n        return this.extendsFrom( superiorEnvironmentVars, null );\n    }\n\n    Map<String, String[]> extendsFrom( UProcess superiorProcess, final Map<String, String[]> contextEnvVars );\n\n    default Map<String, String[]> extendsFrom( UProcess superiorProcess ) {\n        return this.extendsFrom( superiorProcess, null );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessManager.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.regime.Regiment;\nimport com.pinecone.framework.system.regime.arch.Manager;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.ImageLoader;\nimport com.pinecone.hydra.proc.image.ImageModifier;\nimport com.pinecone.hydra.system.ko.CascadeKernelObjectInstrument;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.QueryableInstrument;\n\npublic interface ProcessManager extends CascadeKernelObjectInstrument, Regiment, Manager, QueryableInstrument {\n\n    Processum superiorProcess();\n\n    UProcess getRootUProcess();\n\n    ImageLoader getImageLoader();\n\n    ProcessManagerConfig getKernelObjectConfig();\n\n    void applyRootUProcess( UProcess rootUProcess );\n\n    RuntimeSystem superiorSystem();\n\n    void applyGuidAllocator( GuidAllocator guidAllocator );\n\n    long     getVitalizeCount();\n\n    long     getFatalityCount();\n\n    long     processCount();\n\n    Collection<UProcess> fetchProcesses();\n\n    // Object clearance rate, help load balance and dispatch. [e.g. 
Using priority queue.]\n    default double getClearanceRate() {\n        double nFatality = this.getFatalityCount();\n        double nVitalize = this.getVitalizeCount();\n        return nFatality / ( nVitalize + nFatality );\n    }\n\n    void register( UProcess that );\n\n    void erase( UProcess that );\n\n    boolean autopsy( UProcess that );\n\n    LocalUProcess createLocalHostedProcess(\n             ExecutionImage image, UProcess parent, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars\n    );\n\n    UProcess getProcess( GUID pid );\n\n    Collection<UProcess> searchProcessesByName( String procName );\n\n    Collection<UProcess> searchProcessesByNameNoCase( String procName );\n\n    boolean containProcess( GUID pid );\n\n    default LocalUProcess createLocalHostedProcess(\n            ExecutionImage image, UProcess parent, Map<String, String[]> startupArgs\n    ) {\n        return this.createLocalHostedProcess( image, parent, startupArgs, null );\n    }\n\n    ImageModifier getImageModifier();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessManagerConfig.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface ProcessManagerConfig extends KernelObjectConfig {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ProcessManagerSystema.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic interface ProcessManagerSystema extends Hydrogen {\n\n    ProcessManager processManager();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/RemoteUProcess.java",
    "content": "package com.pinecone.hydra.proc;\n\npublic interface RemoteUProcess extends UProcess {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/UProcess.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.entity.ProcessElement;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.ns.ProcSpace;\nimport com.pinecone.hydra.proc.tomb.RuntimeTombstone;\nimport com.pinecone.hydra.system.ko.entity.ObjectTable;\n\npublic interface UProcess extends Processum, ProcessElement {\n\n    ProcessActionTape actionTape();\n\n    UProcess parentProcess();\n\n    GUID actualParentPID();\n\n    void applyActualParentPID( GUID pid );\n\n    ProcessManager getOwnedProcessManager();\n\n    ProcSpace getProcNamespace();\n\n    RuntimeTombstone getRuntimeTombstone();\n\n    ObjectTable getObjectTable();\n\n    ExecutionImage getExecutionImage();\n\n    ControllableLevel getControllableLevel();\n\n    LocalDateTime getEndTime() ;\n\n    LocalDateTime getLastUpdateTime() ;\n\n    Map<String, String[]> getStartupArguments();\n\n    Map<String, String[]> getEnvironmentVariables();\n\n    Processum affinityLocalProcess();\n\n    void triggerUpdateTerminationStatus();\n\n    void triggerAfterRunnableTerminationStatus();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/UniformProcessConfig.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\nimport java.util.Map;\n\npublic class UniformProcessConfig extends ArchKernelObjectConfig implements ProcessManagerConfig {\n\n    public UniformProcessConfig( Map<String, Object> config ) {\n        super(config);\n    }\n\n    public UniformProcessConfig(){\n        super();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/UniformProcessManager.java",
    "content": "package com.pinecone.hydra.proc;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.ImageLoader;\nimport com.pinecone.hydra.proc.image.ImageModifier;\nimport com.pinecone.hydra.proc.image.SafeImageModifier;\nimport com.pinecone.hydra.proc.image.UniformMultiScopeImageLoader;\nimport com.pinecone.hydra.proc.ns.GenericSegregationSpace;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.centrum.UniformCentralSystem;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\npublic class UniformProcessManager extends ArchProcessManager implements ProcessManager {\n\n    protected long                         mnVitalizeCount      = 0;\n    protected long                         mnFatalityCount      = 0;\n\n    protected String                       mSuperiorPathScope;\n    protected Namespace                    mThisNamespace;\n    protected GuidAllocator                mGuidAllocator;\n    protected Processum                    mSuperiorProcess;\n    protected UProcess                     mRootUProcess;\n    protected 
RuntimeSystem                mSuperiorSystem;\n    protected CascadeInstrument            mParentInstrument;\n    protected KernelObjectConfig           mKernelObjectConfig;\n    protected DynamicFactory               mDynamicFactory;\n    protected Map<GUID, UProcess>          mProcessMap;\n    protected ImageLoader                  mImageLoader;\n    protected ProcessEnvironmentSection    mProcessEnvironmentSection;\n\n    protected ImageModifier                mImageModifier;\n\n    public UniformProcessManager (\n            Processum superiorProcess, CascadeInstrument parentInstrument, String name, String superiorPathScope,\n            KernelObjectConfig config, @Nullable ImageLoader imageLoader, @Nullable GuidAllocator guidAllocator\n    ) {\n        this.mSuperiorPathScope         = superiorPathScope;\n        this.mSuperiorProcess           = superiorProcess;\n        this.mParentInstrument          = parentInstrument;\n        this.mProcessMap                = new ConcurrentHashMap<>();\n        this.mKernelObjectConfig        = config;\n        this.mGuidAllocator             = guidAllocator;\n        this.mDynamicFactory            = new GenericDynamicFactory( superiorProcess.getTaskManager().getClassLoader() );\n        this.mImageLoader               = imageLoader;\n        this.mProcessEnvironmentSection = new LineageProcessEnvironmentSection( this.mSuperiorProcess.parentSystem().getEnvironmentVars() );\n        this.mImageModifier             = new SafeImageModifier();\n\n        if ( this.mSuperiorProcess instanceof RuntimeSystem ) {\n            this.mSuperiorSystem = (RuntimeSystem) this.mSuperiorProcess;\n        }\n        else {\n            this.mSuperiorSystem = this.mSuperiorProcess.parentSystem();\n        }\n\n        this.setTargetingName( name );\n\n\n        if ( this.mSuperiorProcess != null ) {\n            if ( this.mSuperiorProcess instanceof RuntimeSystem ) {\n                this.mSuperiorSystem = (RuntimeSystem) 
this.mSuperiorProcess;\n            }\n            else  {\n                this.mSuperiorSystem = this.mSuperiorProcess.parentSystem();\n            }\n            if ( this.mSuperiorSystem instanceof UniformCentralSystem ) {\n                UniformCentralSystem system = (UniformCentralSystem) this.mSuperiorSystem;\n                if ( this.mGuidAllocator == null ) {\n                    this.mGuidAllocator = system.getSystemGuidAllocator();\n                }\n                if ( this.mImageLoader == null ) {\n                    this.mImageLoader = (ImageLoader) system.imageLoader();\n                }\n            }\n        }\n\n        if ( this.mGuidAllocator == null ) {\n            throw new IllegalArgumentException( \"GUIDAllocator is undefined.\" );\n        }\n\n        if ( this.mImageLoader == null ) {\n            this.mImageLoader = new UniformMultiScopeImageLoader( (Hydrogen) this.superiorSystem(), (HyComponent) null );\n        }\n\n        if ( this.mSuperiorProcess instanceof UProcess ) {\n            this.applyRootUProcess( (UProcess) this.mSuperiorProcess );\n        }\n        else if ( this.mSuperiorProcess instanceof InstitutionalProcess ) {\n            this.applyRootUProcess( ( (InstitutionalProcess) this.mSuperiorProcess ).ownedUniformProcess() );\n        }\n    }\n\n    public UniformProcessManager (\n            Processum superiorProcess, CascadeInstrument parentInstrument, String name, String superiorPathScope,\n            KernelObjectConfig config\n    ) {\n        this( superiorProcess, parentInstrument, name, superiorPathScope, config, null, null );\n    }\n\n    @Override\n    public ImageModifier getImageModifier() {\n        return this.mImageModifier;\n    }\n\n    @Override\n    public void applyGuidAllocator( GuidAllocator guidAllocator ) {\n        this.mGuidAllocator = guidAllocator;\n    }\n\n    @Override\n    public Processum superiorProcess() {\n        return this.mSuperiorProcess;\n    }\n\n    @Override\n    
public UProcess getRootUProcess() {\n        return this.mRootUProcess;\n    }\n\n    @Override\n    public ImageLoader getImageLoader() {\n        return this.mImageLoader;\n    }\n\n    @Override\n    public ProcessManagerConfig getKernelObjectConfig() {\n        return (ProcessManagerConfig) this.mKernelObjectConfig;\n    }\n\n    @Override\n    public void applyRootUProcess( UProcess rootUProcess ) {\n        this.mRootUProcess = rootUProcess;\n    }\n\n    @Override\n    public RuntimeSystem superiorSystem() {\n        return this.mSuperiorSystem;\n    }\n\n    @Override\n    public CascadeInstrument parent() {\n        return this.mParentInstrument;\n    }\n\n    @Override\n    public void setParent( CascadeInstrument parent ) {\n        this.mParentInstrument = parent;\n    }\n\n    @Override\n    public Namespace getTargetingName() {\n        return this.mThisNamespace;\n    }\n\n    @Override\n    public void setTargetingName( Namespace name ) {\n        this.mThisNamespace = name;\n    }\n\n    @Override\n    public String getSuperiorPathScope() {\n        return this.mSuperiorPathScope;\n    }\n\n    @Override\n    public void applySuperiorPathScope( String superiorPathScope ) {\n        this.mSuperiorPathScope = superiorPathScope;\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.mGuidAllocator;\n    }\n\n    @Override\n    public KernelObjectConfig getConfig() {\n        return this.mKernelObjectConfig;\n    }\n\n\n\n\n    @Override\n    public long    getVitalizeCount() {\n        return this.mnVitalizeCount;\n    }\n\n    @Override\n    public long    getFatalityCount() {\n        return this.mnFatalityCount;\n    }\n\n    @Override\n    public long    processCount() {\n        return this.mProcessMap.size();\n    }\n\n    @Override\n    public Collection<UProcess> fetchProcesses() {\n        return this.mProcessMap.values();\n    }\n\n    @Override\n    public void register( UProcess that ) {\n        if( 
!this.autopsy( that ) ) {\n            this.mProcessMap.put( that.getPID(), that );\n            ++this.mnVitalizeCount;\n        }\n        else {\n            throw new IllegalStateException( \"Process is dead.\" );\n        }\n    }\n\n    @Override\n    public void erase( UProcess that ) {\n        if( this.autopsy( that ) ) {\n            this.expunge( that );\n        }\n        else {\n            throw new IllegalStateException( \"Process is still alive.\" );\n        }\n    }\n\n    @Override\n    protected void expunge( UProcess that ) {\n        this.mProcessMap.remove( that.getPID() );\n        ++this.mnFatalityCount;\n        that.triggerAfterRunnableTerminationStatus();\n    }\n\n    @Override\n    public boolean autopsy( UProcess that ) {\n        return that.getState() == Thread.State.TERMINATED;\n    }\n\n    @Override\n    public LocalUProcess createLocalHostedProcess(\n            ExecutionImage image, UProcess parent, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars\n    ) {\n        if ( parent == null ) {\n            parent = this.mRootUProcess;\n        }\n        Processum hosted = new ArchProcessum( image.getName(), parent ) {};\n        Thread primaryThread = new Thread( image.getEntryPoint(), ( image.getName() + \"-main\" ).toLowerCase() );\n        hosted.setThreadAffinity( primaryThread );\n\n        if ( startupArgs == null ) {\n            startupArgs = new HashMap<>();\n        }\n        LocalUProcess process = new LocalHostedProcess(\n                hosted, parent, this, image, new GenericSegregationSpace(), startupArgs,\n                this.mProcessEnvironmentSection.extendsFrom( parent, contextEnvironmentVars )\n        );\n\n        // Register the process in the entry-point-runnable for process status surveillance purpose.\n        image.getEntryPoint().applyOwnedProcess( process );\n        this.register( process );\n\n        return process;\n    }\n\n    public UProcess getProcess( GUID pid 
) {\n        return this.mProcessMap.get( pid );\n    }\n\n    @Override\n    public Collection<UProcess> searchProcessesByName( String procName ) {\n        Collection<UProcess> li = new ArrayList<>();\n        for( UProcess process : this.mProcessMap.values() ) {\n            if ( process.getName().equals( procName ) ) {\n                li.add( process );\n            }\n        }\n        return li;\n    }\n\n    @Override\n    public Collection<UProcess> searchProcessesByNameNoCase( String procName ) {\n        Collection<UProcess> li = new ArrayList<>();\n        for( UProcess process : this.mProcessMap.values() ) {\n            if ( process.getName().equalsIgnoreCase( procName ) ) {\n                li.add( process );\n            }\n        }\n        return li;\n    }\n\n    @Override\n    public boolean containProcess( GUID pid ) {\n        return this.mProcessMap.containsKey( pid );\n    }\n\n    @Override\n    public String getPath( GUID objectGuid ) {\n        return objectGuid.toString();\n        // Process PID is the path in this case.\n        // /proc/${pid}\n    }\n\n    @Override\n    public String querySystemKernelObjectPath( GUID objectGuid ) {\n        String thisScopePath = this.getPath( objectGuid );\n        if ( thisScopePath == null ) {\n            return null;\n        }\n\n        return this.getSuperiorPathScope() + this.getConfig().getPathNameSeparator() + thisScopePath;\n    }\n\n    @Override\n    public GUID queryGUIDByPath( String path ) {\n        return this.queryNode( path ).getGuid();\n    }\n\n    @Override\n    public EntityNode queryNode( String path ) {\n        String pathSeparator = this.getKernelObjectConfig().getPathNameSeparator();\n\n        String[] split = path.split(pathSeparator);\n        if( split.length == 0 ) {\n            return null;\n        }\n        if( split.length > 1 ) {\n            // 后续补齐查找逻辑\n            return null;\n        }\n        else {\n            GUID guid = 
this.mGuidAllocator.parse( split[0] );\n            if ( guid == null ) {\n                return null;\n            }\n            return this.mProcessMap.get( this.mGuidAllocator.parse( split[0] ) );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/entity/ElementNode.java",
    "content": "package com.pinecone.hydra.proc.entity;\n\nimport com.pinecone.hydra.system.ko.meta.ElementObject;\n\npublic interface ElementNode extends ElementObject {\n\n    @Override\n    default String objectCategoryName() {\n        return \"Process\";\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/entity/ProcessElement.java",
    "content": "package com.pinecone.hydra.proc.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface ProcessElement extends ElementNode {\n\n    String getName();\n\n    long getLocalPID();\n\n    default GUID getPID() {\n        return this.getGuid();\n    }\n\n    @Override\n    GUID getGuid();\n\n    GUID getParentProcessId();\n\n    long getParentLocalPID();\n\n    Thread.State getState();\n\n    ElementNode getAccount();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/event/ProcessEvent.java",
    "content": "package com.pinecone.hydra.proc.event;\n\npublic enum ProcessEvent {\n    Prepare    ( 0x00, \"Prepare\" ), // Prepare to start, no image loaded.\n\n    Created    ( 0x01, \"Created\" ),\n\n    Vitalized  ( 0x02, \"Vitalized\" ),\n\n    Terminated ( 0x03, \"Terminated\" ),\n\n    Error      ( 0x04, \"Error\" ),\n\n    ;\n\n    private final int code;\n\n    private final String name;\n\n    ProcessEvent( int code, String name ) {\n        this.code = code;\n        this.name = name;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/event/ProcessEventHandler.java",
    "content": "package com.pinecone.hydra.proc.event;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\n\npublic interface ProcessEventHandler extends Pinenut {\n\n    void fired( EntryPointRunnable runnable, ProcessEvent event );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/event/ProcessLifecycleHandler.java",
    "content": "package com.pinecone.hydra.proc.event;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\n\npublic interface ProcessLifecycleHandler extends Pinenut {\n\n    void fired( String imageAddress, EntryPointRunnable runnable, ProcessEvent event );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ArchEntryPointRunnable.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\n\npublic abstract class ArchEntryPointRunnable implements EntryPointRunnable {\n\n    protected UProcess mOwnedProcess;\n\n    protected ProcessEventHandler mProcessEventHandler;\n\n    List<ProcessEventHandler> mSysProcEventHandlers;\n\n    public ArchEntryPointRunnable( UProcess ownedProcess, ProcessEventHandler processEventHandler ) {\n        this.mOwnedProcess         = ownedProcess;\n        this.mProcessEventHandler  = processEventHandler;\n        this.mSysProcEventHandlers = new ArrayList<>();\n    }\n\n    public ArchEntryPointRunnable( ProcessEventHandler processEventHandler ) {\n        this( null, processEventHandler );\n    }\n\n    public ArchEntryPointRunnable() {\n        this( null, null );\n    }\n\n    @Override\n    public ProcessEventHandler processEventHandler() {\n        return this.mProcessEventHandler;\n    }\n\n    @Override\n    public void applyProcessEventHandler( ProcessEventHandler handler ) {\n        if ( this.mOwnedProcess.getState() != Thread.State.NEW ) {\n            throw new IllegalStateException(\n                    \"Process event handler can only be set before the process is started.\"\n            );\n        }\n        this.mProcessEventHandler = handler;\n    }\n\n    @Override\n    public UProcess ownedProcess() {\n        return this.mOwnedProcess;\n    }\n\n    @Override\n    public void applyOwnedProcess( UProcess process ) {\n        this.mOwnedProcess = process;\n    }\n\n\n    static List<ProcessEventHandler> getSysProcEventHandlers( EntryPointRunnable entryPointRunnable ) {\n        if ( entryPointRunnable instanceof ArchEntryPointRunnable ) {\n            return ((ArchEntryPointRunnable) entryPointRunnable).mSysProcEventHandlers;\n        }\n        else {\n            return null;\n        }\n    
}\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ArchExecutionImage.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.net.URI;\n\nimport com.pinecone.hydra.proc.ControllableLevel;\nimport com.pinecone.hydra.proc.UProcess;\n\npublic abstract class ArchExecutionImage implements ExecutionImage {\n\n    protected String                        mszName;\n    protected URI                           mResourceURI;\n    protected String                        mszImageAddress;\n    protected Class<? extends UProcess>     mProcessClassType;\n    protected ClassLoader                   mClassLoader;\n    protected EntryPointRunnable            mEntryPoint;\n    protected ImageLoader                   mImageLoader;\n    protected boolean                       mbReadOnly;\n    protected boolean                       mbReusable;\n    protected String                        mszSignature;\n    protected ControllableLevel             mControllableLevel;\n\n    protected ArchExecutionImage(\n            String name, EntryPointRunnable entryPoint, Class<? extends UProcess> processClassType,\n            URI resourceURI, ClassLoader classLoader, ImageLoader imageLoader,\n            boolean readOnly, boolean reusable, String signature,\n            ControllableLevel controllableLevel\n    ) {\n        this.mszName            = name;\n        this.mEntryPoint        = entryPoint;\n        this.mProcessClassType  = processClassType;\n        this.mResourceURI       = resourceURI;\n        this.mClassLoader       = classLoader;\n        this.mImageLoader       = imageLoader;\n        this.mbReadOnly         = readOnly;\n        this.mbReusable         = reusable;\n        this.mszSignature       = signature;\n        this.mControllableLevel = controllableLevel;\n    }\n\n    protected ArchExecutionImage(\n            String name, EntryPointRunnable entryPoint, Class<? 
extends UProcess> processClassType, URI resourceURI, ImageLoader imageLoader, String signature, ControllableLevel controllableLevel\n    ) {\n        this( name, entryPoint, processClassType, resourceURI, imageLoader.getClassLoader(), imageLoader,\n              true, true, signature, controllableLevel\n        );\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public URI getResourceURI() {\n        return this.mResourceURI;\n    }\n\n    protected void  applyImageAddress( String address ) {\n        this.mszImageAddress = address;\n    }\n\n    @Override\n    public String getImageAddress() {\n        return this.mszImageAddress;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Class<UProcess> processClassType() {\n        return (Class<UProcess>) this.mProcessClassType;\n    }\n\n    @Override\n    public EntryPointRunnable getEntryPoint() {\n        return this.mEntryPoint;\n    }\n\n    @Override\n    public ClassLoader getClassLoader() {\n        return this.mClassLoader;\n    }\n\n    @Override\n    public ImageLoader getImageLoader() {\n        return this.mImageLoader;\n    }\n\n    @Override\n    public boolean isReadOnly() {\n        return this.mbReadOnly;\n    }\n\n    @Override\n    public boolean isReusable() {\n        return this.mbReusable;\n    }\n\n    @Override\n    public String getSignature() {\n        return this.mszSignature;\n    }\n\n    @Override\n    public ControllableLevel getControllableLevel() {\n        return this.mControllableLevel;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ArchImageLoader.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic abstract class ArchImageLoader extends ArchSystemCascadeComponent implements ImageLoader {\n    protected ClassLoader         mClassLoader;\n\n    public ArchImageLoader( Namespace name, Hydrogen system, HyComponent parent, ClassLoader classLoader ) {\n        super( name, system, system.getComponentManager(), parent );\n\n        this.mClassLoader = classLoader;\n    }\n\n    public ArchImageLoader( Hydrogen system, HyComponent parent ) {\n        this( null, system, parent, system.getGlobalClassLoader() );\n    }\n\n    public ArchImageLoader( Hydrogen system ) {\n        this( system, null );\n    }\n\n    @Override\n    public ClassLoader getClassLoader() {\n        return this.mClassLoader;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/EntryPointRunnable.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.hydra.proc.ArchProcessManager;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\n\npublic interface EntryPointRunnable extends Runnable, Executor {\n\n    UProcess ownedProcess();\n\n    void applyOwnedProcess( UProcess process );\n\n    ProcessEventHandler processEventHandler();\n\n    void applyProcessEventHandler( ProcessEventHandler handler );\n\n    int main( Map<String, String[]> args ) throws Exception;\n\n    @Override\n    default void execute() throws Exception {\n        int c = this.main( this.ownedProcess().getStartupArguments() );\n        this.ownedProcess().actionTape().setExitCode( c );\n    }\n\n    /**\n     * Overriding is discouraged; lifecycle supervision is required in principle.\n     * 原则上，请勿重写，需要检察程序生命周期行为。\n     */\n    @Override\n    default void run() {\n        ProcessEventHandler processEventHandler        = this.processEventHandler();\n        List<ProcessEventHandler> sysProcEventHandlers = ArchEntryPointRunnable.getSysProcEventHandlers( this );\n        ProcessEvent termEvent                         = null;\n        try {\n            ProcessEvent vitalEvent = ProcessEvent.Vitalized;\n            if ( processEventHandler != null ) {\n                processEventHandler.fired( this, vitalEvent );\n            }\n            if ( sysProcEventHandlers != null ) {\n                for ( ProcessEventHandler sysHandler : sysProcEventHandlers ) {\n                    sysHandler.fired( this, vitalEvent );\n                }\n            }\n\n            int c = this.main( this.ownedProcess().getStartupArguments() );\n            
this.ownedProcess().actionTape().setExitCode( c );\n        }\n        catch ( Exception e ) {\n            this.ownedProcess().actionTape().setLastError( e );\n            termEvent = ProcessEvent.Error;\n            throw new ProvokeHandleException( e );\n        }\n        finally {\n            UProcess owned = this.ownedProcess();\n            ProcessManager processManager = owned.getOwnedProcessManager();\n            if ( processManager instanceof ArchProcessManager ) {\n                ArchProcessManager.invokeExpunge( (ArchProcessManager) processManager, owned );\n            }\n\n            if ( termEvent == null ) {\n                termEvent = ProcessEvent.Terminated;\n            }\n            if ( processEventHandler != null ) {\n                processEventHandler.fired( this, termEvent );\n            }\n            if ( sysProcEventHandlers != null ) {\n                for ( ProcessEventHandler sysHandler : sysProcEventHandlers ) {\n                    sysHandler.fired( this, termEvent );\n                }\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ExecutionImage.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.net.URI;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.ControllableLevel;\nimport com.pinecone.hydra.proc.UProcess;\n\npublic interface ExecutionImage extends Pinenut {\n\n    String getName();\n\n    URI getResourceURI();\n\n    String getImageAddress();\n\n    Class<UProcess> processClassType();\n\n    EntryPointRunnable getEntryPoint();\n\n    ClassLoader getClassLoader();\n\n    ImageLoader getImageLoader();\n\n    boolean isReadOnly();\n\n    boolean isReusable();\n\n    String getSignature();\n\n    ControllableLevel getControllableLevel();\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/FileSystemMappingImageLoader.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.architecture.CascadeComponent;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.proc.image.kom.ImageElement;\nimport com.pinecone.hydra.proc.image.kom.VirtualExeImageInstrument;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.centrum.UniformCentralSystem;\nimport com.pinecone.hydra.system.imperium.KernelObjectRootMountPoint;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\n\npublic class FileSystemMappingImageLoader extends ArchImageLoader implements ImageLoader {\n\n    protected VirtualExeImageInstrument mVirtualExeImageInstrument;\n\n    public FileSystemMappingImageLoader( Namespace name, Hydrogen system, HyComponent parent, ClassLoader classLoader, @Nullable VirtualExeImageInstrument virtualExeImageInstrument ) {\n        super( name, system, parent, classLoader );\n\n        this.mVirtualExeImageInstrument = virtualExeImageInstrument;\n\n        if ( virtualExeImageInstrument == null ) {\n            if ( system instanceof UniformCentralSystem) {\n                KOMInstrument e = ((UniformCentralSystem) system).imperiumPrivy().getExpressInstrument().getMountedInstrument( KernelObjectRootMountPoint.SysImages.getMountPoint() );\n                if ( e instanceof VirtualExeImageInstrument ) {\n                    this.mVirtualExeImageInstrument = (VirtualExeImageInstrument) e;\n                }\n            }\n        }\n    }\n\n    public FileSystemMappingImageLoader( Hydrogen system, HyComponent parent, @Nullable VirtualExeImageInstrument virtualExeImageInstrument ) {\n        this( null, system, parent, system.getGlobalClassLoader(), virtualExeImageInstrument );\n    }\n\n    public FileSystemMappingImageLoader( Hydrogen system, @Nullable 
VirtualExeImageInstrument virtualExeImageInstrument ) {\n        this( system, null, virtualExeImageInstrument );\n    }\n\n\n    @Override\n    public ExecutionImage queryExecutionImage( String path ) {\n        ExecutionImage image = this.mVirtualExeImageInstrument.queryImage( path );\n        if ( image != null ) {\n            return image;\n        }\n\n        if ( this.getSystem() instanceof UniformCentralSystem ) {\n            EntityNode e = ((UniformCentralSystem) this.getSystem()).imperiumPrivy().getExpressInstrument().queryNode( path );\n            if ( e instanceof ImageElement) {\n                return ((ImageElement) e).getImage();\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public void registerLocalScopeExecutionImage( String dirPath, ExecutionImage image ) {\n        this.mVirtualExeImageInstrument.mount( dirPath, image );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/GenericClassImage.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.net.URI;\nimport java.net.URISyntaxException;\n\nimport com.pinecone.hydra.proc.ControllableLevel;\nimport com.pinecone.hydra.proc.UProcess;\n\npublic class GenericClassImage extends ArchExecutionImage implements JVMClassExecutionImage {\n\n    static URI evalClassURI( Class<?> clazz ) throws ImageLoadProcedureException {\n        try {\n            return clazz.getProtectionDomain().getCodeSource().getLocation().toURI();\n        }\n        catch ( URISyntaxException e ) {\n            throw new ImageLoadProcedureException( e );\n        }\n    }\n\n    public GenericClassImage(\n            String name, EntryPointRunnable entryPoint, Class<? extends UProcess> processClassType, ImageLoader imageLoader\n    ) throws ImageLoadProcedureException {\n        super(\n                name, entryPoint, processClassType, evalClassURI( entryPoint.getClass() ),\n                imageLoader, JVMClassExecutionImage.class.getSimpleName(), ControllableLevel.Absolute\n        );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ImageLoadException.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class ImageLoadException extends Exception implements Pinenut {\n\n    public ImageLoadException    () {\n        super();\n    }\n\n    public ImageLoadException    ( String message ) {\n        super(message);\n    }\n\n    public ImageLoadException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ImageLoadException    ( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ImageLoadProcedureException.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class ImageLoadProcedureException extends PineRuntimeException {\n\n    public ImageLoadProcedureException    () {\n        super();\n    }\n\n    public ImageLoadProcedureException    ( String message ) {\n        super(message);\n    }\n\n    public ImageLoadProcedureException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ImageLoadProcedureException    ( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ImageLoader.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport com.pinecone.framework.system.architecture.Component;\n\npublic interface ImageLoader extends Component {\n\n    ClassLoader getClassLoader();\n\n    ExecutionImage queryExecutionImage( String path );\n\n    void registerLocalScopeExecutionImage ( String dirPath, ExecutionImage image );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/ImageModifier.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.net.URI;\n\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\n\npublic interface ImageModifier extends Pinenut {\n\n    @Unsafe\n    void addSystemProcessEventHandler( EntryPointRunnable runnable, ProcessEventHandler handler );\n\n    @Unsafe\n    void removeSystemProcessEventHandler( EntryPointRunnable runnable, ProcessEventHandler handler );\n\n    @Unsafe\n    int querySystemProcessEventHandlersSize( EntryPointRunnable runnable );\n\n    @Unsafe\n    void  applyImageAddress( ExecutionImage image, String address );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/JVMClassExecutionImage.java",
    "content": "package com.pinecone.hydra.proc.image;\n\npublic interface JVMClassExecutionImage extends ExecutionImage {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/LocalHostedClassImage.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport com.pinecone.hydra.proc.LocalHostedProcess;\nimport com.pinecone.hydra.proc.ProcessManager;\n\npublic class LocalHostedClassImage extends GenericClassImage {\n\n    public LocalHostedClassImage(\n            String name, EntryPointRunnable entryPoint, ImageLoader imageLoader\n    ) throws ImageLoadProcedureException {\n        super( name, entryPoint, LocalHostedProcess.class, imageLoader );\n    }\n\n    public LocalHostedClassImage(\n            String name, EntryPointRunnable entryPoint, ProcessManager manager\n    ) throws ImageLoadProcedureException {\n        this( name, entryPoint, manager.getImageLoader() );\n    }\n\n    public LocalHostedClassImage(\n            EntryPointRunnable entryPoint, ProcessManager manager\n    ) throws ImageLoadProcedureException {\n        this( \"\", entryPoint, manager.getImageLoader() );\n\n        this.mszName = this.getClass().getName();\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/SafeImageModifier.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.net.URI;\nimport java.util.List;\n\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\n\npublic class SafeImageModifier implements ImageModifier {\n    public SafeImageModifier() {\n\n    }\n\n    protected List<ProcessEventHandler> retrieveSysProcEventHandlers( EntryPointRunnable runnable ) {\n        List<ProcessEventHandler> those = ArchEntryPointRunnable.getSysProcEventHandlers( runnable );\n        if ( those != null ) {\n            return those;\n        }\n        throw new IllegalArgumentException( \"EntryPointRunnable has no SystemProcessEventHandles.\" );\n    }\n\n    @Override\n    @Unsafe\n    public void addSystemProcessEventHandler( EntryPointRunnable runnable, ProcessEventHandler handler ) {\n        List<ProcessEventHandler> those = this.retrieveSysProcEventHandlers( runnable );\n        those.add( handler );\n    }\n\n    @Override\n    @Unsafe\n    public void removeSystemProcessEventHandler( EntryPointRunnable runnable, ProcessEventHandler handler ) {\n        List<ProcessEventHandler> those = this.retrieveSysProcEventHandlers( runnable );\n        those.remove( handler );\n    }\n\n    @Override\n    @Unsafe\n    public int querySystemProcessEventHandlersSize( EntryPointRunnable runnable ) {\n        List<ProcessEventHandler> those = this.retrieveSysProcEventHandlers( runnable );\n        return those.size();\n    }\n\n    @Override\n    @Unsafe\n    public void  applyImageAddress( ExecutionImage image, String address ) {\n        if ( image instanceof ArchExecutionImage ) {\n            ((ArchExecutionImage) image). applyImageAddress( address );\n            return;\n        }\n\n        throw new IllegalArgumentException( \"Unable to modify `image-uri`.\" );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/URLImageLoader.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.net.URI;\n\npublic interface URLImageLoader extends ImageLoader {\n\n    ExecutionImage queryExecutionImage( URI uri );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/UniformImageLoader.java",
    "content": "package com.pinecone.hydra.proc.image;\n\npublic interface UniformImageLoader extends URLImageLoader {\n\n    ImageLoader localMappingImageLoader();\n\n    void addScope( String protocol, ImageLoader imageLoader );\n\n    ImageLoader getScope( String protocol );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/UniformMultiScopeImageLoader.java",
    "content": "package com.pinecone.hydra.proc.image;\n\nimport java.net.URI;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.architecture.CascadeComponent;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.centrum.UniformCentralSystem;\n\npublic class UniformMultiScopeImageLoader extends ArchImageLoader implements UniformImageLoader {\n\n    protected Map<String, ImageLoader> mLoaderScope;\n\n    protected ImageLoader mLocalMappingImageLoader;\n\n    public UniformMultiScopeImageLoader( Namespace name, Hydrogen system, HyComponent parent, ClassLoader classLoader, @Nullable ImageLoader localMappingImageLoader ) {\n        super( name, system, parent, classLoader );\n\n        this.mLoaderScope             = new ConcurrentHashMap<>();\n        this.mLocalMappingImageLoader = localMappingImageLoader;\n\n        if ( this.mLocalMappingImageLoader == null ) {\n            if ( system instanceof UniformCentralSystem ) {\n                UniformImageLoader pl = (UniformImageLoader) ( (UniformCentralSystem) system ).imageLoader();\n                this.mLocalMappingImageLoader = pl.localMappingImageLoader();\n            }\n            else {\n                throw new IllegalArgumentException( \"`UniformMultiScopeImageLoader` must includes at least one `localMappingImageLoader`.\" );\n            }\n        }\n\n        this.mLoaderScope.put( \"uofs\", this.mLocalMappingImageLoader );\n    }\n\n    public UniformMultiScopeImageLoader( Hydrogen system, HyComponent parent, @Nullable ImageLoader localMappingImageLoader ) {\n        this( null, system, parent, system.getGlobalClassLoader(), localMappingImageLoader );\n    }\n\n    public UniformMultiScopeImageLoader( Hydrogen system, HyComponent parent ) {\n        this( null, system, 
parent, system.getGlobalClassLoader(), null );\n    }\n\n\n    public UniformMultiScopeImageLoader( Hydrogen system, @Nullable ImageLoader localMappingImageLoader ) {\n        this( system, null, localMappingImageLoader );\n    }\n\n    @Override\n    public ImageLoader localMappingImageLoader() {\n        return this.mLocalMappingImageLoader;\n    }\n\n    @Override\n    public void addScope( String protocol, ImageLoader imageLoader ) {\n        this.mLoaderScope.put( protocol.toLowerCase(), imageLoader );\n    }\n\n    @Override\n    public ImageLoader getScope( String protocol ) {\n        return this.mLoaderScope.get( protocol.toLowerCase() );\n    }\n\n    @Override\n    public ExecutionImage queryExecutionImage( URI uri ) {\n        String p = uri.getScheme().toLowerCase();\n        ImageLoader imageLoader = this.mLoaderScope.get( p );\n        if ( imageLoader == null ) {\n            return null;\n        }\n\n        if ( imageLoader instanceof URLImageLoader ) {\n            URLImageLoader urlImageLoader = (URLImageLoader) imageLoader;\n            return urlImageLoader.queryExecutionImage( uri );\n        }\n\n        ExecutionImage image = imageLoader.queryExecutionImage( uri.getPath() );\n        if ( image != null ) {\n            return image;\n        }\n\n        CascadeComponent component = this.parent();\n        if ( component instanceof URLImageLoader ) {\n            image = ( (URLImageLoader) component ).queryExecutionImage( uri );\n            if ( image != null ) {\n                return image;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public ExecutionImage queryExecutionImage( String path ) {\n        ExecutionImage image = this.mLocalMappingImageLoader.queryExecutionImage( path );\n        if ( image != null ) {\n            return image;\n        }\n\n        CascadeComponent component = this.parent();\n        if ( component instanceof ImageLoader ) {\n            image = ( (ImageLoader) component 
).queryExecutionImage( path );\n            if ( image != null ) {\n                return image;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public void registerLocalScopeExecutionImage( String dirPath, ExecutionImage image ) {\n        this.mLocalMappingImageLoader.registerLocalScopeExecutionImage( dirPath, image );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/ElementNode.java",
    "content": "package com.pinecone.hydra.proc.image.kom;\n\nimport com.pinecone.hydra.system.ko.meta.ElementObject;\n\npublic interface ElementNode extends ElementObject {\n\n    @Override\n    default String objectCategoryName() {\n        return \"Process\";\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/GenericImageElement.java",
    "content": "package com.pinecone.hydra.proc.image.kom;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\n\npublic class GenericImageElement implements ImageElement {\n    protected ExecutionImage mImage;\n    protected GUID           mGUID;\n\n    public GenericImageElement() {\n\n    }\n\n    public GenericImageElement( ExecutionImage image, GUID guid ) {\n        this.mImage = image;\n        this.mGUID  = guid;\n    }\n\n    @Override\n    public ExecutionImage getImage() {\n        return this.mImage;\n    }\n\n    @Override\n    public String getName() {\n        return this.mImage.getName();\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mGUID;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/ImageElement.java",
    "content": "package com.pinecone.hydra.proc.image.kom;\n\nimport com.pinecone.hydra.proc.image.ExecutionImage;\n\npublic interface ImageElement extends ElementNode {\n\n    ExecutionImage getImage();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/VirtualExeImageInstrument.java",
    "content": "package com.pinecone.hydra.proc.image.kom;\n\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\n\npublic interface VirtualExeImageInstrument extends KOMInstrument {\n\n    ImageElement mount( String parentPath, ExecutionImage image ) ;\n\n    ImageElement queryImageElement( String path ) ;\n\n    ExecutionImage queryImage( String path ) ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/image/kom/VirtualMappingExeImageInstrument.java",
    "content": "package com.pinecone.hydra.proc.image.kom;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.system.centrum.UniformCentralSystem;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.runtime.ArchRuntimeKOMTree;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\n\npublic class VirtualMappingExeImageInstrument extends ArchRuntimeKOMTree implements VirtualExeImageInstrument {\n\n    public VirtualMappingExeImageInstrument( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorProcess, superiorPathScope, kernelObjectConfig, guidAllocator );\n    }\n\n    public VirtualMappingExeImageInstrument( @Nullable Processum superiorProcess, String superiorPathScope, KernelObjectConfig kernelObjectConfig ) {\n        this( superiorProcess, superiorPathScope, kernelObjectConfig, null );\n    }\n\n    public VirtualMappingExeImageInstrument( Processum superiorProcess, String superiorPathScope ) {\n        this( superiorProcess, superiorPathScope, UniformCentralSystem.evalCentralSystem(superiorProcess).fundamentalKernelObjectConfig(), null );\n    }\n\n    @Override\n    public ImageElement mount( String parentPath, ExecutionImage image ) {\n        ImageElement element = new GenericImageElement( image, this.guidAllocator.nextGUID() );\n\n        this.add( parentPath + this.getConfig().getPathNameSeparator() + image.getName(), element );\n\n        return element;\n    }\n\n    @Override\n    public ImageElement queryImageElement( String path ) {\n        EntityNode e = this.queryNode( path );\n        if ( e instanceof ImageElement ) {\n            return (ImageElement) e;\n        }\n\n        return null;\n    
}\n\n    @Override\n    public ExecutionImage queryImage( String path ) {\n        ImageElement element = this.queryImageElement( path );\n        if ( element != null ) {\n            return element.getImage();\n        }\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ns/ControlGroup.java",
    "content": "package com.pinecone.hydra.proc.ns;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ControlGroup extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ns/GenericSegregationSpace.java",
    "content": "package com.pinecone.hydra.proc.ns;\n\npublic class GenericSegregationSpace implements ProcSpace {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/ns/ProcSpace.java",
    "content": "package com.pinecone.hydra.proc.ns;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ProcSpace extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/signal/Signal.java",
    "content": "package com.pinecone.hydra.proc.signal;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Signal extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/tomb/ResurgentTombstone.java",
    "content": "package com.pinecone.hydra.proc.tomb;\n\npublic class ResurgentTombstone implements RuntimeTombstone {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/tomb/RuntimeTombstone.java",
    "content": "package com.pinecone.hydra.proc.tomb;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface RuntimeTombstone extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/proc/tomb/TombCheckpoint.java",
    "content": "package com.pinecone.hydra.proc.tomb;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TombCheckpoint extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ActionType.java",
    "content": "package com.pinecone.hydra.servgram;\n\npublic enum ActionType {\n    Sequential          (\"Sequential\"),\n    Parallel            (\"Parallel\"),\n    Loop                (\"Loop\"),\n\n    SequentialActions   (\"SequentialActions\"),\n    ParallelActions     (\"ParallelActions\"),\n    LoopActions         (\"LoopActions\"),\n\n    Break               (\"Break\"),\n    Continue            (\"Continue\"),\n    Jump                (\"Jump\"),;\n\n\n\n    private final String value;\n\n    ActionType( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n\n    public boolean isActionGroup() {\n        return this == ActionType.SequentialActions || this == ActionType.ParallelActions || this == ActionType.LoopActions;\n    }\n\n    public ActionType reinterpretActions() {\n        switch ( this ) {\n            case LoopActions: {\n                return ActionType.Loop;\n            }\n            case ParallelActions: {\n                return ActionType.Parallel;\n            }\n            case SequentialActions: {\n                return ActionType.Sequential;\n            }\n            default: {\n                return this;\n            }\n        }\n    }\n\n    public static String queryName( ActionType type ) {\n        return type.getName();\n    }\n\n    public static ActionType queryActionType( String sz ) {\n        return ActionType.valueOf( sz );\n    }\n\n    public static final String  ConfigActionTypeKey  = \"Type\"  ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchGramFactory.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.executum.TaskManager;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.ulf.util.lang.ArchMultiScopeFactory;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.List;\n\npublic abstract class ArchGramFactory extends ArchMultiScopeFactory implements GramFactory {\n    protected ArchGramFactory( TaskManager taskManager, ClassLoader classLoader, MultiGramsLoader gramLoader, GramScope gramScope ) {\n        super( taskManager, classLoader, gramLoader, gramScope );\n    }\n\n    @Override\n    public ClassLoader getClassLoader() {\n        return this.mClassLoader;\n    }\n\n    @Override\n    public GramScope getClassScope() {\n        return (GramScope) super.getClassScope();\n    }\n\n    @Override\n    public MultiGramsLoader getTraitClassLoader() {\n        return (MultiGramsLoader) super.getTraitClassLoader();\n    }\n\n    public Servgram newInstance ( Class<? > that, Class<?>[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {\n        return (Servgram) super.newInstance( that, stereotypes, args );\n    }\n\n    @Override\n    public Servgram spawn ( Name name, Object... args ) throws InvocationTargetException {\n        return this.spawn( name, null, args );\n    }\n\n    @Override\n    public Servgram spawn ( Name name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException {\n        return (Servgram) super.spawn( name, stereotypes, args );\n    }\n\n    @Override\n    public List<Servgram > popping ( Name name, Object... args ) {\n        return this.popping( name, null, args );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public List<Servgram > popping ( Name name, Class<?>[] stereotypes, Object... args ) {\n        return (List<Servgram >) super.popping( name, stereotypes, args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchGramLoader.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.hydra.servgram.filters.AnnotationValueFilter;\nimport com.pinecone.ulf.util.lang.*;\nimport javassist.ClassPool;\nimport javassist.bytecode.annotation.Annotation;\n\n/**\n * Notice: TODO, IOC Inject Implement.\n */\npublic abstract class ArchGramLoader extends ArchMultiScopeLoader implements MultiGramsLoader {\n    protected AnnotationValueFilter                mAnnoValueFilter    ;\n\n    protected ArchGramLoader( GramScope gramScope, ClassLoader classLoader, ClassPool classPool ) {\n        super( gramScope, classLoader, classPool, null, null );\n\n        this.mClassScanner         = new PooledClassCandidateScanner( new LocalGramScopeSet( this.mClassLoader ), this.mClassLoader, this.mClassPool );\n        this.mClassInspector       = new GenericPreloadClassInspector( this.mClassPool );\n    }\n\n    protected ArchGramLoader( GramScope gramScope, ClassLoader classLoader ) {\n        this( gramScope, classLoader, ClassPool.getDefault() );\n    }\n\n    @Override\n    public void setAnnotationValueFilter( AnnotationValueFilter filter ) {\n        this.mAnnoValueFilter = filter;\n    }\n\n    @Override\n    protected boolean isAnnotationQualified( Annotation that, String szName ) {\n        return !this.mAnnoValueFilter.match( that, szName );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Class<? extends Servgram > load( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? extends Servgram > )super.load( simpleName );\n    }\n\n    // Directly by it`s name.\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Class<? extends Servgram > loadByName( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? 
extends Servgram > )super.loadByName( simpleName );\n    }\n\n    // Scanning class`s annotations, methods or others.\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Class<? extends Servgram > loadInClassTrait( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? extends Servgram > )super.loadInClassTrait( simpleName );\n    }\n\n    @Override\n    protected Class<? extends Servgram > loadSingleByFullClassName( String szFullClassName ) {\n        try {\n            Class<?> clazz = this.mClassLoader.loadClass( szFullClassName );\n            if( this.filter( clazz ) ) {\n                return null;\n            }\n            if ( Servgram.class.isAssignableFrom( clazz ) ) {\n                return clazz.asSubclass( Servgram.class );\n            }\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n\n        return null;\n    }\n\n    @Override\n    public MultiGramsLoader updateScope() {\n        return (MultiGramsLoader)super.updateScope();\n    }\n\n    @Override\n    public void clearCache() {\n        this.mLoadedClassesPool.clear();\n        this.mVisitedClasses.clear();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchGramScopeSet.java",
    "content": "package com.pinecone.hydra.servgram;\n\n\nimport com.pinecone.framework.util.lang.ArchClassScopeSet;\nimport com.pinecone.framework.util.lang.ScopedPackage;\n\nimport java.util.Set;\n\npublic abstract class ArchGramScopeSet extends ArchClassScopeSet implements GramScope {\n    protected ArchGramScopeSet( Set<ScopedPackage > scope, ClassLoader classLoader ) {\n        super( scope, classLoader );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchServgramOrchestrator.java",
    "content": "package com.pinecone.hydra.servgram;\n\n\nimport com.pinecone.framework.system.GenericMasterTaskManager;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.Pinecore;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.hydra.orchestration.Exertion;\nimport com.pinecone.hydra.orchestration.IntegrityLevel;\nimport com.pinecone.ulf.util.lang.MultiScopeFactory;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.locks.ReentrantLock;\n\npublic abstract class ArchServgramOrchestrator extends GenericMasterTaskManager implements ServgramOrchestrator {\n    private Logger              logger                ;\n    private GramFactory         mGramFactory          ;\n    private PatriarchalConfig   mSectionConfig        ;\n    private PatriarchalConfig   mOrchestrationConfig  ;\n    private List                mServgramScopesConf   ;\n    private GramTransaction     mTransaction          ;\n    private Exertion            mPrimaryAction        ;\n    private ReentrantLock       mNotifyLock           ;\n\n    public ArchServgramOrchestrator( Processum parent, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) {\n        super( parent );\n\n        this.mSectionConfig            = sectionConfig;\n        this.logger                    = LoggerFactory.getLogger( String.format( \"Tracer<%s>\", this.className() ) );\n        this.mOrchestrationConfig      = this.mSectionConfig.getChild( AutoOrchestrator.ConfigOrchestrationKey );\n        this.mServgramScopesConf       = (List)((JSONObject) this.mOrchestrationConfig).get( AutoOrchestrator.ConfigServgramScopesKey 
);\n        this.mNotifyLock               = new ReentrantLock();\n\n        this.prepareFactory( factory );\n        this.mTransaction              = transaction;\n    }\n\n    protected void prepareFactory( @Nullable GramFactory factory ) {\n        if( factory != null ) {\n            this.mGramFactory              = factory;\n            for( Object o : this.mServgramScopesConf ) {\n                this.mGramFactory.getClassScope().addScope( o.toString() );\n            }\n            this.mGramFactory.getTraitClassLoader().updateScope();\n        }\n    }\n\n    @Override\n    public Pinecore getSystem() {\n        return (Pinecore) super.getSystem();\n    }\n\n    @Override\n    public PatriarchalConfig getSectionConfig() {\n        return this.mSectionConfig;\n    }\n\n    @Override\n    public PatriarchalConfig getScheme(){\n        return this.getSectionConfig();\n    }\n\n    @Override\n    public PatriarchalConfig getOrchestrationConfig() {\n        return this.mOrchestrationConfig;\n    }\n\n    @Override\n    public GramTransaction getTransaction() {\n        return this.mTransaction;\n    }\n\n    @Override\n    public void setTransaction( GramTransaction transaction ) {\n        this.mTransaction = transaction;\n    }\n\n    @Override\n    public MultiScopeFactory getClassFactory() {\n        return this.mGramFactory;\n    }\n\n    protected Exertion wrapServgramAction( Servgram servgram, ActionType type ) {\n        switch ( type ) {\n            case Parallel: {\n                return new LocalParallelGramExertium( this, servgram );\n            }\n            case Sequential:{\n                return new LocalSequentialGramExertium( this,servgram );\n            }\n            default:{\n                throw new IllegalArgumentException( \"ServgramAction can only be [Parallel, Sequential].\" );\n            }\n        }\n    }\n\n    protected void orchestrateTransactionGroup( GramTransaction transaction, GramTransaction.TransactionArgs args, 
Map protoConf, int stratum ) throws OrchestrateInterruptException {\n        List childrenList = (List) protoConf.get( GramTransaction.ConfigTransactionsListKey );\n        GramTransaction child = new LocalGramTransaction( args.name, args.type.reinterpretActions(), childrenList, this, this.getParentProcessum() );\n        this.orchestrateTransaction( child, stratum + 1 );\n        transaction.add( child );\n    }\n\n    protected abstract List<Servgram > popping( String szName );\n\n    protected abstract List<Servgram > popping( Name name );\n\n    protected void orchestrateServgramium( GramTransaction transaction, GramTransaction.TransactionArgs args, Map protoConf, boolean bPrimary, int stratum ) throws OrchestrateInterruptException {\n        List<Servgram > grams = this.popping( args.name );\n        if( grams.isEmpty() ) {\n            this.tracer().warn( \"[Lifecycle] <None of servgrams been found.>\" );\n        }\n\n        if( bPrimary ) {\n            if( stratum > 0 && this.mPrimaryAction != null ) {\n                throw new OrchestrateInterruptException( \"Primary Servgram can only have one instance in the whole transaction graph.\" );\n            }\n            if( grams.size() > 1 ) {\n                throw new OrchestrateInterruptException( \"Primary Servgram can only have one instance.\" );\n            }\n            else {\n                if( !grams.isEmpty() ){\n                    this.mPrimaryAction = this.wrapServgramAction( grams.get(0), args.type );\n                    this.mPrimaryAction.setIntegrityLevel( args.level );\n                    this.mTransaction.addFirst( this.mPrimaryAction );\n                }\n            }\n        }\n        else {\n            for( Servgram gram : grams ) {\n                Exertion act = this.wrapServgramAction( gram, args.type );\n                act.setIntegrityLevel( args.level );\n                transaction.add( act );\n            }\n        }\n    }\n\n    @SuppressWarnings( \"unchecked\" 
)\n    protected void orchestrateTransaction( GramTransaction transaction, int stratum ) throws OrchestrateInterruptException {\n        try{\n            for( Object o : transaction.getTransactionList() ) {\n                Map jo = (Map) o;\n                GramTransaction.TransactionArgs args = new GramTransaction.TransactionArgs(\n                        (String) jo.getOrDefault( GramTransaction.ConfigTransactionNameKey, \"\" ),\n                        ActionType.queryActionType( (String) jo.getOrDefault( ActionType.ConfigActionTypeKey, ActionType.Sequential.getName() ) ),\n                        IntegrityLevel.queryIntegrityLevel( jo.getOrDefault( IntegrityLevel.ConfIntegrityLevelKey, IntegrityLevel.Warning ).toString() )\n                );\n\n                boolean    bPrimary = (boolean) jo.getOrDefault( GramTransaction.ConfigPrimaryNameKey, false );\n                if( args.type.isActionGroup() ) {\n                    this.orchestrateTransactionGroup( transaction, args, jo, stratum );\n                }\n                else {\n                    this.orchestrateServgramium( transaction, args, jo, bPrimary, stratum );\n                }\n            }\n        }\n        catch ( Exception e ) {\n            throw new OrchestrateInterruptException( \"Orchestrate has been interrupted, transaction compromised.\", e );\n        }\n    }\n\n    public void onlyOrchestrateTransaction() throws OrchestrateInterruptException {\n        this.orchestrateTransaction( this.mTransaction, 0 );\n    }\n\n    @Override\n    public void orchestrate() throws OrchestrateInterruptException {\n        this.infoLifecycle( \"Executing designed orchestration sequence\" );\n\n        this.onlyOrchestrateTransaction();\n        this.startTransaction();\n    }\n\n    /**\n     * startTransaction\n     * Transaction should be joined or sequential with atomic trait.\n     * Default transaction set the affinity to its parent thread, with sequential trait.\n     * If you wish to 
set the affinity to a master thread, please overriding this method.\n     */\n    @Override\n    public void startTransaction() throws OrchestrateInterruptException {\n        if( this.mTransaction instanceof Executum ) {\n            this.add( (Executum) this.mTransaction );\n            ((Executum) this.mTransaction).setThreadAffinity( Thread.currentThread() );\n        }\n\n        this.mTransaction.start();\n\n        if( this.mTransaction instanceof Executum ) {\n            this.erase( (Executum) this.mTransaction );\n        }\n    }\n\n    @Override\n    public List    preloads( String szName ) {\n        return this.popping( szName );\n    }\n\n    @Override\n    public List    preloads( Name name ) {\n        return this.popping( name );\n    }\n\n    @Override\n    public Logger tracer() {\n        return this.logger;\n    }\n\n    protected ArchServgramOrchestrator infoLifecycle( String szWhat, String szStateOrExtra ) {\n        this.tracer().info( \"[Lifecycle] [{}] <{}>\", szWhat, szStateOrExtra );\n        return this;\n    }\n\n    protected ArchServgramOrchestrator infoLifecycle( String szStateOrExtra ) {\n        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();\n        return this.infoLifecycle( stackTraceElements[ 2 ].getMethodName(), szStateOrExtra );\n    }\n\n    @Override\n    public void notifyFinished  ( Executum that ) {\n        this.mNotifyLock.lock();\n        try{\n            this.erase( that );\n        }\n        finally {\n            this.mNotifyLock.unlock();\n        }\n\n        super.notifyFinished( that );\n    }\n\n    @Override\n    public void notifyExecuting ( Executum that ) {\n        this.mNotifyLock.lock();\n        try{\n            this.add( that );\n        }\n        finally {\n            this.mNotifyLock.unlock();\n        }\n\n        super.notifyExecuting( that );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ArchServgramium.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.system.GenericMasterTaskManager;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport org.slf4j.Logger;\nimport com.pinecone.hydra.system.Hydrogen;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\n\npublic abstract class ArchServgramium extends ArchProcessum implements Servgramium, Slf4jTraceable {\n    protected Logger                 mLogger;\n    protected String                 mszGramName;\n\n    protected PatriarchalConfig      mServgramList;\n    protected PatriarchalConfig      mServgramConf;\n    protected boolean                mbTraceLifecycle;\n\n    public ArchServgramium( String szGramName, Processum parent ) {\n        super( szGramName, parent );\n        this.mszGramName     = szGramName;\n        this.mTaskManager    = new GenericMasterTaskManager( this );\n        this.mLogger         = this.parentSystem().getTracerScope().newLogger( this.className() );\n        this.loadConfig();\n        this.infoLifecycle( \"MeeseekSpawned\", \"I'm Mr.Meeseek[\" + this.className() + \"], look at me !\" );\n    }\n\n    protected ArchServgramium( Servgramium shared, boolean bs ) {\n        super( shared.getName(), (Processum) shared.parentExecutum() );\n        ArchServgramium that = (ArchServgramium) shared;\n        this.mszGramName     = that.mszGramName;\n        this.mTaskManager    = that.mTaskManager;\n        this.mLogger         = that.mLogger;\n        this.mServgramList   = that.mServgramList;\n        this.mServgramConf   = that.mServgramConf;\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    @Override\n    public ArchServgramium infoLifecycle( String szWhat, String szStateOrExtra ) {\n        
this.getLogger().info( \"[Lifecycle] [{}] <{}>\", szWhat, szStateOrExtra );\n        return this;\n    }\n\n    @Override\n    public ArchServgramium infoLifecycle( String szStateOrExtra ) {\n        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();\n        return this.infoLifecycle( stackTraceElements[ 2 ].getMethodName(), szStateOrExtra );\n    }\n\n    protected void loadConfig() {\n        this.mServgramList     = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey );\n        Object dyServgramConf  = this.mServgramList.get( this.gramName() );\n        if( dyServgramConf instanceof String ) {\n            try{\n                this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) );\n            }\n            catch ( IOException e ) {\n                this.parentSystem().handleKillException( e );\n            }\n        }\n        else {\n            this.mServgramConf = this.mServgramList.getChild( this.gramName() );\n        }\n\n        //Debug.trace( this.mMeeseekConf );\n    }\n\n    public boolean isTraceLifecycle() {\n        return this.mbTraceLifecycle;\n    }\n\n    @Override\n    public ServgramOrchestrator getAttachedOrchestrator() {\n        return (ServgramOrchestrator) this.parentExecutum().getTaskManager();\n    }\n\n    @Override\n    public String       gramName() {\n        return this.mszGramName;\n    }\n\n    @Override\n    public Hydrogen parentSystem() {\n        return (Hydrogen) super.parentSystem();\n    }\n\n    @Override\n    public PatriarchalConfig getConfig() {\n        return this.mServgramConf;\n    }\n\n    @Override\n    public abstract void execute() throws Exception;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/AutoOrchestrator.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.executum.EventedTaskManager;\nimport com.pinecone.framework.util.config.Config;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.framework.system.regime.Orchestrator;\n\nimport java.util.List;\n\npublic interface AutoOrchestrator extends EventedTaskManager, Orchestrator {\n    String  ConfigOrchestrationKey   = \"Orchestration\"   ;\n    String  ConfigServgramScopesKey  = \"ServgramScopes\"  ;\n\n    void    orchestrate() throws OrchestrateInterruptException ;\n\n    List    preloads( String szName ) ;\n\n    List    preloads( Name name ) ;\n\n    Config  getScheme();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/Gram.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Gram {\n    String ValueKey = \"value\";\n\n    String value() default \"\";\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/GramFactory.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.framework.util.name.ScopeName;\nimport com.pinecone.ulf.util.lang.MultiScopeFactory;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.List;\n\npublic interface GramFactory extends MultiScopeFactory {\n    @Override\n    ClassLoader      getClassLoader();\n\n    @Override\n    GramScope        getClassScope();\n\n    @Override\n    MultiGramsLoader getTraitClassLoader();\n\n    @Override\n    default Servgram spawn( String name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException {\n        return this.spawn( new ScopeName(name), stereotypes, args );\n    }\n\n    @Override\n    Servgram spawn( Name name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException;\n\n    @Override\n    default Servgram spawn( String name, Object... args ) throws InvocationTargetException {\n        return this.spawn( new ScopeName(name), args );\n    }\n\n    @Override\n    Servgram spawn( Name name, Object... args ) throws InvocationTargetException;\n\n    @Override\n    default List<Servgram > popping( String name, Class<?>[] stereotypes, Object... args ) {\n        return this.popping( new ScopeName(name), stereotypes, args );\n    }\n\n    @Override\n    List<Servgram > popping( Name name, Class<?>[] stereotypes, Object... args );\n\n    @Override\n    default List<Servgram > popping( String name, Object... args ) {\n        return this.popping( new ScopeName(name), args );\n    }\n\n    @Override\n    List<Servgram > popping( Name name, Object... args );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/GramLoader.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.hydra.servgram.filters.AnnotationValueFilter;\nimport com.pinecone.ulf.util.lang.TraitClassLoader;\n\npublic interface GramLoader extends TraitClassLoader {\n    @Override\n    Class<? extends Servgram > load( Name simpleName ) throws ClassNotFoundException ;\n\n    // Directly by it`s name.\n    @Override\n    Class<? extends Servgram > loadByName( Name simpleName ) throws ClassNotFoundException ;\n\n    // Scanning class`s annotations, methods or others.\n    @Override\n    Class<? extends Servgram > loadInClassTrait( Name simpleName ) throws ClassNotFoundException ;\n\n    @Override\n    GramLoader updateScope();\n\n    void setAnnotationValueFilter( AnnotationValueFilter filter );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/GramScope.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.util.lang.ClassScope;\n\npublic interface GramScope extends ClassScope {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/GramTransaction.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.hydra.orchestration.GraphStratum;\nimport com.pinecone.hydra.orchestration.IntegrityLevel;\nimport com.pinecone.hydra.orchestration.Transaction;\n\nimport java.util.List;\n\npublic interface GramTransaction extends Transaction, GraphStratum {\n    String  ConfigTransactionsListKey  = \"Transactions\"  ;\n    String  ConfigTransactionNameKey   = \"Name\"          ;\n    String  ConfigPrimaryNameKey       = \"Primary\"       ;\n\n    GramTransaction loadActionsFromConfig();\n\n    List getTransactionList();\n\n    class TransactionArgs {\n        String         name   ;\n        ActionType     type     ;\n        IntegrityLevel level    ;\n\n        TransactionArgs( String name, ActionType type, IntegrityLevel level ) {\n            this.name   = name;\n            this.type   = type;\n            this.level  = level;\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalGramFactory.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.executum.TaskManager;\n\npublic class LocalGramFactory extends ArchGramFactory {\n    public LocalGramFactory( TaskManager taskManager, ClassLoader classLoader, MultiGramsLoader gramLoader, GramScope gramScope ) {\n        super( taskManager, classLoader, gramLoader, gramScope );\n    }\n\n    public LocalGramFactory( TaskManager taskManager ) {\n        this( taskManager, taskManager.getClassLoader(), null, null );\n\n        this.mClassScope        = new LocalGramScopeSet( this );\n        this.mTraitClassLoader  = new LocalGramLoader( this );\n    }\n\n    public LocalGramFactory( TaskManager taskManager, GramScope gramScope ) {\n        this( taskManager, taskManager.getClassLoader(), null, gramScope );\n\n        this.mTraitClassLoader = new LocalGramLoader( this );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalGramLoader.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.hydra.servgram.filters.ExcludeGramFilters;\nimport com.pinecone.hydra.servgram.filters.GramAnnotationValueFilter;\n\npublic class LocalGramLoader extends ArchGramLoader {\n    public LocalGramLoader( GramScope gramScope, ClassLoader classLoader ) {\n        super( gramScope, classLoader );\n\n        this.mClassScanner.addExcludeFilter( new ExcludeGramFilters( this.mClassInspector ) );\n        this.setAnnotationValueFilter( new GramAnnotationValueFilter() );\n    }\n\n    public LocalGramLoader( GramFactory factory ) {\n        this( factory.getClassScope(), factory.getClassLoader() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalGramScopeSet.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.unit.LinkedTreeSet;\nimport com.pinecone.framework.util.lang.ScopedPackage;\n\nimport java.util.Set;\n\npublic class LocalGramScopeSet extends ArchGramScopeSet {\n    public LocalGramScopeSet( Set<ScopedPackage> scope, ClassLoader classLoader ) {\n        super( scope, classLoader );\n    }\n\n    public LocalGramScopeSet( ClassLoader classLoader ) {\n        super( new LinkedTreeSet<>(), classLoader );\n    }\n\n    public LocalGramScopeSet( GramFactory factory ) {\n        super( new LinkedTreeSet<>(), factory.getClassLoader() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalGramTransaction.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.ApoptosisRejectSignalException;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.orchestration.*;\nimport com.pinecone.hydra.orchestration.regulation.NeglectRegulation;\n\nimport java.util.List;\n\npublic class LocalGramTransaction extends ArchProcessum implements GramTransaction, Processum {\n    private   List                    mActionList   ;\n    private   ActionType              mActionType   ;\n    protected ServgramOrchestrator    mOrchestrator ;\n    protected Transaction             mTransaction  ;\n\n    public LocalGramTransaction( String name, ServgramOrchestrator orchestrator, Processum parent ) {\n        super( name, parent );\n        this.mOrchestrator = orchestrator;\n        this.mActionType   = ActionType.queryActionType( this.mOrchestrator.getOrchestrationConfig().get( ActionType.ConfigActionTypeKey ).toString() );\n        this.mActionList   = (List)((JSONObject) orchestrator.getOrchestrationConfig()).get( GramTransaction.ConfigTransactionsListKey );\n\n        this.prepareTransactionByType();\n        this.setName( name );\n\n        orchestrator.tracer().info( String.format( \"[Lifecycle] [%s, %s] <Contrived>\", name, this.mActionType ) );\n    }\n\n    public LocalGramTransaction( ServgramOrchestrator orchestrator, Processum parent ) {\n        this( orchestrator.getOrchestrationConfig().getOrDefault( GramTransaction.ConfigTransactionNameKey, \"Anonymous\" ).toString(), orchestrator, parent );\n    }\n\n    // Children Transaction\n    public LocalGramTransaction( String name, ActionType actionType, List actionList, ServgramOrchestrator orchestrator, Processum parent ) {\n        super( name, parent );\n        this.mOrchestrator = orchestrator;\n        this.mActionType   = actionType;\n        
this.mActionList   = actionList;\n\n        this.prepareTransactionByType();\n        this.setName( name );\n    }\n\n\n    protected void prepareTransactionByType() {\n        switch ( this.mActionType ) {\n            case Loop: {\n                this.mTransaction = new LoopAction();\n                break;\n            }\n            case Parallel:{\n                this.mTransaction = new ParallelAction();\n                break;\n            }\n            case Sequential:{\n                this.mTransaction = new SequentialAction();\n                break;\n            }\n            default: {\n                throw new IllegalArgumentException( \"MasterTransaction can only be [Loop, Parallel, Sequential].\" );\n            }\n        }\n    }\n\n    @Override\n    public void apoptosis() throws ApoptosisRejectSignalException {\n        this.terminate();\n    }\n\n    @Override\n    public void kill() {\n        this.terminate();\n        if( !this.isEnded() ) {\n            super.kill();\n        }\n    }\n\n    @Override\n    public GramTransaction loadActionsFromConfig() {\n        return this;\n    }\n\n    @Override\n    public List getTransactionList() {\n        return this.mActionList;\n    }\n\n    @Override\n    public void add( Exertion exertion ) {\n        this.mTransaction.add(exertion);\n    }\n\n    @Override\n    public void addFirst( Exertion exertion ) {\n        this.mTransaction.addFirst(exertion);\n    }\n\n    @Override\n    public void reset() {\n        this.mTransaction.reset();\n    }\n\n    @Override\n    public void start() {\n        this.mTransaction.start();\n    }\n\n    @Override\n    public void terminate() {\n        this.mTransaction.terminate();\n    }\n\n    @Override\n    public void rollback() {\n        this.mTransaction.rollback();\n    }\n\n    @Override\n    public NeglectRegulation getSeqExceptionNeglector(){\n        return this.mTransaction.getSeqExceptionNeglector();\n    }\n\n    @Override\n    public void 
setSeqExceptionNeglector( NeglectRegulation neglector ) {\n        this.mTransaction.setSeqExceptionNeglector( neglector );\n    }\n\n    @Override\n    public ExertionStatus getStatus() {\n        return this.mTransaction.getStatus();\n    }\n\n    @Override\n    public String getName(){\n        return this.mTransaction.getName();\n    }\n\n    @Override\n    public void setName( String name ){\n        this.mTransaction.setName( name );\n    }\n\n    @Override\n    public IntegrityLevel getIntegrityLevel(){\n        return this.mTransaction.getIntegrityLevel();\n    }\n\n    @Override\n    public void setIntegrityLevel( IntegrityLevel level ){\n        this.mTransaction.setIntegrityLevel( level );\n    }\n\n    @Override\n    public long getStartNano() {\n        return this.mTransaction.getStartNano();\n    }\n\n    @Override\n    public void setDefaultRollback( boolean b ){\n        this.mTransaction.setDefaultRollback( b );\n    }\n\n    @Override\n    public boolean isDefaultRollback(){\n        return this.mTransaction.isDefaultRollback();\n    }\n\n    @Override\n    public int getStratumId(){\n        return this.mTransaction.getStratumId();\n    }\n\n    @Override\n    public ArchGraphNode parent(){\n        return (ArchGraphNode)this.mTransaction.parent();\n    }\n\n    @Override\n    public List<GraphNode > getChildren() {\n        return ( (GraphStratum)this.mTransaction ).getChildren();\n    }\n\n    @Override\n    public Exception getLastError() {\n        return this.mTransaction.getLastError();\n    }\n\n    @Override\n    public void registerExertionStartCallback( ExertionEventCallback callback ) {\n        this.mTransaction.registerExertionStartCallback( callback );\n    }\n\n    @Override\n    public void registerExertionEndCallback( ExertionEventCallback callback ) {\n        this.mTransaction.registerExertionEndCallback( callback );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalParallelGramExertium.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.hydra.orchestration.parallel.ArchMasterParallelium;\n\npublic class LocalParallelGramExertium extends ArchMasterParallelium {\n    protected Servgram              mWrapServgram;\n    protected ServgramOrchestrator  mOrchestrator;\n\n    public LocalParallelGramExertium( ServgramOrchestrator orchestrator, Servgram servgram ) {\n        this.mWrapServgram = servgram;\n        this.mOrchestrator = orchestrator;\n        this.setName( servgram.getName() );\n    }\n\n    @Override\n    protected void doStart() {\n        try{\n            Thread thisThread = this.getMasterExecutum().getAffiliateThread();\n            thisThread.setName( this.nomenclature( thisThread ) );\n\n            this.mWrapServgram.execute();\n        }\n        catch ( Exception e ) {\n            throw new ProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    protected void doTerminate() {\n        LocalSequentialGramExertium.terminate( this.mWrapServgram );\n    }\n\n    @Override\n    protected void doRollback() {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    protected void intoStart() {\n        super.intoStart();\n        this.notifyExecuting();\n    }\n\n    @Override\n    protected void intoFinished() {\n        super.intoFinished();\n        this.notifyFinished();\n    }\n\n    @Override\n    protected void intoTerminated() {\n        super.intoTerminated();\n        this.notifyFinished();\n    }\n\n    @Override\n    protected void intoRollback() {\n        super.intoRollback();\n        this.notifyExecuting();\n    }\n\n    @Override\n    protected void intoError( Exception e ) {\n        super.intoError( e );\n        this.notifyFinished();\n    }\n\n    protected void notifyExecuting() 
{\n        if( this.mWrapServgram instanceof Executum ) {\n            this.mOrchestrator.notifyExecuting( (Executum)this.mWrapServgram );\n        }\n    }\n\n    protected void notifyFinished() {\n        if( this.mWrapServgram instanceof Executum ) {\n            this.mOrchestrator.notifyFinished( (Executum)this.mWrapServgram );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalSequentialGramExertium.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.ApoptosisRejectSignalException;\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.hydra.orchestration.ArchExertion;\n\npublic class LocalSequentialGramExertium extends ArchExertion {\n    protected Servgram              mWrapServgram;\n    protected ServgramOrchestrator  mOrchestrator;\n\n    public LocalSequentialGramExertium( ServgramOrchestrator orchestrator, Servgram servgram ) {\n        this.mWrapServgram = servgram;\n        this.mOrchestrator = orchestrator;\n        this.setName( servgram.getName() );\n    }\n\n    @Override\n    protected void doStart() {\n        try{\n            this.mWrapServgram.execute();\n        }\n        catch ( Exception e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    protected void doTerminate() {\n        LocalSequentialGramExertium.terminate( this.mWrapServgram );\n    }\n\n    @Override\n    protected void doRollback() {\n        throw new NotImplementedException();\n    }\n\n    protected static void terminate( Servgram servgram ) throws ProxyProvokeHandleException {\n        try{\n            try{\n                servgram.terminate();\n            }\n            catch ( ApoptosisRejectSignalException e ) {\n                if( servgram instanceof Servgramium ) {\n                    ((Servgramium) servgram).kill();\n                }\n            }\n        }\n        catch ( Exception e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    protected void intoStart() {\n        super.intoStart();\n        this.notifyExecuting();\n    }\n\n    @Override\n    protected void intoFinished() {\n        super.intoFinished();\n        this.notifyFinished();\n    }\n\n    
@Override\n    protected void intoTerminated() {\n        super.intoTerminated();\n        this.notifyFinished();\n    }\n\n    @Override\n    protected void intoRollback() {\n        super.intoRollback();\n        this.notifyExecuting();\n    }\n\n    @Override\n    protected void intoError( Exception e ) {\n        super.intoError( e );\n        this.notifyFinished();\n    }\n\n    protected void notifyExecuting() {\n        if( this.mWrapServgram instanceof Executum ) {\n            this.mOrchestrator.notifyExecuting( (Executum)this.mWrapServgram );\n        }\n    }\n\n    protected void notifyFinished() {\n        if( this.mWrapServgram instanceof Executum ) {\n            this.mOrchestrator.notifyFinished( (Executum)this.mWrapServgram );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/LocalServgramOrchestrator.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.name.Name;\n\nimport java.util.List;\n\npublic class LocalServgramOrchestrator extends ArchServgramOrchestrator {\n    public LocalServgramOrchestrator( Processum parent, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) {\n        super( parent, sectionConfig, factory, transaction );\n    }\n\n    public LocalServgramOrchestrator( Processum parent, PatriarchalConfig sectionConfig ) {\n        super( parent, sectionConfig, null, null );\n\n        this.prepareFactory( new LocalGramFactory( this ) );\n        this.setTransaction( new LocalGramTransaction( this, parent ) );\n    }\n\n    @Override\n    protected List<Servgram > popping( String szName ) {\n        return ( (GramFactory)this.getClassFactory() ).popping( szName, szName, this.getSystem() );\n    }\n\n    @Override\n    protected List<Servgram > popping( Name name ) {\n        return ( (GramFactory)this.getClassFactory() ).popping( name, name.getName(), this.getSystem() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/MultiGramsLoader.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.util.lang.MultiClassScopeLoader;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.ulf.util.lang.MultiTraitClassLoader;\n\nimport java.util.List;\n\npublic interface MultiGramsLoader extends GramLoader, MultiClassScopeLoader, MultiTraitClassLoader {\n    @Override\n    List<Class<? > > loads( Name name ) ;\n\n    @Override\n    List<Class<? > > loadsByName( Name simpleName );\n\n    @Override\n    List<Class<? > > loadsInClassTrait( Name simpleName ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/OrchestrateInterruptException.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class OrchestrateInterruptException extends Exception implements Pinenut {\n    public OrchestrateInterruptException    () {\n        super();\n    }\n\n    public OrchestrateInterruptException    ( String message ) {\n        super(message);\n    }\n\n    public OrchestrateInterruptException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public OrchestrateInterruptException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected OrchestrateInterruptException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/Servgram.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\n\npublic interface Servgram extends Pinenut {\n    String getName();\n\n    default String gramName(){\n        return this.className();\n    }\n\n    PatriarchalConfig getConfig();\n\n    RuntimeSystem parentSystem();\n\n    void terminate() ;\n\n    void execute() throws Exception ;\n\n    // Who summoned me ?\n    ServgramOrchestrator getAttachedOrchestrator();\n\n    String ConfigServgramsKey = \"Servgrams\";\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/ServgramOrchestrator.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.ulf.util.lang.MultiScopeFactory;\nimport org.slf4j.Logger;\n\npublic interface ServgramOrchestrator extends AutoOrchestrator {\n    PatriarchalConfig getSectionConfig();\n\n    PatriarchalConfig getOrchestrationConfig();\n\n    GramTransaction   getTransaction();\n\n    void    setTransaction( GramTransaction transaction );\n\n    void    onlyOrchestrateTransaction() throws OrchestrateInterruptException ;\n\n    void    startTransaction() throws OrchestrateInterruptException ;\n\n    Logger  tracer();\n\n    MultiScopeFactory getClassFactory();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/Servgramium.java",
    "content": "package com.pinecone.hydra.servgram;\n\nimport com.pinecone.framework.system.executum.Processum;\n\npublic interface Servgramium extends Servgram, Processum {\n    @Override\n    default void terminate() {\n        this.apoptosis();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/Servgramlet.java",
    "content": "package com.pinecone.hydra.servgram;\n\npublic interface Servgramlet extends Servgram {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/filters/AnnotationValueFilter.java",
    "content": "package com.pinecone.hydra.servgram.filters;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport javassist.bytecode.annotation.Annotation;\n\npublic interface AnnotationValueFilter extends Pinenut {\n    boolean match( Annotation that, String destinationName );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/filters/ExcludeGramFilters.java",
    "content": "package com.pinecone.hydra.servgram.filters;\n\nimport com.pinecone.framework.util.lang.TypeFilter;\nimport com.pinecone.ulf.util.lang.HierarchyClassInspector;\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.NotFoundException;\n\nimport java.io.IOException;\n\npublic class ExcludeGramFilters implements TypeFilter {\n    protected HierarchyClassInspector  mClassInspector;\n\n    public ExcludeGramFilters( HierarchyClassInspector inspector ) {\n        this.mClassInspector = inspector;\n    }\n\n    @Override\n    public boolean match( String szClassName, Object pool ) throws IOException {\n        try{\n            CtClass clz = ( (ClassPool) pool ).get( szClassName );\n            if( clz.isInterface() ) {\n                return true;\n            }\n            if( this.mClassInspector.isImplemented( clz, com.pinecone.hydra.servgram.Servgram.class ) ) {\n                return false;\n            }\n            return !this.mClassInspector.hasOwnAnnotation( clz, com.pinecone.hydra.servgram.Gram.class ) ;\n        }\n        catch ( NotFoundException e ) {\n            return true;\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/servgram/filters/GramAnnotationValueFilter.java",
    "content": "package com.pinecone.hydra.servgram.filters;\n\nimport com.pinecone.hydra.servgram.Gram;\nimport javassist.bytecode.annotation.Annotation;\n\npublic class GramAnnotationValueFilter implements AnnotationValueFilter {\n    public boolean match( Annotation that, String destinationName ) {\n        if( that.getTypeName().equals( com.pinecone.hydra.servgram.Gram.class.getName() ) ) {\n            String szAN = that.getMemberValue( Gram.ValueKey ).toString();\n            if( szAN.startsWith( \"\\\"\" ) ){\n                return !szAN.equals(\"\\\"\" + destinationName + \"\\\"\");\n            }\n            return !szAN.equals( destinationName );\n        }\n\n        return true;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/App.java",
    "content": "package com.pinecone.hydra.task;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface App extends TaskFamilyMeta {\n    long getEnumId();\n\n    GUID getGuid();\n\n    String getName();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ArchInstanceMeta.java",
    "content": "package com.pinecone.hydra.task;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.task.marshal.TaskScheduleType;\n\npublic abstract class ArchInstanceMeta implements TaskInstanceMeta {\n    protected GUID guid;\n    protected GUID taskGuid;\n    protected String instanceName;\n    protected String taskName;\n    protected LocalDateTime businessTime;\n    protected short priority;\n    protected String imagePath;\n    protected short actuallyPriority;\n    protected TaskInstanceStatus instanceStatus;\n    protected String taskType;\n    protected int runCount;\n    protected int sequenceCnt;\n    protected int retryCnt;\n    protected String errorCause;\n    protected boolean dryRun;\n    protected TaskScheduleCycle scheduleCycle;\n    protected TaskScheduleType scheduleType;\n    protected LocalDateTime lastStartTime;\n    protected LocalDateTime lastEndTime;\n    protected LocalDateTime expectTime;\n    protected LocalDateTime fireTime;\n    protected LocalDateTime startTime;\n    protected LocalDateTime finishTime;\n    protected LocalDateTime scheduleHostTime;\n    protected LocalDateTime submitTime;\n    protected LocalDateTime scheduleTime;\n    protected String        processorName;\n    protected LocalDateTime createTime;\n    protected LocalDateTime updateTime;\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public GUID getTaskGuid() {\n        return this.taskGuid;\n    }\n\n    @Override\n    public String getInstanceName() {\n        return this.instanceName;\n    }\n\n    @Override\n    public String getTaskName() {\n        return this.taskName;\n    }\n\n    @Override\n    public LocalDateTime getBusinessTime() {\n        return this.businessTime;\n    }\n\n    @Override\n    public short getPriority() {\n        return this.priority;\n    }\n\n    @Override\n    
public String getImagePath() {\n        return this.imagePath;\n    }\n\n    @Override\n    public short getActuallyPriority() {\n        return this.actuallyPriority;\n    }\n\n    @Override\n    public TaskInstanceStatus getInstanceStatus() {\n        return this.instanceStatus;\n    }\n\n\n\n    @Override\n    public String getTaskType() {\n        return this.taskType;\n    }\n\n    @Override\n    public int getRunCount() {\n        return this.runCount;\n    }\n\n    @Override\n    public int getSequenceCnt() {\n        return this.sequenceCnt;\n    }\n\n    @Override\n    public int getRetryCnt() {\n        return this.retryCnt;\n    }\n\n    @Override\n    public String getErrorCause() {\n        return this.errorCause;\n    }\n\n    @Override\n    public boolean isDryRun() {\n        return this.dryRun;\n    }\n\n    @Override\n    public TaskScheduleCycle getKernelScheduleCycle() {\n        return this.scheduleCycle;\n    }\n\n    @Override\n    public TaskScheduleType getKernelScheduleType() {\n        return this.scheduleType;\n    }\n\n    @Override\n    public LocalDateTime getLastStartTime() {\n        return this.lastStartTime;\n    }\n\n    @Override\n    public LocalDateTime getLastEndTime() {\n        return this.lastEndTime;\n    }\n\n    @Override\n    public LocalDateTime getExpectTime() {\n        return this.expectTime;\n    }\n\n    @Override\n    public LocalDateTime getFireTime() {\n        return this.fireTime;\n    }\n\n    @Override\n    public LocalDateTime getStartTime() {\n        return this.startTime;\n    }\n\n    @Override\n    public LocalDateTime getFinishTime() {\n        return this.finishTime;\n    }\n\n    @Override\n    public LocalDateTime getScheduleHostTime() {\n        return this.scheduleHostTime;\n    }\n\n    @Override\n    public LocalDateTime getSubmitTime() {\n        return this.submitTime;\n    }\n\n    @Override\n    public LocalDateTime getScheduleTime() {\n        return this.scheduleTime;\n    }\n\n\n    
@Override\n    public void setExpectTime(LocalDateTime expectTime) {\n        this.expectTime = expectTime;\n    }\n\n    @Override\n    public void setFireTime(LocalDateTime fireTime) {\n        this.fireTime = fireTime;\n    }\n\n    @Override\n    public void setStartTime(LocalDateTime startTime) {\n        this.startTime = startTime;\n    }\n\n    @Override\n    public void setFinishTime(LocalDateTime finishTime) {\n        this.finishTime = finishTime;\n    }\n\n    @Override\n    public void setScheduleHostTime(LocalDateTime scheduleHostTime) {\n        this.scheduleHostTime = scheduleHostTime;\n    }\n\n    @Override\n    public void setSubmitTime(LocalDateTime submitTime) {\n        this.submitTime = submitTime;\n    }\n\n    @Override\n    public void setScheduleTime(LocalDateTime scheduleTime) {\n        this.scheduleTime = scheduleTime;\n    }\n\n    @Override\n    public String getProcessorName() {\n        return this.processorName;\n    }\n\n    @Override\n    public void setProcessorName(String processorName) {\n        this.processorName = processorName;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ArchTask.java",
    "content": "package com.pinecone.hydra.task;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\n\npublic abstract class ArchTask implements Task {\n\n    protected Identification            mServiceId;\n\n    protected TaskElement               mTaskElement;\n\n    protected Map<String, Object >      mMetaDataScope;\n\n    public ArchTask( Identification serviceId, TaskElement serviceElement, Map<String, Object > metaDataScope ){\n        this.mServiceId     = serviceId;\n        this.mTaskElement   = serviceElement;\n        this.mMetaDataScope = metaDataScope;\n    }\n\n    public ArchTask( Identification serviceId, TaskElement serviceElement ){\n       this( serviceId, serviceElement, null );\n    }\n\n\n    public TaskElement getTaskElement() {\n        return this.mTaskElement;\n    }\n\n    @Override\n    public String getName() {\n        return this.mTaskElement.getName();\n    }\n\n    @Override\n    public String getDisplayName() {\n        return this.mTaskElement.getName();\n    }\n\n    @Override\n    public String getFullName() {\n        return this.mTaskElement.getKomPath();\n    }\n\n\n    public GUID getGuid() {\n        return this.mTaskElement.getGuid();\n    }\n\n    @Override\n    public Identification getId() {\n        return this.getGuid();\n    }\n\n    @Override\n    public String getScenario() {\n        return this.mTaskElement.getScenario();\n    }\n\n    @Override\n    public String getMarshallingArchitecture() {\n        return this.mTaskElement.getMarshallingArchitecture();\n    }\n\n    @Override\n    public String getExtraInformation() {\n        return this.mTaskElement.getExtraInformation();\n    }\n\n    @Override\n    public short getPriority() {\n        return this.mTaskElement.getPriority();\n    }\n\n    @Override\n    
public short getActuallyPriority() {\n        return this.mTaskElement.getActuallyPriority();\n    }\n\n    @Override\n    public boolean isDryRun() {\n        return this.mTaskElement.isDryRun();\n    }\n\n    @Override\n    public boolean isEnable() {\n        return this.mTaskElement.isEnable();\n    }\n\n    @Override\n    public String getDescription() {\n        return this.mTaskElement.getDescription();\n    }\n\n    @Override\n    public String getProcessorName() {\n        return this.mTaskElement.getProcessorName();\n    }\n\n    @Override\n    public Map<String, Object> getMetaDataScope() {\n        return this.mMetaDataScope;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ArchTaskFamilyMeta.java",
    "content": "package com.pinecone.hydra.task;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\npublic abstract class ArchTaskFamilyMeta implements TaskFamilyMeta {\n    protected GUID   guid;\n\n    protected String name;\n\n    protected String scenario;\n\n    protected String marshallingArchitecture;\n\n    protected String extraInformation;\n\n    protected String szElementaryConfig;\n\n    protected Map<String, Object > elementaryConfig;\n\n\n    protected String description;\n\n    public ArchTaskFamilyMeta() {\n    }\n\n    public ArchTaskFamilyMeta( Map<String, Object > joEntity ) {\n        this.apply( joEntity );\n    }\n\n    protected ArchTaskFamilyMeta apply( Map<String, Object > joEntity ) {\n        String szGuid = (String) joEntity.get( \"guid\" );\n        if( szGuid != null ) {\n            this.guid = GUIDs.GUID128( (String) joEntity.get( \"guid\" ) );\n        }\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n\n        return this;\n    }\n\n\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public Identification getId() {\n        return this.getGuid();\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public String getScenario() {\n        return this.scenario;\n    }\n\n    @Override\n    public String getMarshallingArchitecture() {\n        return this.marshallingArchitecture;\n    }\n\n    @Override\n    public String getExtraInformation() {\n        return this.extraInformation;\n    }\n\n    @Override\n    public String getDescription() {\n        return this.description;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n  
  @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ArchTaskInstance.java",
    "content": "package com.pinecone.hydra.task;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.task.marshal.TaskScheduleType;\n\nimport java.time.LocalDateTime;\n\npublic abstract class ArchTaskInstance implements TaskInstance {\n\n    protected InstanceEntry             mInstanceEntry;\n\n    protected Task                      mOwnedTask;\n\n    public ArchTaskInstance( InstanceEntry instanceEntry, Task ownedTask ) {\n        this.mInstanceEntry = instanceEntry;\n        this.mOwnedTask     = ownedTask;\n    }\n\n    @Override\n    public Task getOwnedTask() {\n        return this.mOwnedTask;\n    }\n\n    @Override\n    public TaskInstrument getTaskInstrument() {\n        return this.mInstanceEntry.getTaskInstrument();\n    }\n\n    @Override\n    public String getRunStatus() {\n        return this.mInstanceEntry.getRunStatus();\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mInstanceEntry.getGuid();\n    }\n\n    @Override\n    public GUID getTaskGuid() {\n        return this.mInstanceEntry.getTaskGuid();\n    }\n\n    @Override\n    public String getInstanceName() {\n        return this.mInstanceEntry.getInstanceName();\n    }\n\n    @Override\n    public LocalDateTime getBusinessTime() {\n        return this.mInstanceEntry.getBusinessTime();\n    }\n\n    @Override\n    public short getPriority() {\n        return this.mInstanceEntry.getPriority();\n    }\n\n    @Override\n    public String getImagePath() {\n        return this.mInstanceEntry.getImagePath();\n    }\n\n    @Override\n    public short getActuallyPriority() {\n        return this.mInstanceEntry.getActuallyPriority();\n    }\n\n    @Override\n    public TaskInstanceStatus getInstanceStatus() {\n        return this.mInstanceEntry.getInstanceStatus();\n    }\n\n    @Override\n 
   public String getTaskType() {\n        return this.mInstanceEntry.getTaskType();\n    }\n\n    @Override\n    public int getRunCount() {\n        return this.mInstanceEntry.getRunCount();\n    }\n\n    @Override\n    public int getRetryCnt() {\n        return this.mInstanceEntry.getRetryCnt();\n    }\n\n    @Override\n    public int getSequenceCnt() {\n        return this.mInstanceEntry.getSequenceCnt();\n    }\n\n    @Override\n    public String getErrorCause() {\n        return this.mInstanceEntry.getErrorCause();\n    }\n\n    @Override\n    public boolean isDryRun() {\n        return this.mInstanceEntry.isDryRun();\n    }\n\n    @Override\n    public TaskScheduleCycle getKernelScheduleCycle() {\n        return this.mInstanceEntry.getKernelScheduleCycle();\n    }\n\n    @Override\n    public TaskScheduleType getKernelScheduleType() {\n        return this.mInstanceEntry.getKernelScheduleType();\n    }\n\n    @Override\n    public LocalDateTime getLastStartTime() {\n        return this.mInstanceEntry.getLastStartTime();\n    }\n\n    @Override\n    public LocalDateTime getLastEndTime() {\n        return this.mInstanceEntry.getLastEndTime();\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.mInstanceEntry.getCreateTime();\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.mInstanceEntry.getUpdateTime();\n    }\n\n    @Override\n    public InstanceEntry getInstanceEntry() {\n        return this.mInstanceEntry;\n    }\n\n    @Override\n    public LocalDateTime getExpectTime() {\n        return this.mInstanceEntry.getExpectTime();\n    }\n\n    @Override\n    public LocalDateTime getFireTime() {\n        return this.mInstanceEntry.getFireTime();\n    }\n\n    @Override\n    public LocalDateTime getStartTime() {\n        return this.mInstanceEntry.getStartTime();\n    }\n\n    @Override\n    public LocalDateTime getFinishTime() {\n        return this.mInstanceEntry.getFinishTime();\n    
}\n\n    @Override\n    public LocalDateTime getScheduleHostTime() {\n        return this.mInstanceEntry.getScheduleHostTime();\n    }\n\n    @Override\n    public LocalDateTime getSubmitTime() {\n        return this.mInstanceEntry.getSubmitTime();\n    }\n\n    @Override\n    public LocalDateTime getScheduleTime() {\n        return this.mInstanceEntry.getScheduleTime();\n    }\n\n    @Override\n    public String getProcessorName() {\n        return this.mInstanceEntry.getProcessorName();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/InstanceEventType.java",
    "content": "package com.pinecone.hydra.task;\n\npublic enum InstanceEventType {\n\n    TaskSubmit             ( \"TaskSubmit\"           ),\n    TaskTimeReady          ( \"TaskTimeReady\"        ),\n    CheckDependencyReady   ( \"CheckDependencyReady\" ),\n    DepartureReady         ( \"DepartureReady\"       ), // 实例离港完成，进入工作节点，等待远端正式执行\n    TaskRun                ( \"TaskRun\"              ), // 实例启动完成，进入工作节点，远端进程进入运行\n    TaskExecSuccess        ( \"TaskExecSuccess\"      ),\n    TaskExecFail           ( \"TaskExecFail\"         ),\n    TaskSuccess            ( \"TaskSuccess\"          ),\n    TaskFail               ( \"TaskFail\"             ),\n    AuditPost              ( \"AuditPost\"            ),\n    AuditSuccess           ( \"AuditSuccess\"         ),\n    AuditFail              ( \"AuditFail\"            ),\n    TaskKilled             ( \"TaskKilled\"           );\n\n    private final String name;\n\n    InstanceEventType( String name ) {\n        this.name = name;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public static InstanceEventType valueOfName( String name ) {\n        try {\n            return InstanceEventType.valueOf(name);\n        }\n        catch (IllegalArgumentException e) {\n            return null;\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/ProcApp.java",
    "content": "package com.pinecone.hydra.task;\n\npublic interface ProcApp extends App {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/Task.java",
    "content": "package com.pinecone.hydra.task;\n\nimport java.util.Map;\n\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\n\npublic interface Task extends TaskFamilyMeta {\n\n    TaskElement getTaskElement();\n\n    String getName();        // Service Name, e.g. WpnService\n\n    String getDisplayName(); // Service Long Name, e.g. Windows Push Notification System Service\n\n    String getDescription();\n\n    String getFullName();\n\n    String getScenario() ;\n\n    String getMarshallingArchitecture() ;\n\n    String getExtraInformation() ;\n\n    short getPriority();\n\n    short getActuallyPriority();\n\n\n    boolean isDryRun() ;\n\n    boolean isEnable() ;\n\n    String getProcessorName();\n\n\n    Map<String, Object> getMetaDataScope();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskApp.java",
    "content": "package com.pinecone.hydra.task;\n\npublic interface TaskApp extends App {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskExtraMeta.java",
    "content": "package com.pinecone.hydra.task;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface TaskExtraMeta extends Pinenut {\n\n    TaskFamilyMeta getKernelMeta();\n\n    GUID getGuid() ;\n\n    String getTaskName();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskFamilyMeta.java",
    "content": "package com.pinecone.hydra.task;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.Identification;\n\npublic interface TaskFamilyMeta extends Pinenut  {\n\n    //long getEnumId();\n\n    //GUID getGuid();\n\n    Identification getId() ;\n\n    String getName();\n\n    String getScenario();\n\n    String getMarshallingArchitecture();\n\n    String getExtraInformation();\n\n    String getDescription();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskInstance.java",
    "content": "package com.pinecone.hydra.task;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.task.marshal.TaskScheduleType;\n\npublic interface TaskInstance extends Pinenut {\n\n    default Identification getId() {\n        return this.getGuid();\n    }\n\n    Object getProcessObject();\n\n    Task getOwnedTask();\n\n    TaskInstrument getTaskInstrument();\n\n    String getRunStatus ();\n\n    GUID getGuid();\n\n    GUID getTaskGuid();\n\n    String getInstanceName();\n\n    LocalDateTime getBusinessTime ();\n\n    short getPriority();\n\n    String getImagePath();\n\n    short getActuallyPriority();\n\n    TaskInstanceStatus getInstanceStatus ();\n\n    String getTaskType ();\n\n    int getRunCount ();\n\n    int getSequenceCnt();\n\n    int getRetryCnt();\n\n    boolean isDryRun() ;\n\n    String getErrorCause();\n\n    TaskScheduleCycle getKernelScheduleCycle ();\n\n    TaskScheduleType getKernelScheduleType ();\n\n    LocalDateTime getLastStartTime ();\n\n    LocalDateTime getLastEndTime ();\n\n    LocalDateTime getCreateTime ();\n\n    LocalDateTime getUpdateTime ();\n\n    InstanceEntry getInstanceEntry();\n\n    LocalDateTime getExpectTime();\n\n    LocalDateTime getFireTime();\n\n    LocalDateTime getStartTime();\n\n    LocalDateTime getFinishTime();\n\n    LocalDateTime getScheduleHostTime();\n\n    LocalDateTime getSubmitTime();\n\n    LocalDateTime getScheduleTime();\n\n    String getProcessorName();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskInstanceExecState.java",
    "content": "package com.pinecone.hydra.task;\n\npublic enum TaskInstanceExecState {\n\n    Success   ( \"Success\"   ),\n    Fail      ( \"Fail\"      ),\n    Running   ( \"Running\"   ),\n    Submitted ( \"Submitted\" ),\n    Killed    ( \"Killed\"    ),\n    ;\n\n    private final String name;\n\n    TaskInstanceExecState( String name ) {\n        this.name = name;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskInstanceMeta.java",
    "content": "package com.pinecone.hydra.task;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.task.marshal.TaskScheduleType;\n\npublic interface TaskInstanceMeta extends Pinenut {\n    GUID getGuid();\n\n    GUID getTaskGuid();\n\n    String getInstanceName();\n\n    String getTaskName();\n\n    LocalDateTime getBusinessTime ();\n\n    short getPriority();\n\n    String getImagePath();\n\n    short getActuallyPriority();\n\n    TaskInstanceStatus getInstanceStatus ();\n\n    String getTaskType ();\n\n    int getRunCount ();\n\n    int getSequenceCnt();\n\n    int getRetryCnt();\n\n    String getErrorCause();\n\n    boolean isDryRun() ;\n\n    TaskScheduleCycle getKernelScheduleCycle ();\n\n    TaskScheduleType getKernelScheduleType ();\n\n    LocalDateTime getLastStartTime ();\n\n    LocalDateTime getLastEndTime ();\n\n    LocalDateTime getCreateTime ();\n\n    LocalDateTime getUpdateTime ();\n\n\n    LocalDateTime getExpectTime();\n\n    LocalDateTime getFireTime();\n\n    LocalDateTime getStartTime();\n\n    LocalDateTime getFinishTime();\n\n    LocalDateTime getScheduleHostTime();\n\n    LocalDateTime getSubmitTime();\n\n    LocalDateTime getScheduleTime();\n\n    String getProcessorName();\n\n    void setExpectTime(LocalDateTime expectTime);\n\n    void setFireTime(LocalDateTime fireTime);\n\n    void setStartTime(LocalDateTime startTime);\n\n    void setFinishTime(LocalDateTime finishTime);\n\n    void setScheduleHostTime(LocalDateTime scheduleHostTime);\n\n    void setSubmitTime(LocalDateTime submitTime);\n\n    void setScheduleTime(LocalDateTime scheduleTime);\n\n    void setProcessorName(String processorName);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/TaskInstanceStatus.java",
    "content": "package com.pinecone.hydra.task;\n\npublic enum TaskInstanceStatus {\n    New                        ( 0x1000, \"WAIT\"     , \"New\"               ), // Instance created, pending for starting and scheduling.\n    DependencyWait             ( 0x1001, \"WAIT\"     , \"DependencyWait\"    ), // DAG dependency wait.\n    ResourceWait               ( 0x1002, \"WAIT\"     , \"ResourceWait\"      ), // Waiting for resource allocation.\n    DepartureStandby           ( 0x1003, \"WAIT\"     , \"DepartureStandby\"  ), // Ready to launch.\n    ProcessStandby             ( 0x1004, \"WAIT\"     , \"ProcessStandby\"    ), // Process spawned.\n\n    Running                    ( 0x2000, \"RUNNING\"  , \"Running\"           ), // Running.\n    Audit                      ( 0x2001, \"RUNNING\"  , \"Audit\"             ), // Auditing and checking. [e.g. DQC check, Event check, etc.]\n\n    Suspended                  ( 0x3000, \"SUSPENDED\", \"Suspended\"         ), // Process suspended.\n\n    Finished                   ( 0x4000, \"SUCCESS\"  , \"Finished\"          ), // Finished termination.\n    Killed                     ( 0x5000, \"KILLED\"   , \"Killed\"            ), // Forced termination.\n    Error                      ( 0x6000, \"FAIL\"     , \"Error\"             ), // Error termination.\n    AuditFailed                ( 0x6001, \"FAIL\"     , \"AuditFailed\"       ), // Auditing failed. [e.g. 
DQC failed etc.]\n    ;\n\n    private final int code;\n    private final String name;\n    private final String group;\n\n    TaskInstanceStatus( int code, String group, String name ) {\n        this.code  = code;\n        this.group = group;\n        this.name  = name;\n    }\n\n    public String getName(){\n        return this.name;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public String getGroup() {\n        return this.group;\n    }\n\n    public static TaskInstanceStatus getByName(String name ) {\n        for ( TaskInstanceStatus status : TaskInstanceStatus.values() ) {\n            if ( status.name.equals(name) ) {\n                return status;\n            }\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/Taskiom.java",
    "content": "package com.pinecone.hydra.task;\n\npublic interface Taskiom extends Task {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/Taskium.java",
    "content": "package com.pinecone.hydra.task;\n\nimport com.pinecone.framework.system.executum.Processum;\n\npublic interface Taskium extends TaskInstance {\n    @Override\n    Processum getProcessObject();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/KernelTaskConfig.java",
    "content": "package com.pinecone.hydra.task.kom;\n\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\npublic class KernelTaskConfig extends ArchKernelObjectConfig implements TaskConfig {\n\n    protected String mszInstanceTitleTimeFormat = TaskMetaConstants.InstanceTitleTimeFormat;\n\n    @Override\n    public String getInstanceTitleTimeFormat() {\n        return this.mszInstanceTitleTimeFormat;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskConfig.java",
    "content": "package com.pinecone.hydra.task.kom;\n\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface TaskConfig extends KernelObjectConfig {\n\n    String getInstanceTitleTimeFormat();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskFamilyNode.java",
    "content": "package com.pinecone.hydra.task.kom;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.task.TaskFamilyMeta;\n\npublic interface TaskFamilyNode extends TaskFamilyMeta {\n    long getEnumId();\n\n    void setEnumId(long id);\n\n    void setName(String name);\n\n    GUID getGuid();\n\n    void setGuid(GUID guid);\n\n    @Override\n    default Identification getId() {\n        return this.getGuid();\n    }\n\n    String getScenario();\n\n    void setScenario( String scenario );\n\n    String getMarshallingArchitecture();\n\n    void setMarshallingArchitecture( String marshallingArchitecture );\n\n    String getExtraInformation();\n\n    void setExtraInformation( String extraInformation );\n\n    String getDescription();\n\n    void setDescription( String description );\n\n    TaskFamilyNode apply( Map<String, Object> joEntity ) ;\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskInstrument.java",
    "content": "package com.pinecone.hydra.task.kom;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.entity.AppElement;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\nimport com.pinecone.hydra.task.kom.entity.Namespace;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.system.ko.kom.ReparseKOMTree;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface TaskInstrument extends ReparseKOMTree {\n\n    TaskConfig KernelServiceConfig = new KernelTaskConfig();\n\n    AppElement affirmJob(String path );\n\n    Namespace          affirmNamespace( String path );\n\n    TaskElement        affirmTask( String path ,TaskElement metaInfos );\n\n    ElementNode        queryElement( String path );\n\n    boolean            containsChild( GUID parentGuid, String childName );\n\n    void               update( TreeNode treeNode );\n\n    InstanceInstrument getInstanceInstrument();\n\n\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskMetaConstants.java",
    "content": "package com.pinecone.hydra.task.kom;\n\npublic final class TaskMetaConstants {\n\n    public static final String InstanceTitleTimeFormat = \"yyyyMMdd_HHmmss\";\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/TaskPathSelector.java",
    "content": "package com.pinecone.hydra.task.kom;\n\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.kom.StandardPathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic class TaskPathSelector extends StandardPathSelector {\n    public TaskPathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, trieTree, dirMan, fileMans );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/UniformTaskInstrument.java",
    "content": "package com.pinecone.hydra.task.kom;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.task.kom.entity.AppElement;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\nimport com.pinecone.hydra.task.kom.entity.GenericAppElement;\nimport com.pinecone.hydra.task.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.task.kom.entity.GenericTaskElement;\nimport com.pinecone.hydra.task.kom.entity.Namespace;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.entity.TaskTreeNode;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.pinecone.hydra.task.kom.instance.KernelInstanceInstrument;\nimport com.pinecone.hydra.task.kom.operator.GenericElementOperatorFactory;\nimport com.pinecone.hydra.task.kom.source.AppNodeManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskNodeManipulator;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree;\nimport com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition;\nimport com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.RegimentedImperialTree;\nimport 
com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V7;\n\npublic class UniformTaskInstrument extends ArchReparseKOMTree implements TaskInstrument {\n    //GenericDistributedScopeTree\n    protected ImperialTree                imperialTree;\n\n    protected TaskMasterManipulator       taskMasterManipulator;\n\n    protected TaskNamespaceManipulator    taskNamespaceManipulator;\n\n    protected AppNodeManipulator          appNodeManipulator;\n\n    protected TaskNodeManipulator         taskNodeManipulator;\n\n    protected List<GUIDNameManipulator >  folderManipulators;\n\n    protected List<GUIDNameManipulator >  fileManipulators;\n\n    protected InstanceInstrument          instanceInstrument;\n\n    public UniformTaskInstrument(\n            Processum superiorProcess, KOIMasterManipulator masterManipulator, TaskInstrument parent, String name, KernelObjectConfig config,\n            @Nullable GuidAllocator guidAllocator\n    ) {\n        super( superiorProcess, masterManipulator, TaskInstrument.KernelServiceConfig, parent, name, guidAllocator );\n\n        this.taskMasterManipulator       = (TaskMasterManipulator) masterManipulator;\n        this.taskNamespaceManipulator    = this.taskMasterManipulator.getNamespaceManipulator();\n        this.appNodeManipulator          = this.taskMasterManipulator.getAppNodeManipulator();\n        this.taskNodeManipulator         = this.taskMasterManipulator.getTaskNodeManipulator();\n        KOISkeletonMasterManipulator skeletonMasterManipulator = this.taskMasterManipulator.getSkeletonMasterManipulator();\n        TreeMasterManipulator        treeMasterManipulator     = (TreeMasterManipulator) skeletonMasterManipulator;\n        this.imperialTree                = new 
RegimentedImperialTree(treeMasterManipulator);\n        this.operatorFactory             = new GenericElementOperatorFactory(this,(TaskMasterManipulator) masterManipulator);\n        this.pathResolver                = new KOPathResolver( this.kernelObjectConfig );\n\n        // TODO for customize service tree architecture.\n        this.folderManipulators          = new ArrayList<>( List.of( this.taskNamespaceManipulator, this.appNodeManipulator) );\n        this.fileManipulators            = new ArrayList<>( List.of( this.appNodeManipulator, this.taskNodeManipulator) );\n        this.pathSelector                = new MultiFolderPathSelector(\n                this.pathResolver, this.imperialTree, this.folderManipulators.toArray( new GUIDNameManipulator[]{} ), this.fileManipulators.toArray( new GUIDNameManipulator[]{} )\n        );\n        this.mReparseKOM                 = new GenericReparseKOMTreeAddition( this );\n        this.instanceInstrument          = new KernelInstanceInstrument( this, this.taskMasterManipulator.getInstanceNodeManipulator() );\n        this.kernelObjectConfig          = config;\n    }\n\n    public UniformTaskInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, KernelObjectConfig config ) {\n        this( superiorProcess, masterManipulator, null, TaskInstrument.class.getSimpleName(), config, new GuidAllocator128V7());\n    }\n\n//    public UniformTaskInstrument( Hydrogen hydrogen ) {\n//        this.hydrogen = hydrogen;\n//    }\n\n    public UniformTaskInstrument( KOIMappingDriver driver, KernelObjectConfig config ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator(),\n                config\n        );\n    }\n\n    public UniformTaskInstrument( KOIMappingDriver driver, TaskInstrument parent, String name, KernelObjectConfig config ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator(),\n            
    parent,\n                name,\n                config,\n                null\n        );\n    }\n\n    protected TaskTreeNode affirmTreeNodeByPath( String path, Class<? > cnSup, Class<? > nsSup ) {\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        String currentPath = \"\";\n        GUID parentGuid = GUIDs.Dummy128();\n\n        TaskTreeNode node = this.queryElement(path);\n        if ( node != null ){\n            return node;\n        }\n\n        TaskTreeNode ret = null;\n        for( int i = 0; i < parts.length; ++i ){\n            currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : \"\" ) + parts[ i ];\n            node = this.queryElement( currentPath );\n            if ( node == null){\n                if ( i == parts.length - 1 && cnSup != null ){\n                    ElementNode en = (ElementNode) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } );\n                    en.setName( parts[i] );\n                    GUID guid = this.put( en );\n                    this.affirmOwnedNode( parentGuid, guid );\n                    return en;\n                }\n                else {\n                    Namespace namespace = (Namespace) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } );\n                    namespace.setName( parts[i] );\n                    GUID guid = this.put( namespace );\n                    if ( i != 0 ){\n                        this.affirmOwnedNode( parentGuid, guid );\n                        parentGuid = guid;\n                    }\n                    else {\n                        parentGuid = guid;\n                    }\n\n                    ret = namespace;\n                }\n            }\n            else {\n                parentGuid = node.getGuid();\n            }\n        }\n\n        return ret;\n    }\n\n    @Override\n    public InstanceInstrument getInstanceInstrument() {\n        return this.instanceInstrument;\n    }\n\n    
@Override\n    public AppElement affirmJob(String path ) {\n        return (AppElement) this.affirmTreeNodeByPath( path, GenericAppElement.class, GenericNamespace.class );\n    }\n\n    @Override\n    public TaskElement affirmTask( String path ,TaskElement metaInfos) {\n        TaskElement taskElement =  (TaskElement) this.affirmTreeNodeByPath( path, GenericTaskElement.class, GenericNamespace.class );\n        taskElement.setActuallyPriority( metaInfos.getActuallyPriority() );\n        taskElement.setDeploymentMethod( metaInfos.getDeploymentMethod() );\n        taskElement.setEnable( metaInfos.isEnable());\n        taskElement.setDryRun( metaInfos.isDryRun() );\n        taskElement.setPriority( metaInfos.getPriority() );\n        taskElement.setResourceType( metaInfos.getResourceType() );\n        taskElement.setScheduleCycle( metaInfos.getScheduleCycle() );\n        taskElement.setScheduleType( metaInfos.getScheduleType() );\n        taskElement.setType( metaInfos.getType() );\n        taskElement.setImagePath( metaInfos.getImagePath() );\n        taskElement.setName( metaInfos.getName() );\n        taskElement.setGuid( metaInfos.getGuid() );\n        return taskElement;\n    }\n\n    @Override\n    public ElementNode queryElement( String path ) {\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid != null ) {\n            return this.get( guid ).evinceElementNode();\n        }\n\n        return null;\n    }\n\n    @Override\n    public Namespace affirmNamespace( String path ) {\n        return ( Namespace ) this.affirmTreeNodeByPath( path, null, GenericNamespace.class );\n    }\n\n    protected boolean containsChild( GUIDNameManipulator manipulator, GUID parentGuid, String childName ) {\n        List<GUID > guids = manipulator.getGuidsByName( childName );\n        for( GUID guid : guids ) {\n            List<GUID > ps = this.imperialTree.fetchParentGuids( guid );\n            if( ps.contains( parentGuid ) ){\n                return true;\n        
    }\n        }\n        return false;\n    }\n\n    @Override\n    public boolean containsChild( GUID parentGuid, String childName ) {\n        for( GUIDNameManipulator manipulator : this.fileManipulators ) {\n            if( this.containsChild( manipulator, parentGuid, childName ) ) {\n                return true;\n            }\n        }\n\n        for( GUIDNameManipulator manipulator : this.folderManipulators ) {\n            if( this.containsChild( manipulator, parentGuid, childName ) ) {\n                return true;\n            }\n        }\n        return false;\n    }\n\n\n    /**\n     * Affirm path exist in cache, if required.\n     * 确保路径存在于缓存，如果有明确实现必要的话。\n     * 对于GenericDistributedScopeTree::getPath, 默认会自动写入缓存，因此这里可以通过getPath保证路径缓存一定存在。\n     * @param guid, target guid.\n     * @return Path\n     */\n    protected void affirmPathExist( GUID guid ) {\n        this.imperialTree.getCachePath( guid );\n    }\n\n    @Override\n    public TaskTreeNode get( GUID guid ){\n        return (TaskTreeNode) super.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n        TreeNodeOperator operator = this.operatorFactory.getOperator( treeNode.getMetaType() );\n        operator.update( treeNode );\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        super.remove( guid );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/AppElement.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport com.pinecone.hydra.task.kom.TaskFamilyNode;\n\npublic interface AppElement extends FolderElement, TaskFamilyNode {\n    @Override\n    default AppElement evinceAppElement() {\n        return this;\n    }\n\n    String getType();\n\n    void setType( String type );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/ArchElementNode.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.task.ArchTaskFamilyMeta;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic abstract class ArchElementNode extends ArchTaskFamilyMeta implements ElementNode {\n    protected long                       enumId;\n\n    protected GUID                       metaGuid;\n\n    protected GUIDImperialTrieNode       distributedTreeNode;\n\n    protected TaskInstrument             taskInstrument;\n\n    protected LocalDateTime              createTime;\n\n    protected LocalDateTime              updateTime;\n\n    public ArchElementNode() {\n        super();\n\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n    }\n\n    public ArchElementNode( Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n    }\n\n    public ArchElementNode( Map<String, Object > joEntity, TaskInstrument taskInstrument) {\n        super( joEntity );\n        this.apply(taskInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public ArchElementNode( TaskInstrument taskInstrument ) {\n        this.apply(taskInstrument);\n    }\n\n    public void apply( TaskInstrument taskInstrument ) {\n        this.taskInstrument = 
taskInstrument;\n        if ( this.getGuid() == null ) {\n            GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator();\n            this.setGuid( guidAllocator.nextGUID() );\n        }\n        if ( this.createTime == null ) {\n            this.createTime = LocalDateTime.now();\n            this.updateTime = LocalDateTime.now();\n        }\n    }\n\n    @Override\n    public ArchElementNode apply( Map<String, Object > joEntity ) {\n        super.apply( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n\n        return this;\n    }\n\n    @Override\n    public String getKomPath() {\n        return this.taskInstrument.getPath( this.getGuid() );\n    }\n\n    @Override\n    public String getSystemKernelObjectPath() {\n        return this.taskInstrument.querySystemKernelObjectPath( this.getGuid() );\n    }\n\n    @Override\n    public GUID getMetaGuid() {\n        return this.metaGuid;\n    }\n\n    @Override\n    public void setMetaGuid( GUID metaGuid ) {\n        this.metaGuid = metaGuid;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId( long enumId ) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid( GUID guid ) {\n        this.guid = guid;\n    }\n\n    @Override\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    @Override\n    public void setScenario( String scenario ) {\n        this.scenario = scenario;\n    }\n\n    @Override\n    public void setMarshallingArchitecture( String marshallingArchitecture ) {\n        this.marshallingArchitecture = marshallingArchitecture;\n    }\n\n    @Override\n    public void setExtraInformation( String extraInformation ) {\n        this.extraInformation = extraInformation;\n    }\n\n    @Override\n    public void setDescription( String description ) {\n    
    this.description = description;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime( LocalDateTime createTime ) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime( LocalDateTime updateTime ) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public GUIDImperialTrieNode getDistributedTreeNode() {\n        return this.distributedTreeNode;\n    }\n\n    @Override\n    public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) {\n        this.distributedTreeNode = distributedTreeNode;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n\n    protected Collection<ElementNode > fetchChildren() {\n        Collection<GUID > guids = this.fetchChildrenGuids();\n        List<ElementNode > elementNodes = new ArrayList<>();\n        for( GUID guid : guids ){\n            ElementNode elementNode = (ElementNode) this.taskInstrument.get( guid );\n            elementNodes.add( elementNode );\n        }\n        return elementNodes;\n    }\n\n    protected Collection<GUID > fetchChildrenGuids() {\n        return this.taskInstrument.fetchChildrenGuids( this.getGuid() );\n    }\n\n    protected void addChild( ElementNode child ) {\n        GUID childId;\n        boolean bContainsChild = this.containsChild( child.getName() );\n        if( bContainsChild ) {\n            return;\n        }\n        else {\n            childId = this.taskInstrument.put( child );\n        }\n\n\n        this.taskInstrument.affirmOwnedNode( this.guid, childId );\n    }\n\n    protected boolean containsChild( String childName ) {\n        return 
this.taskInstrument.containsChild( this.guid, childName );\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        return BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/ElementNode.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport java.time.LocalDateTime;\nimport java.util.Set;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.system.ko.meta.ElementObject;\nimport com.pinecone.hydra.task.kom.TaskFamilyNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic interface ElementNode extends TaskTreeNode, TaskFamilyNode, ElementObject {\n\n    Set<String > UnbeanifiedKeys = Set.of( \"distributedTreeNode\" );\n\n    @Override\n    default String objectCategoryName() {\n        return \"Task\";\n    }\n\n    default Namespace evinceNamespace() {\n        return null;\n    }\n\n    default AppElement evinceAppElement() {\n        return null;\n    }\n\n    default TaskElement evinceTaskElement() {\n        return null;\n    }\n\n    GUIDImperialTrieNode getDistributedTreeNode();\n\n    void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode );\n\n    JSONObject toJSONObject();\n\n    @Override\n    default ElementNode evinceElementNode(){\n        return this;\n    }\n\n    GUID getMetaGuid();\n\n    void setMetaGuid( GUID metaGuid );\n\n    String getKomPath();\n\n    String getSystemKernelObjectPath();\n\n    String getName();\n    void setName( String name );\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime( LocalDateTime updateTime );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/EntryNode.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport com.pinecone.hydra.unit.imperium.entity.MetaEntryNode;\nimport com.pinecone.slime.entity.EnumIndexableEntity;\n\npublic interface EntryNode extends MetaEntryNode, EnumIndexableEntity {\n\n    @Override\n    default EntryNode evinceEntryNode() {\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/FolderElement.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FolderElement extends ElementNode {\n\n    Collection<ElementNode > fetchChildren();\n\n    Collection<GUID > fetchChildrenGuids();\n\n    void addChild( ElementNode child );\n\n    boolean containsChild( String childName );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/GenericAppElement.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\n\npublic class GenericAppElement extends ArchElementNode implements AppElement {\n    protected String        taskType;\n\n    public GenericAppElement() {\n        super();\n    }\n\n    public GenericAppElement( Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericAppElement( Map<String, Object > joEntity, TaskInstrument taskInstrument ) {\n        super( joEntity, taskInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericAppElement( TaskInstrument taskInstrument ) {\n        super(taskInstrument);\n    }\n\n    @Override\n    public String getType() {\n        return this.taskType;\n    }\n\n    @Override\n    public void setType( String taskType ) {\n        this.taskType = taskType;\n    }\n\n    @Override\n    public Collection<ElementNode > fetchChildren() {\n        return super.fetchChildren();\n    }\n\n    @Override\n    public Collection<GUID > fetchChildrenGuids() {\n        return super.fetchChildrenGuids();\n    }\n\n    @Override\n    public void addChild( ElementNode child ) {\n        if( child instanceof FolderElement ) {\n            throw new IllegalArgumentException( \"Foisting `FolderElement` into application node is not accepted.\" );\n        }\n        super.addChild( child );\n    }\n\n    @Override\n    public boolean containsChild( String childName ) {\n        return super.containsChild( childName );\n    }\n\n    @Override\n    public 
JSONObject toJSONObject() {\n        Collection<ElementNode > children = this.fetchChildren();\n        JSONObject jo         = BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys );\n        JSONObject joChildren = new JSONMaptron();\n\n        for( ElementNode node : children ) {\n            joChildren.put( node.getName(), node.toJSONObject() );\n        }\n        jo.put( \"tasks\", joChildren );\n        return jo;\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/GenericNamespace.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic class GenericNamespace extends ArchElementNode implements Namespace {\n    protected GUID                        metaGuid;\n\n    protected GUIDImperialTrieNode        distributedTreeNode;\n\n    protected TaskNamespaceManipulator    namespaceManipulator;\n\n\n    public GenericNamespace() {\n        super();\n    }\n\n    public GenericNamespace( Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericNamespace( Map<String, Object > joEntity, TaskInstrument taskInstrument) {\n        super( joEntity, taskInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericNamespace( TaskInstrument taskInstrument) {\n        super(taskInstrument);\n    }\n\n    public GenericNamespace(TaskInstrument taskInstrument, TaskNamespaceManipulator namespaceManipulator ) {\n        this(taskInstrument);\n        this.namespaceManipulator = namespaceManipulator;\n    }\n\n    @Override\n    public GUIDImperialTrieNode getDistributedTreeNode() {\n        return this.distributedTreeNode;\n    }\n\n    @Override\n    public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) {\n        this.distributedTreeNode = distributedTreeNode;\n    }\n\n    
@Override\n    public GUID getMetaGuid() {\n        return this.metaGuid;\n    }\n\n    @Override\n    public void setMetaGuid( GUID metaGuid ) {\n        this.metaGuid = metaGuid;\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        Collection<ElementNode > children = this.fetchChildren();\n        JSONObject jo = new JSONMaptron();\n\n        for( ElementNode node : children ) {\n            jo.put( node.getName(), node.toJSONObject() );\n        }\n        return jo;\n    }\n\n    @Override\n    public JSONObject toJSONDetails() {\n        return BeanColonist.DirectColonist.populate( this, ElementNode.UnbeanifiedKeys );\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"guid\"        , this.getGuid()            ),\n                new KeyValue<>( \"name\"        , this.getName()            )\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.name;\n    }\n\n    @Override\n    public Collection<ElementNode > fetchChildren() {\n        return super.fetchChildren();\n    }\n\n    @Override\n    public Collection<GUID > fetchChildrenGuids() {\n        return super.fetchChildrenGuids();\n    }\n\n    @Override\n    public void addChild( ElementNode child ) {\n        super.addChild( child );\n    }\n\n    @Override\n    public boolean containsChild( String childName ) {\n        return super.containsChild( childName );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/GenericTaskElement.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.task.TaskExtraMeta;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.task.marshal.TaskScheduleType;\n\npublic class GenericTaskElement extends ArchElementNode implements TaskElement {\n    protected String                   taskType;\n    protected String                   imagePath;\n    protected String                   resourceType;\n    protected String                   deploymentMethod;\n\n    protected short                    priority;\n    protected short                    actuallyPriority;\n    protected boolean                  dryRun;\n\n    protected String                   scheduleCron;\n    protected TaskScheduleCycle        scheduleCycle;\n    protected TaskScheduleType         scheduleType;\n    protected boolean                  enable;\n\n    protected LocalDateTime            scheduleStartTime;\n    protected LocalDateTime            scheduleEndTime;\n    protected LocalDateTime            nextScheduleTime;\n\n    protected String                   processorName;\n\n    private void initSelf( Map<String, Object > joEntity ) {\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        if ( this.szElementaryConfig != null ) {\n            this.elementaryConfig = (JSONObject)JSON.parse( this.szElementaryConfig );\n        }\n    }\n\n    public GenericTaskElement() {\n        super();\n    }\n\n    public GenericTaskElement( Map<String, Object > joEntity ) {\n        super( joEntity );\n        this.initSelf( joEntity );\n    }\n\n    public GenericTaskElement( Map<String, Object > joEntity, TaskInstrument taskInstrument ) {\n        
super( joEntity, taskInstrument);\n        this.initSelf( joEntity );\n    }\n\n    public GenericTaskElement( TaskInstrument taskInstrument ) {\n        super(taskInstrument);\n    }\n\n    @Override\n    public String getType() {\n        return this.taskType;\n    }\n\n    @Override\n    public void setType( String taskType ) {\n        this.taskType = taskType;\n    }\n\n    @Override\n    public String getImagePath() {\n        return this.imagePath;\n    }\n\n    @Override\n    public void setImagePath( String imagePath ) {\n        this.imagePath = imagePath;\n    }\n\n    @Override\n    public String getResourceType() {\n        return this.resourceType;\n    }\n\n    @Override\n    public void setResourceType( String resourceType ) {\n        this.resourceType = resourceType;\n    }\n\n    @Override\n    public short getPriority() {\n        return this.priority;\n    }\n\n    @Override\n    public void setPriority( int priority ) {\n        this.priority = (short) priority;\n    }\n\n    @Override\n    public short getActuallyPriority() {\n        return this.actuallyPriority;\n    }\n\n    @Override\n    public void setActuallyPriority( int actuallyPriority ) {\n        this.actuallyPriority = (short) actuallyPriority;\n    }\n\n    @Override\n    public String getDeploymentMethod() {\n        return this.deploymentMethod;\n    }\n\n    @Override\n    public void setDeploymentMethod( String deploymentMethod ) {\n        this.deploymentMethod = deploymentMethod;\n    }\n\n\n    @Override\n    public boolean isDryRun() {\n        return this.dryRun;\n    }\n\n    @Override\n    public void setDryRun( boolean dryRun ) {\n        this.dryRun = dryRun;\n    }\n\n\n    @Override\n    public String getScheduleCron() {\n        return this.scheduleCron;\n    }\n\n    @Override\n    public void setScheduleCron( String scheduleCron ) {\n        this.scheduleCron = scheduleCron;\n    }\n\n    @Override\n    public LocalDateTime getNextScheduleTime() {\n        
return this.nextScheduleTime;\n    }\n\n    @Override\n    public void setNextScheduleTime( LocalDateTime nextScheduleTime ) {\n        this.nextScheduleTime = nextScheduleTime;\n    }\n\n    @Override\n    public void setScheduleCycle ( TaskScheduleCycle kernelScheduleCycle ) {\n        this.scheduleCycle = kernelScheduleCycle;\n    }\n\n    @Override\n    public TaskScheduleCycle getScheduleCycle() {\n        return this.scheduleCycle;\n    }\n\n    @Override\n    public void setScheduleType ( TaskScheduleType kernelScheduleType ) {\n        this.scheduleType = kernelScheduleType;\n    }\n\n    @Override\n    public TaskScheduleType getScheduleType() {\n        return this.scheduleType;\n    }\n\n\n\n    @Override\n    public boolean isEnable() {\n        return this.enable;\n    }\n\n    @Override\n    public void setEnable( boolean enable ) {\n        this.enable = enable;\n    }\n\n    @Override\n    public LocalDateTime getScheduleStartTime() {\n        return this.scheduleStartTime;\n    }\n\n    @Override\n    public void setScheduleStartTime( LocalDateTime scheduleStartTime ) {\n            this.scheduleStartTime = scheduleStartTime;\n    }\n\n    @Override\n    public LocalDateTime getScheduleEndTime() {\n        return  this.scheduleEndTime;\n    }\n\n    @Override\n    public void setScheduleEndTime( LocalDateTime scheduleEndTime ) {\n        this.scheduleEndTime = scheduleEndTime;\n    }\n\n    @Override\n    public String getProcessorName() {\n        return this.processorName;\n    }\n\n    @Override\n    public void setProcessorName( String processorName ) {\n        this.processorName = processorName;\n    }\n\n    @Override\n    public TaskExtraMeta getExtraMeta() {\n        return null;\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/Namespace.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport java.util.Set;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic interface Namespace extends FolderElement {\n\n    Set<String > UnbeanifiedKeys = Set.of( \"distributedTreeNode\", \"classificationRules\" );\n\n    long getEnumId();\n\n    void setEnumId(long id);\n\n    GUID getGuid();\n\n    void setGuid(GUID guid);\n\n    GUID getMetaGuid();\n\n    void setMetaGuid(GUID metaGuid);\n\n    String getName();\n\n    void setName(String name);\n\n    GUIDImperialTrieNode getDistributedTreeNode();\n\n    void setDistributedTreeNode(GUIDImperialTrieNode distributedTreeNode);\n\n    @Override\n    default Namespace evinceNamespace() {\n        return this;\n    }\n\n    JSONObject toJSONDetails();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/TaskElement.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.TaskExtraMeta;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.task.marshal.TaskScheduleType;\n\nimport java.time.LocalDateTime;\n\npublic interface TaskElement extends ElementNode {\n\n    @Override\n    default TaskElement evinceTaskElement() {\n        return this;\n    }\n\n    String getImagePath();\n    void setImagePath( String path );\n\n    String getType();\n    void setType( String type );\n\n    String getDeploymentMethod();\n    void setDeploymentMethod( String deploymentMethod );\n\n    String getResourceType();\n    void setResourceType( String resourceType );\n\n    short getPriority();\n    void setPriority( int priority );\n\n    short getActuallyPriority();\n    void setActuallyPriority( int priority );\n\n\n    TaskScheduleCycle getScheduleCycle();\n    void setScheduleCycle ( TaskScheduleCycle kernelScheduleCycle ) ;\n\n    TaskScheduleType getScheduleType();\n    void setScheduleType ( TaskScheduleType kernelScheduleType ) ;\n\n\n    boolean isDryRun() ;\n    void setDryRun( boolean dryRun ) ;\n\n    String getScheduleCron();\n    void setScheduleCron( String scheduleCron ) ;\n\n    boolean isEnable() ;\n    void setEnable( boolean enable ) ;\n\n    LocalDateTime getScheduleStartTime();\n    void setScheduleStartTime( LocalDateTime scheduleStartTime );\n\n    LocalDateTime getScheduleEndTime();\n    void setScheduleEndTime( LocalDateTime scheduleEndTime );\n\n    LocalDateTime getNextScheduleTime();\n    void setNextScheduleTime( LocalDateTime nextScheduleTime );\n\n\n    String getProcessorName();\n    void setProcessorName( String processorName );\n\n\n    TaskExtraMeta getExtraMeta();\n\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/entity/TaskTreeNode.java",
    "content": "package com.pinecone.hydra.task.kom.entity;\n\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface TaskTreeNode extends TreeNode {\n    String getName();\n\n    default String getMetaType() {\n        return this.className().replace(\"Generic\",\"\");\n    }\n\n    default TaskTreeNode evinceTreeNode(){\n        return this;\n    }\n\n    default ElementNode evinceElementNode(){\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/GenericInstanceEntry.java",
    "content": "package com.pinecone.hydra.task.kom.instance;\n\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.task.ArchInstanceMeta;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.EntryNode;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.task.marshal.TaskScheduleType;\n\npublic class GenericInstanceEntry extends ArchInstanceMeta implements InstanceEntry, EntryNode {\n\n    protected long enumId;\n\n    protected TaskInstrument taskInstrument;\n\n    protected TaskElement taskElement;\n\n    public GenericInstanceEntry() {\n        super();\n\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n    }\n\n    public GenericInstanceEntry( Map<String, Object > joEntity ) {\n        this();\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericInstanceEntry( Map<String, Object > joEntity, TaskInstrument taskInstrument ) {\n        this.apply(taskInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericInstanceEntry( TaskInstrument taskInstrument ) {\n        this( taskInstrument, null );\n    }\n\n    public GenericInstanceEntry( TaskInstrument taskInstrument, @Nullable TaskElement taskElement ) {\n        this.taskElement = taskElement;\n        this.apply(taskInstrument);\n    }\n\n    public void apply( TaskInstrument taskInstrument ) {\n        this.taskInstrument = taskInstrument;\n        if ( this.getGuid() == null ) {\n            
GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator();\n            this.setGuid( guidAllocator.nextGUID() );\n        }\n        if ( this.createTime == null ) {\n            this.createTime = LocalDateTime.now();\n            this.updateTime = LocalDateTime.now();\n        }\n\n        if ( this.taskElement == null && this.getTaskGuid() != null ) {\n            this.taskElement = (TaskElement) this.taskInstrument.get( this.getTaskGuid() );\n        }\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public String getTaskName() {\n        if ( this.taskName == null ) {\n            if ( this.taskElement != null ) {\n                this.taskName = this.taskElement.getName();\n            }\n        }\n        return this.taskName;\n    }\n\n    @Override\n    public TaskElement taskElement() {\n        return this.taskElement;\n    }\n\n    @Override\n    public void setGuid ( GUID guid ) {\n        this.guid = guid;\n    }\n\n    @Override\n    public void setTaskGuid ( GUID taskGuid ) {\n        this.taskGuid = taskGuid;\n    }\n\n    @Override\n    public void setInstanceName ( String instanceName ) {\n        this.instanceName = instanceName;\n    }\n\n    @Override\n    public void setTaskName( String taskName ) {\n        this.taskName = taskName;\n    }\n\n    @Override\n    public void setBusinessTime ( LocalDateTime businessTime ) {\n        this.businessTime = businessTime;\n    }\n\n    @Override\n    public void setPriority ( int priority ) {\n        this.priority = (short) priority;\n    }\n\n    @Override\n    public void setImagePath( String imagePath ) {\n        this.imagePath = imagePath;\n    }\n\n    @Override\n    public void setActuallyPriority ( int actuallyPriority ) {\n        this.actuallyPriority = (short) actuallyPriority;\n    }\n\n    @Override\n    public void setInstanceStatus ( TaskInstanceStatus instanceStatus ) {\n        this.instanceStatus = 
instanceStatus;\n    }\n\n    @Override\n    public void setTaskType ( String taskType ) {\n        this.taskType = taskType;\n    }\n\n    @Override\n    public void setRunCount ( int runCount ) {\n        this.runCount = runCount;\n    }\n\n    @Override\n    public void setSequenceCnt( int sequenceCnt ) {\n        this.sequenceCnt = sequenceCnt;\n    }\n\n    @Override\n    public void setRetryCnt( int retryCnt ) {\n        this.retryCnt = retryCnt;\n    }\n\n    @Override\n    public void setDryRun ( boolean dryRun ) {\n        this.dryRun = dryRun;\n    }\n\n    @Override\n    public void setErrorCause( String errorCause ) {\n        this.errorCause = errorCause;\n    }\n\n    @Override\n    public void setScheduleCycle ( TaskScheduleCycle kernelScheduleCycle ) {\n        this.scheduleCycle = kernelScheduleCycle;\n    }\n\n    @Override\n    public void setScheduleType ( TaskScheduleType kernelScheduleType ) {\n        this.scheduleType = kernelScheduleType;\n    }\n\n    @Override\n    public void setLastStartTime ( LocalDateTime lastStartTime ) {\n        this.lastStartTime = lastStartTime;\n    }\n\n    @Override\n    public void setLastEndTime ( LocalDateTime lastEndTime ) {\n        this.lastEndTime = lastEndTime;\n    }\n\n    @Override\n    public void setCreateTime ( LocalDateTime createTime ) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public void setUpdateTime ( LocalDateTime updateTime ) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public TaskInstrument getTaskInstrument() {\n        return this.taskInstrument;\n    }\n\n\n    @Override\n    public String getRunStatus() {\n        if ( this.instanceStatus == null ) {\n            return null;\n        }\n        return this.instanceStatus.getName();\n    }\n\n    @Override\n    public void setRunStatus ( String status ) {\n        this.instanceStatus = TaskInstanceStatus.getByName( status );\n    }\n\n    @Override\n    public String toString() {\n  
      return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/InstanceEntry.java",
    "content": "package com.pinecone.hydra.task.kom.instance;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.TaskInstanceMeta;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.EntryNode;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.task.marshal.TaskScheduleType;\n\npublic interface InstanceEntry extends TaskInstanceMeta, EntryNode {\n\n     @Override\n     default String getName() {\n          return this.getInstanceName();\n     }\n\n     @Override\n     String getTaskName();\n\n     TaskElement taskElement();\n\n     void setGuid ( GUID guid );\n\n     void setTaskGuid ( GUID taskGuid );\n\n     void setInstanceName ( String instanceName );\n\n     void setBusinessTime ( LocalDateTime businessTime );\n\n     void setTaskName ( String taskName );\n\n     void setPriority ( int priority );\n\n     void setImagePath( String imagePath );\n\n     void setActuallyPriority ( int actuallyPriority );\n\n     void setInstanceStatus ( TaskInstanceStatus instanceStatus );\n\n     void setTaskType ( String taskType );\n\n     void setRunCount ( int runCount );\n\n     void setSequenceCnt( int sequenceCnt );\n\n     void setRetryCnt( int retryCnt );\n\n     void setRunStatus( String runStatus );\n\n     void setDryRun ( boolean dryRun );\n\n     void setErrorCause( String errorCause );\n\n     void setScheduleCycle ( TaskScheduleCycle kernelScheduleCycle ) ;\n\n     void setScheduleType ( TaskScheduleType kernelScheduleType ) ;\n\n     void setLastStartTime ( LocalDateTime lastStartTime );\n\n     void setLastEndTime ( LocalDateTime lastEndTime );\n\n     void setCreateTime ( LocalDateTime createTime );\n\n     void setUpdateTime ( LocalDateTime updateTime );\n\n     TaskInstrument getTaskInstrument();\n\n\n\n     
String getRunStatus ();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/InstanceInstrument.java",
    "content": "package com.pinecone.hydra.task.kom.instance;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.regime.Instrument;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.MetaPersistenceException;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.slime.meta.TableIndexMeta;\n\nimport java.time.LocalDateTime;\nimport java.util.Collection;\nimport java.util.List;\n\npublic interface InstanceInstrument extends Instrument {\n\n\n    void addInstance( InstanceEntry instanceEntry );\n\n    void addInstance( GUID taskGuid, InstanceEntry instanceEntry );\n\n    void updateInstance( InstanceEntry instanceEntry ) throws MetaPersistenceException;\n\n    InstanceEntry getInstanceEntry( GUID insGuid );\n\n    List<InstanceEntry> queryInstances( String taskTreePath, long offset, long pageSize );\n\n    long countInstanceByGuid( GUID taskGuid );\n\n    default List<InstanceEntry> queryInstances( String taskTreePath ) {\n        return this.queryInstances( this.getTaskInstrument().queryGUIDByPath( taskTreePath ) );\n    }\n\n    default List<InstanceEntry> queryInstances( GUID taskGuid ) {\n        return this.queryInstances( taskGuid, 0, this.countInstanceByGuid( taskGuid ) );\n    }\n\n    List<InstanceEntry> queryInstances( GUID taskGuid, long offset, long pageSize );\n\n    TaskInstrument getTaskInstrument();\n\n    InstanceEntry makeInstanceEntry( GUID taskGuid, @Nullable String insName, @Nullable LocalDateTime bizTime );\n\n    default InstanceEntry makeInstanceEntry( GUID taskGuid ) {\n        return this.makeInstanceEntry( taskGuid, null, null );\n    }\n\n    void removeInstance( GUID insGuid );\n\n    InstanceEntry findLastExecuted( GUID taskGuid, String bizTime );\n\n\n\n\n\n    TableIndexMeta querySchedulableIdRange( Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime );\n\n    List<InstanceEntry> 
fetchSchedulableInstances(\n            long idMin, long idMax, Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime\n    );\n\n    TableIndexMeta querySchedulableIdRange( Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime, short actuallyPriority );\n\n    List<InstanceEntry> fetchSchedulableInstances(\n            long idMin, long idMax, Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime, short actuallyPriority\n    );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/KernelInstanceInstrument.java",
    "content": "package com.pinecone.hydra.task.kom.instance;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.MetaPersistenceException;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.slime.meta.TableIndexMeta;\n\nimport java.time.LocalDateTime;\nimport java.util.Collection;\nimport java.util.List;\n\npublic class KernelInstanceInstrument implements InstanceInstrument {\n\n    protected InstanceNodeManipulator   mInstanceManipulator;\n\n    protected TaskInstrument            mTaskInstrument;\n\n\n    public KernelInstanceInstrument( TaskInstrument instrument, InstanceNodeManipulator manipulator ) {\n        this.mTaskInstrument = instrument;\n        this.mInstanceManipulator = manipulator;\n    }\n\n    @Override\n    public TaskInstrument getTaskInstrument() {\n        return this.mTaskInstrument;\n    }\n\n    @Override\n    public void addInstance( InstanceEntry instanceEntry ) {\n        this.mInstanceManipulator.insert( instanceEntry );\n    }\n\n    @Override\n    public void addInstance( GUID taskGuid, InstanceEntry instanceEntry ) {\n        instanceEntry.setTaskGuid( taskGuid );\n        if ( instanceEntry.getGuid() == null ) {\n            instanceEntry.setGuid( this.mTaskInstrument.getGuidAllocator().nextGUID() );\n        }\n        this.addInstance( instanceEntry );\n    }\n\n    @Override\n    public void updateInstance( InstanceEntry instanceEntry ) throws MetaPersistenceException {\n        try {\n            this.mInstanceManipulator.update( instanceEntry );\n        }\n        catch ( Exception e ) {\n            throw new MetaPersistenceException( e );\n        }\n    }\n\n    @Override\n    public 
InstanceEntry getInstanceEntry( GUID insGuid ) {\n        return this.mInstanceManipulator.queryByGuid( insGuid, this.mTaskInstrument );\n    }\n\n    @Override\n    public List<InstanceEntry> queryInstances( String taskPath, long offset, long pageSize ) {\n        GUID guid = this.mTaskInstrument.queryGUIDByPath( taskPath );\n        if ( guid == null ) {\n            return null;\n        }\n        return this.mInstanceManipulator.queryByTaskGuid( this.mTaskInstrument, guid, offset, pageSize );\n    }\n\n    @Override\n    public List<InstanceEntry> queryInstances( GUID taskGuid, long offset, long pageSize ) {\n        return this.mInstanceManipulator.queryByTaskGuid( this.mTaskInstrument, taskGuid, offset, pageSize );\n    }\n\n    @Override\n    public long countInstanceByGuid( GUID taskGuid ) {\n        return this.mInstanceManipulator.countInstanceByTaskGuid( taskGuid );\n    }\n\n    @Override\n    public InstanceEntry makeInstanceEntry( GUID taskGuid, @Nullable String insName, @Nullable LocalDateTime bizTime ) {\n        TreeNode tn = this.mTaskInstrument.get( taskGuid );\n        if ( tn instanceof TaskElement ) {\n            TaskElement taskElement = (TaskElement) tn;\n            InstanceEntry instanceEntry = new GenericInstanceEntry( this.mTaskInstrument, taskElement );\n            instanceEntry.setTaskGuid( taskGuid );\n            instanceEntry.setGuid( this.mTaskInstrument.getGuidAllocator().nextGUID() );\n            instanceEntry.setPriority( taskElement.getPriority() );\n            instanceEntry.setActuallyPriority( taskElement.getPriority() );\n            instanceEntry.setTaskType( taskElement.getType() );\n//            instanceEntry.setInstanceName( taskElement.getName() );\n//            instanceEntry.setBusinessTime( taskElement.getBusinessTime() );\n//            instanceEntry.setScheduleCycleCode( taskElement.getScheduleCycleCode() );\n            instanceEntry.setScheduleCycle( taskElement.getScheduleCycle() );\n            
instanceEntry.setScheduleType( taskElement.getScheduleType() );\n            instanceEntry.setRunCount( 0 );\n            instanceEntry.setDryRun( taskElement.isDryRun() );\n            instanceEntry.setInstanceStatus( TaskInstanceStatus.New );\n            return instanceEntry;\n        }\n        return null;\n    }\n\n    @Override\n    public void removeInstance( GUID insGuid ) {\n        this.mInstanceManipulator.remove( insGuid );\n    }\n\n    @Override\n    public InstanceEntry findLastExecuted( GUID taskGuid, String bizTime ) {\n        return this.mInstanceManipulator.findLastExecuted( taskGuid, this.mTaskInstrument, bizTime );\n    }\n\n\n\n\n\n    @Override\n    public TableIndexMeta querySchedulableIdRange( Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime ) {\n        return this.mInstanceManipulator.selectSchedulableIdRange( runStatuses, targetTime, null );\n    }\n\n    @Override\n    public List<InstanceEntry> fetchSchedulableInstances(\n            long idMin, long idMax, Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime\n    ) {\n        return this.mInstanceManipulator.fetchSchedulableInstances( this.mTaskInstrument, idMin, idMax, runStatuses, targetTime, null );\n    }\n\n    @Override\n    public TableIndexMeta querySchedulableIdRange( Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime, short actuallyPriority ) {\n        return this.mInstanceManipulator.selectSchedulableIdRange( runStatuses, targetTime, actuallyPriority );\n    }\n\n    @Override\n    public List<InstanceEntry> fetchSchedulableInstances(\n            long idMin, long idMax, Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime, short actuallyPriority\n    ) {\n        return this.mInstanceManipulator.fetchSchedulableInstances( this.mTaskInstrument, idMin, idMax, runStatuses, targetTime, actuallyPriority );\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/instance/source/InstanceNodeManipulator.java",
    "content": "package com.pinecone.hydra.task.kom.instance.source;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.slime.meta.TableIndexMeta;\n\nimport java.time.LocalDateTime;\nimport java.util.Collection;\nimport java.util.List;\n\npublic interface InstanceNodeManipulator extends Pinenut {\n\n    void insert( InstanceEntry instanceEntry );\n\n    void update( InstanceEntry instanceEntry );\n\n    InstanceEntry queryByGuid( GUID guid, TaskInstrument instrument );\n\n    int countInstance();\n\n    long countInstanceByName( String name );\n\n    List<InstanceEntry> fetchInstances( TaskInstrument instrument, long offset, long pageSize );\n\n    default List<InstanceEntry> fetchInstances( TaskInstrument instrument ) {\n        return this.fetchInstances( instrument, 0, this.countInstance() );\n    }\n\n    List<InstanceEntry> queryByTaskGuid( TaskInstrument instrument, GUID taskGuid, long offset, long pageSize );\n\n    long countInstanceByTaskGuid( GUID taskGuid );\n\n    void remove( GUID guid );\n\n    InstanceEntry findLastExecuted( GUID taskGuid, TaskInstrument instrument, String bizTime );\n\n\n    TableIndexMeta selectSchedulableIdRange( Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime, @Nullable Short actuallyPriority );\n\n    List<InstanceEntry> fetchSchedulableInstances(\n            TaskInstrument instrument,\n            long idMin, long idMax, Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime, @Nullable Short actuallyPriority\n    );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/marshaling/TaskInstrumentDecoder.java",
    "content": "package com.pinecone.hydra.task.kom.marshaling;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\n\npublic interface TaskInstrumentDecoder extends Pinenut {\n    default ElementNode decode( Object val, GUID parentGUID ) {\n        if ( val instanceof Map ) {\n            Map map = (Map) val;\n            if( map.isEmpty() ) {\n                return null;\n            }\n            else if( map.size() > 1 ) {\n                throw new IllegalArgumentException( \"Root element should has at last 1.\" );\n            }\n\n            Map.Entry kv = (Map.Entry) map.entrySet().iterator().next();\n            return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );\n        }\n\n        return null;\n    }\n\n    ElementNode decode( String key, Object val, GUID parentGUID );\n\n    default ElementNode decode( Map.Entry kv, GUID parentGUID ) {\n        return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );\n    }\n\n    default ElementNode decode( Object val ) {\n        return this.decode( val, null );\n    }\n\n    default ElementNode decode( String key, Object val ) {\n        return this.decode( key, val, null );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/marshaling/TaskInstrumentEncoder.java",
    "content": "package com.pinecone.hydra.task.kom.marshaling;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\n\npublic interface TaskInstrumentEncoder extends Pinenut {\n    Object encode(ElementNode node);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/marshaling/TaskJSONDecoder.java",
    "content": "package com.pinecone.hydra.task.kom.marshaling;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.AppElement;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\nimport com.pinecone.hydra.task.kom.entity.FolderElement;\nimport com.pinecone.hydra.task.kom.entity.GenericAppElement;\nimport com.pinecone.hydra.task.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.task.kom.entity.GenericTaskElement;\nimport com.pinecone.hydra.task.kom.entity.Namespace;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\n\npublic class TaskJSONDecoder implements TaskInstrumentDecoder {\n    protected TaskInstrument instrument;\n\n    public TaskJSONDecoder( TaskInstrument instrument ) {\n        this.instrument = instrument;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public ElementNode    decode( String szName, Object o, GUID parentGuid ) {\n        if ( o instanceof Map ) {\n            return (ElementNode) this.instrument.get( this.decodeJSONObject( szName, (Map<String, Object>) o, parentGuid ).getGuid() );\n        }\n\n        throw new IllegalArgumentException( \"Elements of `TaskInstrument` should all be object.\" );\n    }\n\n    protected Namespace   newNamespace( String szName, Map<String, Object > jo ) {\n        Namespace ns = new GenericNamespace( jo, this.instrument );\n        ns.setName( szName );\n\n        return ns;\n    }\n\n    protected Object[]    affirmNSExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        Namespace ns = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceNamespace() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed 
child-destination [%s] should be namespace.\", szName )\n                    );\n                }\n\n                ns = rootE.evinceNamespace();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            if( parentNode instanceof Namespace ) {\n                Collection<ElementNode> destChildren = parentNode.evinceNamespace().fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof Namespace ) {\n                            ns = (Namespace) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"<TaskInstrument> Existed child-destination [%s] should be namespace.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n        }\n\n\n        GUID currentGuid;\n        if( ns == null ) {\n            ns = this.newNamespace( szName, jo );\n            currentGuid  = this.instrument.put( ns );\n            this.instrument.affirmOwnedNode( parentGuid, currentGuid );\n        }\n        else {\n            currentGuid = ns.getGuid();\n        }\n        return new Object[] { ns, currentGuid };\n    }\n\n    protected Object[]    affirmAppExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        AppElement job = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceAppElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be `AppElement`.\", szName )\n                    );\n     
           }\n\n                job = rootE.evinceAppElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            if( parentNode instanceof Namespace ) {\n                Collection<ElementNode> destChildren = parentNode.evinceNamespace().fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof AppElement) {\n                            job = (AppElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `AppElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n        }\n\n\n\n        AppElement neo ;\n        if( job == null ) {\n            neo = new GenericAppElement( jo, this.instrument );\n            neo.setName( szName );\n        }\n        else {\n            neo = job;\n        }\n        return new Object[] { job, neo };\n    }\n\n    protected Object[]    affirmTasExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        TaskElement task = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceTaskElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be `TaskElement`.\", szName )\n                    );\n                }\n\n                task = rootE.evinceTaskElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid 
);\n            Collection<ElementNode> destChildren;\n            if( parentNode instanceof FolderElement ) {\n                destChildren = ( (FolderElement) parentNode ).fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof TaskElement ) {\n                            task = (TaskElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `TaskElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n            else {\n                throw new IllegalStateException(\n                        String.format( \"Parent of `TaskElement` [%s] should be `FolderElement`.\", szName )\n                );\n            }\n        }\n\n\n\n        TaskElement neo ;\n        if( task == null ) {\n            neo = new GenericTaskElement( jo, this.instrument );\n            neo.setName( szName );\n        }\n        else {\n            neo = task;\n        }\n        return new Object[] { task, neo };\n    }\n\n    protected Object[]    decodeExternalElements( String szMetaType, String szName, GUID parentGuid, Map<String, Object > jo ) throws IllegalArgumentException {\n        throw new IllegalArgumentException( \"Unknown metaType '\" + szMetaType + \"'.\" );\n    }\n\n    protected void        decodeChildren ( Map jo, GUID currentGuid ) {\n        for ( Object o : jo.entrySet() ) {\n            Map.Entry kv = (Map.Entry) o;\n            Object   val = kv.getValue();\n            if( val instanceof Map ) {\n                this.decode( kv.getKey().toString(), val, currentGuid );\n            }\n        }\n    }\n\n    protected ElementNode decodeJSONObject( String 
szName, Map<String, Object > jo, GUID parentGuid ) {\n        String szMetaType = (String) jo.get( \"metaType\" );\n        boolean isNamespace = szMetaType == null || szMetaType.equals( Namespace.class.getSimpleName() );\n        ElementNode elementNode;\n        GUID currentGuid;\n\n        if ( isNamespace ) {\n            Object[] pair = this.affirmNSExisted( szName, parentGuid, jo );\n            Namespace     ns = (Namespace) pair[ 0 ];\n            currentGuid      = (GUID)      pair[ 1 ];\n\n            this.decodeChildren( jo, currentGuid );\n\n            elementNode = ns;\n        }\n        else {\n            Object[] pair;\n            boolean bIsFolderElement = false;\n            if( szMetaType.equals( AppElement.class.getSimpleName() ) ) {\n                pair = this.affirmAppExisted( szName, parentGuid, jo );\n                bIsFolderElement = true;\n            }\n            else if( szMetaType.equals( TaskElement.class.getSimpleName() ) ) {\n                pair = this.affirmTasExisted( szName, parentGuid, jo );\n            }\n            else {\n                try{\n                    pair = this.decodeExternalElements( szMetaType, szName, parentGuid, jo );\n                }\n                catch ( RuntimeException e ) {\n                    throw new IllegalArgumentException( e );\n                }\n            }\n\n            ElementNode          arc = (ElementNode) pair[ 0 ];\n            ElementNode          neo = (ElementNode) pair[ 1 ];\n\n            if( arc == null ) {\n                currentGuid = this.instrument.put( neo );\n                this.instrument.affirmOwnedNode( parentGuid, currentGuid );\n            }\n            else {\n                currentGuid = arc.getGuid();\n                this.instrument.update( neo );\n            }\n\n            if( bIsFolderElement ) {\n                Object subs = jo.get( \"tasks\" );\n                if( subs instanceof Map ) {\n                    Map joSer = (Map) subs;\n    
                this.decodeChildren( joSer, currentGuid );\n                }\n            }\n\n            elementNode = neo;\n        }\n\n        return elementNode;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/marshaling/TaskJSONEncoder.java",
    "content": "package com.pinecone.hydra.task.kom.marshaling;\n\n\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\n\npublic class TaskJSONEncoder implements TaskInstrumentEncoder {\n    protected TaskInstrument instrument;\n\n    public TaskJSONEncoder(TaskInstrument instrument ) {\n        this.instrument = instrument;\n    }\n\n    @Override\n    public Object encode( ElementNode node ) {\n        return node.toJSONObject();\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/AppElementOperator.java",
    "content": "package com.pinecone.hydra.task.kom.operator;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.GenericAppElement;\nimport com.pinecone.hydra.task.kom.entity.AppElement;\nimport com.pinecone.hydra.task.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.task.kom.source.AppNodeManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class AppElementOperator extends ArchElementOperator implements ElementOperator {\n    protected AppNodeManipulator appNodeManipulator;\n\n    public AppElementOperator(ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.taskInstrument() );\n        this.factory = factory;\n    }\n\n    public AppElementOperator(TaskMasterManipulator masterManipulator, TaskInstrument taskInstrument){\n        super( masterManipulator, taskInstrument);\n        this.appNodeManipulator = masterManipulator.getAppNodeManipulator();\n    }\n\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericAppElement jobElement = (GenericAppElement) treeNode;\n\n        GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator();\n        GUID jobNodeGUID = guidAllocator.nextGUID();\n        jobElement.setGuid( jobNodeGUID );\n        this.appNodeManipulator.insert( jobElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setNodeMetadataGUID(jobNodeGUID);\n        node.setGuid(jobNodeGUID);\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        
this.imperialTree.insert( node );\n        return jobNodeGUID;\n    }\n\n\n    @Override\n    public void purge( GUID guid ) {\n        //namespace节点需要递归删除其拥有节点若其引用节点，没有其他引用则进行清理\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren(guid);\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        if ( !childNodes.isEmpty() ){\n            List<GUID > subordinates = this.imperialTree.getSubordinates(guid);\n            if ( !subordinates.isEmpty() ){\n                for ( GUID subordinateGuid : subordinates ){\n                    this.purge( subordinateGuid );\n                }\n            }\n            childNodes = this.imperialTree.getChildren( guid );\n            for( GUIDImperialTrieNode childNode : childNodes ){\n                List<GUID > parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid());\n                if ( parentNodes.size() > 1 ){\n                    this.imperialTree.removeInheritance(childNode.getGuid(),guid);\n                }\n                else {\n                    this.purge( childNode.getGuid() );\n                }\n            }\n        }\n\n        if ( node.getType().getObjectName().equals( GenericNamespace.class.getName() ) ){\n            this.removeNode(guid);\n        }\n        else {\n            UOI uoi = node.getType();\n            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );\n            if( metaType == null ) {\n                TreeNode newInstance = (TreeNode)uoi.newInstance( new Class<? 
>[]{ TaskInstrument.class }, this.taskInstrument);\n                metaType = newInstance.getMetaType();\n            }\n\n            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );\n            operator.purge( guid );\n        }\n    }\n\n    @Override\n    public AppElement get(GUID guid ) {\n        AppElement appElement;\n        appElement = this.appNodeManipulator.getAppElement( guid, this.taskInstrument );\n\n        appElement.setGuid(appElement.getGuid());\n        return appElement;\n    }\n\n    @Override\n    public AppElement get(GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public AppElement getAsRootDepth(GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n        GenericAppElement applicationElement = (GenericAppElement) treeNode;\n        this.appNodeManipulator.update( applicationElement );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    protected void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath(guid);\n        this.appNodeManipulator.remove( node.getGuid( ));\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/ArchElementOperator.java",
    "content": "package com.pinecone.hydra.task.kom.operator;\n\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic abstract class ArchElementOperator implements ElementOperator {\n    protected TaskInstrument                taskInstrument;\n    protected ImperialTree                  imperialTree;\n    protected TaskMasterManipulator         taskMasterManipulator;\n    protected ElementOperatorFactory        factory;\n\n    public ArchElementOperator( ElementOperatorFactory factory ){\n        this( factory.getTaskMasterManipulator(), factory.taskInstrument() );\n        this.factory = factory;\n    }\n    public ArchElementOperator( TaskMasterManipulator masterManipulator, TaskInstrument taskInstrument){\n        this.imperialTree = taskInstrument.getMasterTrieTree();\n        this.taskInstrument = taskInstrument;\n        this.taskMasterManipulator = masterManipulator;\n        //this.factory = new GenericServiceOperatorFactory(servicesTree,masterManipulator);\n    }\n\n    public ElementOperatorFactory getOperatorFactory() {\n        return this.factory;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/ElementOperator.java",
    "content": "package com.pinecone.hydra.task.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface ElementOperator extends TreeNodeOperator {\n    @Override\n    ElementNode get(GUID guid);\n\n    @Override\n    ElementNode get(GUID guid, int depth);\n\n    @Override\n    ElementNode getAsRootDepth(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/ElementOperatorFactory.java",
    "content": "package com.pinecone.hydra.task.kom.operator;\n\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.AppElement;\nimport com.pinecone.hydra.task.kom.entity.Namespace;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface ElementOperatorFactory extends OperatorFactory {\n    String DefaultServiceNode     =  TaskElement.class.getSimpleName();\n    String DefaultNamespace       =  Namespace.class.getSimpleName();\n    String DefaultApplicationNode =  AppElement.class.getSimpleName();\n\n    void register(String typeName, TreeNodeOperator functionalNodeOperation);\n\n    void registerMetaType(Class<?> clazz, String metaType);\n\n    void registerMetaType(String classFullName, String metaType);\n\n    String getMetaType(String classFullName);\n\n    ElementOperator getOperator(String typeName);\n\n    TaskInstrument taskInstrument();\n\n    TaskMasterManipulator getTaskMasterManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/GenericElementOperatorFactory.java",
    "content": "package com.pinecone.hydra.task.kom.operator;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.TreeMap;\n\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.GenericAppElement;\nimport com.pinecone.hydra.task.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.task.kom.entity.GenericTaskElement;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic class GenericElementOperatorFactory implements ElementOperatorFactory {\n    protected TaskMasterManipulator taskMasterManipulator;\n    protected TaskInstrument taskInstrument;\n    protected Map<String, TreeNodeOperator> registerer = new HashMap<>();\n\n    protected Map<String, String >             metaTypeMap = new TreeMap<>();\n\n    protected void registerDefaultMetaType( Class<?> genericType ) {\n        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace(\"Generic\",\"\") );\n    }\n\n    protected void registerDefaultMetaTypes() {\n        this.registerDefaultMetaType( GenericNamespace.class );\n        this.registerDefaultMetaType( GenericTaskElement.class );\n        this.registerDefaultMetaType( GenericAppElement.class );\n    }\n\n    public GenericElementOperatorFactory(TaskInstrument taskInstrument, TaskMasterManipulator taskMasterManipulator){\n        this.taskInstrument = taskInstrument;\n        this.taskMasterManipulator = taskMasterManipulator;\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultServiceNode,\n                new TaskElementOperator( this )\n        );\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultApplicationNode,\n                new AppElementOperator(this)\n        );\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultNamespace,\n                new NamespaceOperator(this)\n        );\n\n        
this.registerDefaultMetaTypes();\n    }\n    @Override\n    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {\n        this.registerer.put( typeName, functionalNodeOperation );\n    }\n\n    @Override\n    public void registerMetaType( Class<?> clazz, String metaType ){\n        this.registerMetaType( clazz.getName(), metaType );\n    }\n\n    @Override\n    public void registerMetaType( String classFullName, String metaType ){\n        this.metaTypeMap.put( classFullName, metaType );\n    }\n\n    @Override\n    public TaskInstrument taskInstrument() {\n        return this.taskInstrument;\n    }\n\n    @Override\n    public TaskMasterManipulator getTaskMasterManipulator() {\n        return this.taskMasterManipulator;\n    }\n\n    @Override\n    public String getMetaType( String classFullName ) {\n        return this.metaTypeMap.get( classFullName );\n    }\n\n    @Override\n    public ElementOperator getOperator(String typeName ) {\n        //Debug.trace( this.registerer.toString() );\n        return (ElementOperator) this.registerer.get( typeName );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/NamespaceOperator.java",
    "content": "package com.pinecone.hydra.task.kom.operator;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.GenericAppElement;\nimport com.pinecone.hydra.task.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.task.kom.entity.Namespace;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class NamespaceOperator extends ArchElementOperator implements ElementOperator {\n    protected TaskNamespaceManipulator namespaceManipulator;\n\n    public NamespaceOperator( ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.taskInstrument() );\n        this.factory = factory;\n    }\n\n    public NamespaceOperator( TaskMasterManipulator masterManipulator, TaskInstrument taskInstrument ) {\n        super( masterManipulator, taskInstrument);\n        this.namespaceManipulator = masterManipulator.getNamespaceManipulator();\n    }\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericNamespace ns = ( GenericNamespace ) treeNode;\n\n        //存节点基础信息\n        GuidAllocator          guidAllocator = this.taskInstrument.getGuidAllocator();\n\n        GUID namespaceGuid = guidAllocator.nextGUID();\n        ns.setGuid( namespaceGuid );\n        this.namespaceManipulator.insert( ns );\n\n        //存元信息\n        GUID metadataGUID = guidAllocator.nextGUID();\n        ns.setMetaGuid( metadataGUID );\n        //this.nodeMetaManipulator.insertNS( ns );\n\n\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setGuid( 
namespaceGuid );\n        node.setNodeMetadataGUID( metadataGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return namespaceGuid;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        //namespace节点需要递归删除其拥有节点若其引用节点，没有其他引用则进行清理\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren(guid);\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        if ( !childNodes.isEmpty() ){\n            List<GUID > subordinates = this.imperialTree.getSubordinates(guid);\n            if ( !subordinates.isEmpty() ){\n                for ( GUID subordinateGuid : subordinates ){\n                    this.purge( subordinateGuid );\n                }\n            }\n            childNodes = this.imperialTree.getChildren( guid );\n            for( GUIDImperialTrieNode childNode : childNodes ){\n                List<GUID > parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid());\n                if ( parentNodes.size() > 1 ){\n                    this.imperialTree.removeInheritance(childNode.getGuid(),guid);\n                }\n                else {\n                    this.purge( childNode.getGuid() );\n                }\n            }\n        }\n\n        if ( node.getType().getObjectName().equals(GenericNamespace.class.getName()) ||  node.getType().getObjectName().equals(GenericAppElement.class.getName())){\n            this.removeNode(guid);\n        }\n        else {\n            UOI uoi = node.getType();\n            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );\n            if( metaType == null ) {\n                TreeNode newInstance = (TreeNode)uoi.newInstance( new Class<? 
>[]{ TaskInstrument.class }, this.taskInstrument);\n                metaType = newInstance.getMetaType();\n            }\n\n            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );\n            operator.purge( guid );\n        }\n    }\n\n    @Override\n    public Namespace get( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        GenericNamespace                      namespace = new GenericNamespace( this.taskInstrument);\n        GUIDImperialTrieNode guidDistributedTrieNode = this.imperialTree.getNode( node.getGuid() );\n\n        GUID metaGuid = guidDistributedTrieNode.getNodeMetadataGUID();\n        namespace.setDistributedTreeNode( guidDistributedTrieNode );\n        namespace.setName( this.namespaceManipulator.getNamespace( guid ).getName() );\n        namespace.setGuid( guid );\n        namespace.setMetaGuid( metaGuid );\n\n        return namespace;\n    }\n\n    @Override\n    public Namespace get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public Namespace getAsRootDepth( GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        GenericNamespace ns = ( GenericNamespace ) nodeWideData;\n        this.namespaceManipulator.update( ns );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    protected void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.namespaceManipulator.remove( node.getGuid() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/operator/TaskElementOperator.java",
    "content": "package com.pinecone.hydra.task.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.GenericTaskElement;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskNodeManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class TaskElementOperator extends ArchElementOperator implements ElementOperator {\n    protected TaskNodeManipulator taskNodeManipulator;\n\n    public TaskElementOperator( ElementOperatorFactory factory ) {\n        this( factory.getTaskMasterManipulator(),factory.taskInstrument() );\n        this.factory = factory;\n    }\n\n    public TaskElementOperator( TaskMasterManipulator masterManipulator, TaskInstrument taskInstrument ){\n        super( masterManipulator, taskInstrument);\n        this.taskNodeManipulator = masterManipulator.getTaskNodeManipulator();\n    }\n\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericTaskElement taskElement = (GenericTaskElement) treeNode;\n\n        //将信息写入数据库\n        //将节点信息存入应用节点表\n        GuidAllocator guidAllocator = this.taskInstrument.getGuidAllocator();\n        GUID taskNodeGUID = guidAllocator.nextGUID();\n        taskElement.setGuid(taskNodeGUID);\n        this.taskNodeManipulator.insert( taskElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setNodeMetadataGUID( taskNodeGUID ); // Since 20250419, the meta has been merged into the `node`.\n        node.setGuid( taskNodeGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        
this.imperialTree.insert( node );\n        return taskNodeGUID;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        this.removeNode( guid );\n    }\n\n    @Override\n    public TaskElement get( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        TaskElement taskElement   = this.taskNodeManipulator.getTaskNode( guid, this.taskInstrument );\n\n        taskElement.setDistributedTreeNode(node);\n        taskElement.setGuid( guid );\n\n        return taskElement;\n    }\n\n    @Override\n    public TaskElement get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public TaskElement getAsRootDepth( GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        TaskElement serviceElement = (TaskElement) nodeWideData;\n        this.taskNodeManipulator.update( serviceElement );\n        this.imperialTree.removeCachePath( serviceElement.getGuid() );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.taskNodeManipulator.remove(node.getGuid());\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/source/AppNodeManipulator.java",
    "content": "package com.pinecone.hydra.task.kom.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.AppElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\npublic interface AppNodeManipulator extends GUIDNameManipulator {\n\n    void insert( AppElement appElement);\n\n    void remove( GUID guid );\n\n    AppElement getAppElement( GUID guid, TaskInstrument instrument );\n\n    void update( AppElement appElement);\n\n    List<AppElement> fetchJobNodeByName( String name );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/source/TaskMasterManipulator.java",
    "content": "package com.pinecone.hydra.task.kom.source;\n\nimport com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\n\npublic interface TaskMasterManipulator extends KOIMasterManipulator {\n\n    TrieTreeManipulator getTrieTreeManipulator() ;\n\n    AppNodeManipulator getAppNodeManipulator();\n\n    TaskNodeManipulator getTaskNodeManipulator();\n\n    TaskNamespaceManipulator getNamespaceManipulator();\n\n    TireOwnerManipulator getTireOwnerManipulator();\n\n    InstanceNodeManipulator getInstanceNodeManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/source/TaskNamespaceManipulator.java",
    "content": "package com.pinecone.hydra.task.kom.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.entity.Namespace;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\npublic interface TaskNamespaceManipulator extends GUIDNameManipulator {\n    void insert(Namespace ns);\n\n    void remove(GUID guid);\n\n    Namespace getNamespace(GUID guid);\n\n    void update(Namespace ns);\n\n    List<Namespace > fetchNamespaceNodeByName(String name);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/kom/source/TaskNodeManipulator.java",
    "content": "package com.pinecone.hydra.task.kom.source;\n\nimport java.time.LocalDateTime;\nimport java.util.Collection;\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.GenericTaskElement;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.slime.meta.TableIndex64Meta;\n\npublic interface TaskNodeManipulator extends GUIDNameManipulator {\n\n    void insert( TaskElement taskElement );\n\n    void remove( GUID UUID );\n\n    TaskElement getTaskNode( GUID guid, TaskInstrument instrument );\n\n    void update( TaskElement taskElement );\n\n    List<TaskElement> fetchTaskNodeByName( String name );\n\n    @Override\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n\n\n    TableIndex64Meta selectSchedulableIdRange( Collection<TaskScheduleCycle> cycles, LocalDateTime targetTime );\n\n    List<TaskElement> fetchSchedulableTasksInRange( long idMin, long idMax, Collection<TaskScheduleCycle> cycles, LocalDateTime targetTime );\n\n    List<TaskElement> listPage(int offset, int pageSize);\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/marshal/TaskPriority.java",
    "content": "package com.pinecone.hydra.task.marshal;\n\nimport java.util.Arrays;\n\npublic enum TaskPriority {\n    MAX(999),\n    UNLIMITED(500),\n\n    L0(50),\n    L1(40),\n    L2(30),\n    L3(20),\n    L4(10),\n    L5(0);\n\n    private final int value;\n\n    private TaskPriority(int value ) {\n        this.value = value;\n    }\n\n    public Integer getValue() {\n        return this.value;\n    }\n\n    public static Integer byName( String name ) {\n        try {\n            TaskPriority taskPriority = valueOf(name);\n            return taskPriority.getValue();\n        }\n        catch ( IllegalArgumentException e ) {\n            return null;\n        }\n    }\n\n    public static TaskPriority of(int value ) {\n        return (TaskPriority) Arrays.stream(values())\n                .filter((it) -> it.value == value)\n                .findFirst().orElse((TaskPriority) null);\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/marshal/TaskScheduleCycle.java",
    "content": "package com.pinecone.hydra.task.marshal;\n\npublic enum TaskScheduleCycle {\n    Undefined      ( 0x00, \"Undefined\" ),\n    Minute         ( 0x01, \"Minute\" ),\n    Hour           ( 0x02, \"Hour\" ),\n    Day            ( 0x03, \"Day\" ),\n    Week           ( 0x04, \"Week\" ),\n    Month          ( 0x05, \"Month\" ),\n\n\n    // Sub second level scheduling, unable to use regular scheduling channels,\n    // requires client caching status (instance at minimum minute level)\n    // 亚秒级调度，无法走常规调度通道，需要客户端缓存状态（实例为最小分钟级）[无法生成秒级实例]\n    TickSecond     ( 0xC0, \"TickSecond\" ),\n    TickMills      ( 0xC1, \"TickMills\"  ),\n    ;\n\n    private final int code;\n\n    private final String name;\n\n    TaskScheduleCycle(int code, String name ) {\n        this.code = code;\n        this.name = name;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public static TaskScheduleCycle getByCode( int code ) {\n        for ( TaskScheduleCycle cycle : TaskScheduleCycle.values() ) {\n            if ( cycle.code == code ) {\n                return cycle;\n            }\n        }\n\n        return null;\n    }\n\n    public static TaskScheduleCycle getByName( String name ) {\n        for ( TaskScheduleCycle cycle : TaskScheduleCycle.values() ) {\n            if ( cycle.name == name ) {\n                return cycle;\n            }\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/main/java/com/pinecone/hydra/task/marshal/TaskScheduleType.java",
    "content": "package com.pinecone.hydra.task.marshal;\n\npublic enum TaskScheduleType {\n    Undefined      ( 0x00, \"Undefined\" ),\n    Cycle          ( 0x01, \"Cycle\" ),\n    Manual         ( 0x02, \"Manual\" ),\n    Triggered      ( 0x03, \"Triggered\" ),\n    Resident       ( 0x04, \"Resident\" ),\n    ;\n\n    private final int code;\n\n    private final String name;\n\n    TaskScheduleType(int code, String name ) {\n        this.code = code;\n        this.name = name;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public static TaskScheduleType getByCode(int code ) {\n        for ( TaskScheduleType type : TaskScheduleType.values() ) {\n            if ( type.code == code ) {\n                return type;\n            }\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/test/java/com/ioc/SystemTestIoC.java",
    "content": "package com.ioc;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.construction.*;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.auto.ArchInstructation;\nimport com.pinecone.hydra.auto.Instructation;\nimport com.pinecone.hydra.auto.PeriodicAutomaton;\nimport com.pinecone.hydra.auto.PeriodicAutomatron;\n\nclass SpawnInstruct extends ArchInstructation {\n    @Structure( cycle = ReuseCycle.Disposable )\n    Actor npc;\n\n    public SpawnInstruct() {\n        super();\n    }\n\n    @Override\n    public void execute() throws Exception {\n        Debug.trace( this.npc.name +\" spawned !\" );\n    }\n}\n\npublic class SystemTestIoC {\n    public static void testUnifyStructureInjector_MobSpawnner( )  throws Exception {\n        PeriodicAutomatron modSpawnner = new PeriodicAutomaton( null, 500 );\n        modSpawnner.start();\n\n        StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser();\n        UnifyStructureInjector injector = new UnifyStructureInjector( SpawnInstruct.class, dispenser );\n\n        Thread elderBrain = new Thread(()->{\n            for ( int i = 0; i < 100; i++ ) {\n                Debug.sleep( 50 );\n                modSpawnner.command( (Instructation) injector.inject( new JSONMaptron(\n                        \"{npc:{ name:NPC\"+i+\", hp:9999, species: Hydra, weapon:{ name:SuperDragon, damage:9999, attachedMagic:fire } }}\"\n                ), new SpawnInstruct() ) );\n            }\n        });\n\n        elderBrain.start();\n        elderBrain.join();\n        modSpawnner.join();\n    }\n\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n\n            //TestIoC.testInstancePool();\n            //TestIoC.testUnifyStructureInjector_Simple();\n            //TestIoC.testUnifyStructureInjector_List();\n            
SystemTestIoC.testUnifyStructureInjector_MobSpawnner();\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/test/java/com/ioc/TestIoC.java",
    "content": "package com.ioc;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.construction.*;\nimport com.pinecone.framework.system.prototype.ObjectiveMap;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.DirectJSONInjector;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.framework.util.json.homotype.MapStructure;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\n\nimport java.util.List;\nimport java.util.Map;\n\nenum Species {\n    Dragon,\n    Lizard,\n    Bear,\n    Wolf,\n    Devil,\n    Human,\n    Druid,\n    Elf,\n    Dwarf,\n}\n\nclass Weapon {\n    @JSONGet\n    protected String    name;\n\n    @MapStructure\n    protected int       damage;\n\n    @JSONGet( \"attachedMagic\" )\n    protected String    magic;\n\n    public String toJSONString() {\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n\n    public String toString(){\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n}\n\nclass Actor {\n    @JSONGet\n    protected String    name;\n\n    @JSONGet\n    protected int       hp;\n\n    @JSONGet\n    protected Species   species = Species.Dragon;\n\n    @Structure( cycle = ReuseCycle.Disposable )\n    protected Weapon    weapon;\n\n    protected boolean   live = true;\n\n    public String toJSONString() {\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n\n    public String toString(){\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n\n}\n\n\nclass Team {\n    @Structure( type = Actor.class, cycle = ReuseCycle.Disposable )\n    protected List<Actor > craws;\n\n    public String toJSONString() {\n        return DirectJSONInjector.instance().inject( this 
).toString();\n    }\n\n    public String toString(){\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n}\n\nclass Tale {\n    @Structure\n    protected String              type;\n\n    @Structure( type = Team.class, cycle = ReuseCycle.Recyclable )\n    protected Map<String, Team >  worlds;\n\n    public String toJSONString() {\n        return ( (JSONObject)DirectJSONInjector.instance().inject( this )).toJSONStringI(2);\n    }\n\n    public String toString(){\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n}\n\n\npublic class TestIoC {\n    public static void testInstancePool( )  {\n        DynamicFactory wolfNPCFactory           = new GenericDynamicFactory();\n        GenericDynamicInstancePool<Actor > npcs = new GenericDynamicInstancePool<>( wolfNPCFactory, 0, 0, Actor.class );\n\n        for ( int i = 0; i < 1e2; ++i ) {\n            Actor npc = npcs.allocate();\n            Debug.trace( npc );\n            npcs.free(npc);\n        }\n    }\n\n    public static void testUnifyStructureInjector_Simple( )  throws Exception {\n        StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser();\n        UnifyStructureInjector injector = new UnifyStructureInjector( Actor.class, dispenser );\n        Actor actor = new Actor();\n\n        JSONObject jo = new JSONMaptron( \"{ name:RedPrince, hp:100, species: Lizard, weapon:{ name:TyrantSuit, damage:70, attachedMagic:fire } }\" );\n        injector.inject( new ObjectiveMap<>(jo), actor );\n        Debug.trace( actor );\n    }\n\n    public static void testUnifyStructureInjector_List( )  throws Exception {\n        StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser();\n        UnifyStructureInjector injector = new UnifyStructureInjector( Team.class, dispenser );\n        Team team = new Team();\n\n        JSONObject jo = new JSONMaptron( \"{ craws: [\" +\n                \"{ name:RedPrince, hp:100, species: Lizard, 
weapon:{ name:TyrantSuit, damage:70, attachedMagic:fire } }, \" +\n                \"{ name:Ifan, hp:90, species: Human, weapon:{ name:Ranger, damage:50, attachedMagic:lightning } }, \" +\n                \"] }\"\n        );\n        injector.inject( new ObjectiveMap<>(jo), team );\n        Debug.trace( team );\n    }\n\n    public static void testUnifyStructureInjector_Sophisticate( )  throws Exception {\n        StructureInstanceDispenser dispenser = new UnifyCentralInstanceDispenser();\n        UnifyStructureInjector injector = new UnifyStructureInjector( Tale.class, dispenser );\n        Tale tale = new Tale();\n\n        JSONObject jo = new JSONMaptron( \"{ type:fantasy, worlds :{ d2: { craws: [\" +\n                \"{ name:RedPrince, hp:100, species: Lizard, weapon:{ name:TyrantSuit, damage:70, attachedMagic:fire } }, \" +\n                \"{ name:Ifan, hp:90, species: Human, weapon:{ name:Ranger, damage:50, attachedMagic:lightning } }, \" +\n                \"] },\" +\n                \"b3: { craws: [\" +\n                \"{ name:DarkUrge, hp:120, species: Dragon, weapon:{ name:DragonSoul, damage:9999, attachedMagic:fire } }, \" +\n                \"{ name:Karlack, hp:100, species: Devil, weapon:{ name:Everburn Blade, damage:120, attachedMagic:fire } }, \" +\n                \"] },\" +\n                \"no: { craws: [\" +\n                \"{ name:Hydra, hp:9999, species: Hydra, weapon:{ name:SuperDragon, damage:9999, attachedMagic:fire } }, \" +\n                \"] }\" +\n                \" }}\"\n        );\n        injector.inject( new ObjectiveMap<>(jo), tale );\n        Debug.trace( tale );\n    }\n\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n\n            //TestIoC.testInstancePool();\n            //TestIoC.testUnifyStructureInjector_Simple();\n            //TestIoC.testUnifyStructureInjector_List();\n            TestIoC.testUnifyStructureInjector_Sophisticate();\n\n\n            
return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/test/java/com/orchestration/SimpleExertium.java",
    "content": "package com.orchestration;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.orchestration.Exertium;\n\npublic class SimpleExertium extends Exertium {\n    String mszToken;\n\n    public SimpleExertium( String szWho ) {\n        this.mszToken = szWho;\n    }\n\n    @Override\n    protected void doStart() {\n        Debug.trace( \"Hello hi, I am \" + this.mszToken );\n        Debug.sleep(50);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/test/java/com/orchestration/SimpleParallelium.java",
    "content": "package com.orchestration;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.orchestration.parallel.ArchMasterParallelium;\n\npublic class SimpleParallelium extends ArchMasterParallelium {\n    String mszToken;\n\n    public SimpleParallelium( String szWho ) {\n        this.mszToken = szWho;\n    }\n\n    @Override\n    protected void doStart() {\n        Debug.trace( \"Hello hi, I am \" + this.mszToken );\n        Debug.trace( Thread.currentThread().getName() );\n\n        Debug.sleep(100);\n    }\n\n\n    @Override\n    protected void doTerminate() {\n\n    }\n\n    @Override\n    protected void doRollback() {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/test/java/com/orchestration/TestBasicTransaction.java",
    "content": "package com.orchestration;\n\nimport com.pinecone.hydra.orchestration.*;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.NotImplementedException;\n\npublic class TestBasicTransaction {\n    public static void testSequential() {\n        SequentialAction action = new SequentialAction();\n        //LoopAction action = new LoopAction();\n        //ParallelAction action = new ParallelAction();\n\n        action.getSeqExceptionNeglector().add( NotImplementedException.class );\n\n        action.add( new SimpleExertium( \"Gay0\" ) );\n        action.add( new SimpleExertium( \"Gay1\" ) );\n\n        Exertium e = new Exertium();\n        //action.add( e );\n\n\n        SequentialAction sa = new SequentialAction();\n        sa.add( new SimpleExertium( \"A0\" ) );\n        sa.add( new SimpleExertium( \"A1\" ) );\n        sa.add( new SimpleExertium( \"A2\" ) );\n        sa.add( new JumpPoint( 0 ) );\n        action.add( sa );\n\n\n        action.add( new SimpleExertium( \"Gay2\" ) );\n\n        action.add( ProcessController.BREAK );\n\n        action.add( new SimpleExertium( \"Gay3\" ) );\n        action.add( new SimpleExertium( \"Gay4\" ) );\n        //action.add( new JumpPoint(1));\n\n        action.start();\n\n    }\n\n    public static void testParallel() {\n        ParallelAction action = new ParallelAction();\n        action.add( ( new SimpleParallelium( \"P0\" ) ) );\n        action.add( new SimpleParallelium( \"P1\" ) );\n        action.add( new SimpleParallelium( \"P2\" ) );\n\n        action.add( new SimpleExertium( \"E0\" ) );\n        action.add( new SimpleExertium( \"E1\" ) );\n        action.add( new SimpleExertium( \"E2\" ) );\n        action.add( new SimpleExertium( \"E3\" ) );\n\n        action.add( new SimpleParallelium( \"P3\" ) );\n        action.add( new SimpleParallelium( \"P4\" ) );\n        action.add( new SimpleParallelium( \"P5\" ) );\n        action.add( new SimpleParallelium( \"P6\" ) );\n        
action.start();\n\n    }\n\n    public static void testGraph() {\n\n        Transaction a0 = new SequentialAction();\n        a0.add( ( new SimpleExertium( \"E0_0\" ) ) );\n        a0.add( ( new SimpleExertium( \"E0_1\" ) ) );\n\n        ParallelAction a1 = new ParallelAction();\n        a1.add( ( new SimpleParallelium( \"P1_0\" ) ) );\n        a1.add( new SimpleParallelium( \"P1_1\" ) );\n        a1.add( new SimpleParallelium( \"P1_2\" ) );\n\n        a1.add( new SimpleExertium( \"E1_0\" ) );\n        a1.add( new SimpleExertium( \"E1_1\" ) );\n\n        a1.add( new SimpleParallelium( \"P1_3\" ) );\n        a1.add( new SimpleParallelium( \"P1_4\" ) );\n\n        SequentialAction aGroup = new SequentialAction();\n        aGroup.add( ( new SimpleExertium( \"EG_0\" ) ) );\n        aGroup.add( ( new SimpleExertium( \"EG_1\" ) ) );\n        a1.add( aGroup );\n        a1.add( ParallelAction.wrap( aGroup ) );\n\n\n        a0.add( a1 );\n        a0.add( ( new SimpleExertium( \"E0_END\" ) ) );\n\n        a0.start();\n    }\n\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestBasicTransaction.testSequential();\n            TestBasicTransaction.testParallel();\n            //TestBasicTransaction.testGraph();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/test/java/com/orchestration/TestInstructation.java",
    "content": "package com.orchestration;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.auto.*;\n\nclass SimpleInstruct extends ArchInstructation {\n    String ss;\n    public SimpleInstruct( String s ) {\n        super();\n        this.ss = s;\n    }\n\n    @Override\n    public void execute() throws Exception {\n        Debug.trace( \"Hello hi, fuck \"+ this.ss +\" !\" );\n\n//        if( this.ss.equals( \"1\" ) ) {\n//            throw new Exception();\n//        }\n    }\n}\n\nclass SimpleS extends ArchSuggestation {\n    String ss;\n    public SimpleS( String s ) {\n        super();\n        this.ss = s;\n    }\n\n    @Override\n    public void execute() {\n        Debug.trace( \"Hello hi, fuck \"+ this.ss +\" !\" );\n\n        if( this.ss.equals( \"1\" ) ) {\n            throw new RuntimeException();\n        }\n    }\n}\n\nclass SimplePI extends ArchParallelInstructation {\n    String ss;\n    public SimplePI( Processum parent, String s  ) {\n        super( parent );\n        this.ss = s;\n    }\n\n    @Override\n    public void doExecute() throws Exception {\n        for ( int i = 0; i < 1; i++ ) {\n            Debug.trace( \"Hello hi, fuck \"+ this.ss +\" !\" );\n        }\n\n\n//        if( this.ss.equals( \"1\" ) ) {\n//            throw new Exception();\n//        }\n    }\n}\n\npublic class TestInstructation {\n    public static void testMarshalling() throws Exception {\n        Processum p = new ArchProcessum( \"\", null ) {};\n\n\n        GenericMarshalling marshalling = new GenericMarshalling();\n        marshalling.add( new SimpleInstruct( \"0\" ) );\n        //marshalling.add( new SimplePI( p,\"1\" ) );\n        marshalling.add( new SimpleS( \"1\" ) );\n\n        GenericMarshalling am = new GenericMarshalling();\n        am.add( new SimpleInstruct( \"a0\" ) );\n        
am.add( new SimpleInstruct( \"a1\" ) );\n\n\n        marshalling.add( am );\n\n        marshalling.execute();\n    }\n\n    public static void testPeriodicAutomaton() throws Exception {\n        PeriodicAutomatron automatron = new PeriodicAutomaton( null, 500 );\n        automatron.command( new SimpleInstruct( \"0\" ) );\n        automatron.command( new SimpleInstruct( \"1\" ) );\n        automatron.command( new SimpleInstruct( \"2\" ) );\n\n        GenericMarshalling am = new GenericMarshalling( automatron );\n        am.add( new SimpleInstruct( \"a0\" ) );\n        am.add( new SimpleInstruct( \"a1\" ) );\n        //am.add( new SimplePI( automatron,\"p1\" ) );\n        automatron.command( am );\n\n        automatron.start();\n        //automatron.join();\n\n        Debug.sleep( 10 );\n        //automatron.command( new SimpleInstruct( \"3\" ) );\n\n        Thread push = new Thread( ()->{\n            for ( int i = 0; i < 100; i++ ) {\n                Debug.sleep( 50 );\n                automatron.command( new SimpleInstruct( \"push\" + i ) );\n            }\n        } );\n        push.start();\n\n        Thread push2 = new Thread( ()->{\n            for ( int i = 0; i < 100; i++ ) {\n                Debug.sleep( 50 );\n                automatron.command( new SimpleInstruct( \"2push\" + i ) );\n            }\n        } );\n        push2.start();\n\n\n\n//        Debug.sleep( 1500 );\n//        automatron.command( KernelInstructation.CONTINUE );\n//        Debug.sleep( 2000 );\n//        automatron.withdraw( KernelInstructation.CONTINUE );\n//        automatron.terminate();\n\n        push2.join();\n        push.join();\n        automatron.join();\n    }\n\n    public static void testAutomaton() throws Exception {\n        LifecycleAutomaton automatron = new Automaton( null );\n        automatron.setHeartbeatTimeoutMillis( 1000 );\n\n\n        automatron.start();\n\n        Thread push = new Thread( ()->{\n            int i = 0;\n            for ( ; i < 10; i++ ) {\n 
               Debug.sleep( 100 );\n                automatron.command( new SimpleInstruct( \"push\" + i ) );\n            }\n\n            //automatron.command( KernelInstructation.DIE );\n\n            for ( ; i < 20; i++ ) {\n                Debug.sleep( 100 );\n                automatron.command( new SimpleInstruct( \"push\" + i ) );\n            }\n        } );\n        push.start();\n\n        Thread push2 = new Thread( ()->{\n            int i = 0;\n            for ( ; i < 10; i++ ) {\n                Debug.sleep( 100 );\n                automatron.command( new SimpleInstruct( \"2push\" + i ) );\n            }\n\n            automatron.command( new SimplePI( automatron,\"p1\" ) );\n        } );\n        push2.start();\n\n//        Thread push3 = new Thread( ()->{\n//            while ( true ) {\n//                Debug.sleep( 1500 );\n//                automatron.sendHeartbeat();\n//            }\n//            //Debug.sleep( 1000 );\n//            //automatron.command( KernelInstructation.DIE );\n//        } );\n//        push3.start();\n\n        automatron.join();\n    }\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n\n            //TestInstructation.testMarshalling();\n            TestInstructation.testPeriodicAutomaton();\n            // TestInstructation.testAutomaton();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-runtime/src/test/java/com/servgram/TestServgram.java",
    "content": "package com.servgram;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.lang.*;\nimport com.pinecone.framework.util.lang.iterator.JarFileIterator;\nimport com.pinecone.framework.util.lang.iterator.NamespaceIterator;\nimport com.pinecone.ulf.util.lang.PooledClassCandidateScanner;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class TestServgram {\n    public static void testPackageCollector() throws Exception {\n        Debug.trace( Thread.currentThread().getContextClassLoader().getResource(\"com/mysql/jdbc\") );\n        //Debug.trace( Thread.currentThread().getContextClassLoader().getResource(\"com/pinecone/hydra\") );\n\n        NamespaceCollector collector = new ClassNameFetcher();\n        //NamespaceCollector collector = new PackageNameFetcher();\n        //Debug.echo( JSON.stringify( collector.fetch( \"com.mysql\", true ), 2 ) );\n        Debug.echo( JSON.stringify( collector.fetch( \"com.pinecone.hydra\", true ), 2 ) );\n        Debug.echo( collector.fetchFirst( \"com.pinecone.hydra\" ) );\n\n        //TestServgram.class.getClassLoader().loadClass()\n        //Debug.trace( Package.getPackage( \"com.pinecone.hydra.servgram\" ) );\n    }\n\n    public static void testPackageScope() throws Exception {\n        ScopedPackage scopedPackage = new LazyScopedPackage( \"com.pinecone.hydra\" );\n\n        Debug.trace( scopedPackage.children().get( 3 ).fetchChildrenClassNames() );\n\n        Debug.trace( scopedPackage.getPackage() );\n        Debug.trace( scopedPackage.tryLoad() );\n        Debug.trace( scopedPackage.getPackage(), scopedPackage.hasLoaded() );\n        Debug.trace( scopedPackage.fetchFirstClassName() );\n\n        //ClassPathScanningCandidateComponentProvider\n    }\n\n    public static void testIterator() throws Exception {\n        //NamespaceIterator iterator = new DirectoryClassIterator( 
\"/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecones/Hydra/target/classes/com/pinecone/hydra/umc/wolf\", \"com.pinecone.hydra.umc.wolf\" );\n        //NamespaceIterator iterator = new DirectoryPackageIterator( \"/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecones/Hydra/target/classes/com/pinecone/hydra/umc\", \"com.pinecone.hydra.umc\" );\n        NamespaceIterator iterator = new JarFileIterator( \"jar:file:/C:/Users/undefined/.m2/repository/mysql/mysql-connector-java/8.0.23/mysql-connector-java-8.0.23.jar!/com/mysql/jdbc\", \".class\" );\n        //NamespaceIterator iterator = new JarClassIterator( \"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/lib/radium-2.1.0.jar!/com/sauron/radium/heistron\" );\n        //NamespaceIterator iterator = new JarClassIterator( \"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-c-1.2.7.jar!/BOOT-INF/lib/shadow-c-1.2.7.jar!/BOOT-INF/lib/radium-2.1.0.jar!/com/sauron/radium/heistron\" );\n        //NamespaceIterator iterator = new JarClassIterator( \"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/classes!/com/sauron/shadow/chronicle\" );\n        //NamespaceIterator iterator = new JarPackageIterator( \"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/classes!/com/sauron/shadow/chronicle\" );\n\n        //NamespaceIterator iterator = new JarPackageIterator( \"jar:file:/C:/Users/undefined/.m2/repository/mysql/mysql-connector-java/8.0.23/mysql-connector-java-8.0.23.jar!/com/mysql\" );\n\n        while ( iterator.hasNext() ) {\n            Debug.trace( iterator.next() );\n        }\n\n//        JarEntryIterator iterator2 = new JarEntryIterator( 
\"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/classes!/com/sauron/shadow/chronicle\" );\n//        while ( iterator2.hasNext() ) {\n//            Debug.trace( iterator2.next().getRealName() );\n//        }\n\n        // PathNamespaceCollectum collectum = new JarClassCollectorAdapter();\n        PathNamespaceCollectum collectum = new JarPackageCollectorAdapter();\n        List<String > classNames = new ArrayList<>();\n        collectum.collect(\n                //\"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-c-1.2.7.jar!/BOOT-INF/lib/shadow-c-1.2.7.jar!/BOOT-INF/lib/radium-2.1.0.jar!/com/sauron/radium/heistron\",\n                \"jar:file:/E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Saurons/Shadow/target/shadow-1.2.7.jar!/BOOT-INF/classes!/com/sauron/shadow\",\n                \"\",\n                classNames, true\n        );\n\n        Debug.fmt( 2, classNames );\n    }\n\n    public static void testScanner() throws Exception {\n        ClassScanner scanner = new PooledClassCandidateScanner( null, Thread.currentThread().getContextClassLoader() );\n        PooledClassCandidateScanner scanner1 = (PooledClassCandidateScanner) scanner;\n\n        List<String > list = new ArrayList<>();\n        scanner1.scan( \"com.pinecone.hydra.umc\", true, list );\n        //scanner1.scan( \"com.mysql.jdbc\", true, list );\n        //scanner1.scan( \"com.mysql.jdbc\", false, list );\n\n        Debug.echo( JSON.stringify( list, 2 ) );\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestServgram.testPackageCollector();\n            //TestServgram.testPackageScope();\n            TestServgram.testIterator();\n            //TestServgram.testScanner();\n\n\n            return 0;\n        }, 
(Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-framework-service</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        
</dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/GenericNamespaceNode.java",
    "content": "package com.pinecone.hydra.scenario.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class GenericNamespaceNode implements NamespaceNode{\n    private long enumId;\n    private GUID guid;\n    private String name;\n    private GenericNamespaceNodeMeta namespaceNodeMeta;\n    private GenericScenarioCommonData scenarioCommonData;\n\n    public GenericNamespaceNode() {\n    }\n\n    public GenericNamespaceNode(long enumId, GUID guid, String name, GenericNamespaceNodeMeta namespaceNodeMeta, GenericScenarioCommonData scenarioCommonData) {\n        this.enumId = enumId;\n        this.guid = guid;\n        this.name = name;\n        this.namespaceNodeMeta = namespaceNodeMeta;\n        this.scenarioCommonData = scenarioCommonData;\n    }\n\n\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    public GUID getGuid() {\n        return guid;\n    }\n\n\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n\n    public String getName() {\n        return name;\n    }\n\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n\n    public GenericNamespaceNodeMeta getNamespaceNodeMeta() {\n        return namespaceNodeMeta;\n    }\n\n\n    public void setNamespaceNodeMeta(GenericNamespaceNodeMeta namespaceNodeMeta) {\n        this.namespaceNodeMeta = namespaceNodeMeta;\n    }\n\n    /**\n     * 获取\n     * @return scenarioCommonData\n     */\n    public GenericScenarioCommonData getScenarioCommonData() {\n        return scenarioCommonData;\n    }\n\n    /**\n     * 设置\n     * @param scenarioCommonData\n     */\n    public void setScenarioCommonData(GenericScenarioCommonData scenarioCommonData) {\n        this.scenarioCommonData = scenarioCommonData;\n    }\n\n    public String toString() {\n        return \"GenericNamespaceNode{enumId = \" + enumId + \", guid = \" + guid + \", name = \" + name + \", 
namespaceNodeMeta = \" + namespaceNodeMeta + \", scenarioCommonData = \" + scenarioCommonData + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/GenericNamespaceNodeMeta.java",
    "content": "package com.pinecone.hydra.scenario.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class GenericNamespaceNodeMeta implements NamespaceNodeMeta{\n    private long enumId;\n    private GUID guid;\n\n    public GenericNamespaceNodeMeta() {\n    }\n\n    public GenericNamespaceNodeMeta(long enumId, GUID guid) {\n        this.enumId = enumId;\n        this.guid = guid;\n    }\n\n\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    public GUID getGuid() {\n        return guid;\n    }\n\n\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    public String toString() {\n        return \"GenericNamespaceNodeMeta{enumId = \" + enumId + \", guid = \" + guid + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/GenericScenarioCommonData.java",
    "content": "package com.pinecone.hydra.scenario.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic class GenericScenarioCommonData implements ScenarioCommonData{\n    private long enumId;\n    private GUID guid;\n    private LocalDateTime createTime;\n    private LocalDateTime updateTime;\n\n    public GenericScenarioCommonData() {\n    }\n\n    public GenericScenarioCommonData(long enumId, GUID guid, LocalDateTime createTime, LocalDateTime updateTime) {\n        this.enumId = enumId;\n        this.guid = guid;\n        this.createTime = createTime;\n        this.updateTime = updateTime;\n    }\n\n\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    public GUID getGuid() {\n        return guid;\n    }\n\n\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n\n    public LocalDateTime getCreateTime() {\n        return createTime;\n    }\n\n\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n\n    public LocalDateTime getUpdateTime() {\n        return updateTime;\n    }\n\n\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n    public String toString() {\n        return \"GenericScenarioCommonData{enumId = \" + enumId + \", guid = \" + guid + \", createTime = \" + createTime + \", updateTime = \" + updateTime + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/NamespaceNode.java",
    "content": "package com.pinecone.hydra.scenario.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface NamespaceNode extends TreeNode {\n    long getEnumId();\n    void setEnumId(long id);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    @Override\n    default NamespaceNode evinceTreeNode() {\n        return this;\n    }\n\n    String getName();\n    void setName(String name);\n\n    GenericNamespaceNodeMeta getNamespaceNodeMeta();\n    void setNamespaceNodeMeta(GenericNamespaceNodeMeta namespaceNodeMeta);\n\n    GenericScenarioCommonData getScenarioCommonData();\n    void setScenarioCommonData(GenericScenarioCommonData scenarioCommonData);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/NamespaceNodeMeta.java",
    "content": "package com.pinecone.hydra.scenario.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface NamespaceNodeMeta extends Pinenut {\n    long getEnumId();\n    void setEnumId(long enumId);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/entity/ScenarioCommonData.java",
    "content": "package com.pinecone.hydra.scenario.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface ScenarioCommonData extends Pinenut {\n    long getEnumId();\n    void setEnumId(long id);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    LocalDateTime getCreateTime();\n    void setCreateTime(LocalDateTime localDateTime);\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime(LocalDateTime updateTime);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/source/NamespaceNodeManipulator.java",
    "content": "package com.pinecone.hydra.scenario.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.scenario.entity.NamespaceNode;\n\nimport java.util.List;\n\npublic interface NamespaceNodeManipulator extends Pinenut {\n    void insert(NamespaceNode namespaceNode);\n\n    void remove(GUID guid);\n\n    NamespaceNode getNamespaceNode(GUID guid);\n\n    void update(NamespaceNode namespaceNode);\n\n    List<GUID> getGuidsByName(String name);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/source/NamespaceNodeMetaManipulator.java",
    "content": "package com.pinecone.hydra.scenario.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.scenario.entity.NamespaceNodeMeta;\n\npublic interface NamespaceNodeMetaManipulator extends Pinenut {\n    void insert(NamespaceNodeMeta namespaceNodeMeta);\n\n    void remove(GUID guid);\n\n    NamespaceNodeMeta getNamespaceNodeMeta(GUID guid);\n\n    void update(NamespaceNodeMeta namespaceNodeMeta);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/source/ScenarioCommonDataManipulator.java",
    "content": "package com.pinecone.hydra.scenario.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.scenario.entity.ScenarioCommonData;\n\npublic interface ScenarioCommonDataManipulator extends Pinenut {\n    void insert(ScenarioCommonData scenarioCommonData);\n\n    void remove(GUID guid);\n\n    ScenarioCommonData getScenarioCommonData(GUID guid);\n\n    void update(ScenarioCommonData scenarioCommonData);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/source/ScenarioMasterManipulator.java",
    "content": "package com.pinecone.hydra.scenario.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface ScenarioMasterManipulator extends KOIMasterManipulator {\n    NamespaceNodeManipulator        getNamespaceNodeManipulator();\n\n    NamespaceNodeMetaManipulator    getNSNodeMetaManipulator();\n\n    ScenarioCommonDataManipulator   getScenarioCommonDataManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/tree/DistributedScenarioMetaTree.java",
    "content": "package com.pinecone.hydra.scenario.tree;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface DistributedScenarioMetaTree extends Pinenut {\n    String getPath(GUID guid);\n\n    GUID insert(TreeNode treeNode);\n\n    TreeNode get(GUID guid);\n\n    TreeNode parsePath(String path);\n\n    void remove(GUID guid);\n\n    TreeNode getAsRootDepth(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/scenario/tree/GenericDistributedScenarioMetaTree.java",
    "content": "package com.pinecone.hydra.scenario.tree;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.scenario.entity.GenericNamespaceNode;\nimport com.pinecone.hydra.scenario.entity.GenericNamespaceNodeMeta;\nimport com.pinecone.hydra.scenario.entity.GenericScenarioCommonData;\nimport com.pinecone.hydra.scenario.entity.NamespaceNode;\nimport com.pinecone.hydra.scenario.entity.NamespaceNodeMeta;\nimport com.pinecone.hydra.scenario.entity.ScenarioCommonData;\nimport com.pinecone.hydra.scenario.source.ScenarioMasterManipulator;\nimport com.pinecone.hydra.scenario.source.NamespaceNodeManipulator;\nimport com.pinecone.hydra.scenario.source.NamespaceNodeMetaManipulator;\nimport com.pinecone.hydra.scenario.source.ScenarioCommonDataManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.RegimentedImperialTree;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.time.LocalDateTime;\nimport java.util.List;\n\npublic class GenericDistributedScenarioMetaTree implements DistributedScenarioMetaTree{\n    protected Hydrogen   hydrogen;\n    private ImperialTree distributedScenarioTree;\n    private ScenarioMasterManipulator       scenarioMasterManipulator;\n\n    private NamespaceNodeMetaManipulator    namespaceNodeMetaManipulator;\n    private 
NamespaceNodeManipulator        namespaceNodeManipulator;\n    private ScenarioCommonDataManipulator   scenarioCommonDataManipulator;\n\n    public GenericDistributedScenarioMetaTree(Hydrogen hydrogen, KOIMasterManipulator masterManipulator){\n        this.hydrogen                       = hydrogen;\n        this.scenarioMasterManipulator      =   (ScenarioMasterManipulator) masterManipulator;\n        KOISkeletonMasterManipulator skeletonMasterManipulator = this.scenarioMasterManipulator.getSkeletonMasterManipulator();\n        TreeMasterManipulator        treeMasterManipulator     = (TreeMasterManipulator) skeletonMasterManipulator;\n        this.distributedScenarioTree        =   new RegimentedImperialTree(treeMasterManipulator);\n        this.namespaceNodeManipulator       =   this.scenarioMasterManipulator.getNamespaceNodeManipulator();\n        this.namespaceNodeMetaManipulator   =   this.scenarioMasterManipulator.getNSNodeMetaManipulator();\n        this.scenarioCommonDataManipulator  =   this.scenarioMasterManipulator.getScenarioCommonDataManipulator();\n    }\n\n    public GenericDistributedScenarioMetaTree( Hydrogen hydrogen) {\n        this.hydrogen = hydrogen;\n    }\n\n    public GenericDistributedScenarioMetaTree( KOIMappingDriver driver ) {\n        this(\n                driver.getSystem(),\n                driver.getMasterManipulator()\n        );\n    }\n\n    @Override\n    public String getPath(GUID guid) {\n        String path = this.distributedScenarioTree.getCachePath(guid);\n        if (path!=null) return path;\n        ImperialTreeNode node = this.distributedScenarioTree.getNode(guid);\n        Debug.trace(node.toString());\n            String assemblePath = this.getNodeName(node);\n            while (!node.getParentGUIDs().isEmpty()){\n                List<GUID> parentGuids = node.getParentGUIDs();\n                node = this.distributedScenarioTree.getNode(parentGuids.get(0));\n                String nodeName = this.getNodeName(node);\n   
             assemblePath = nodeName + \".\" + assemblePath;\n            }\n            this.distributedScenarioTree.insertCachePath(guid,assemblePath);\n            return assemblePath;\n    }\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericNamespaceNode namespaceNode = (GenericNamespaceNode) treeNode;\n        GuidAllocator guidAllocator = GUIDs.newGuidAllocator();\n\n        NamespaceNodeMeta namespaceNodeMeta = namespaceNode.getNamespaceNodeMeta();\n        GUID namespaceNodeMetaGuid = guidAllocator.nextGUID();\n        namespaceNodeMeta.setGuid(namespaceNodeMetaGuid);\n\n        ScenarioCommonData scenarioCommonData = namespaceNode.getScenarioCommonData();\n        GUID scenarioCommonDataGuid = guidAllocator.nextGUID();\n        scenarioCommonData.setGuid(scenarioCommonDataGuid);\n        scenarioCommonData.setCreateTime(LocalDateTime.now());\n        scenarioCommonData.setUpdateTime(LocalDateTime.now());\n\n        GUIDImperialTrieNode guidDistributedTrieNode = new GUIDImperialTrieNode();\n        GUID nodeGuid = guidAllocator.nextGUID();\n        namespaceNode.setGuid(nodeGuid);\n        guidDistributedTrieNode.setGuid(nodeGuid);\n        guidDistributedTrieNode.setNodeMetadataGUID(namespaceNodeMetaGuid);\n        guidDistributedTrieNode.setBaseDataGUID(scenarioCommonDataGuid);\n        guidDistributedTrieNode.setType(UOIUtils.createLocalJavaClass(namespaceNode.getClass().getName()));\n\n        this.distributedScenarioTree.insert(guidDistributedTrieNode);\n        this.namespaceNodeMetaManipulator.insert(namespaceNodeMeta);\n        this.scenarioCommonDataManipulator.insert(scenarioCommonData);\n        this.namespaceNodeManipulator.insert(namespaceNode);\n        return null;\n    }\n\n    @Override\n    public TreeNode get(GUID guid) {\n        GUIDImperialTrieNode node = this.distributedScenarioTree.getNode(guid);\n        NamespaceNode namespaceNode = this.namespaceNodeManipulator.getNamespaceNode(guid);\n        
GenericScenarioCommonData scenarioCommonData = (GenericScenarioCommonData) this.scenarioCommonDataManipulator.getScenarioCommonData(node.getAttributesGUID());\n        GenericNamespaceNodeMeta namespaceNodeMeta = (GenericNamespaceNodeMeta) this.namespaceNodeMetaManipulator.getNamespaceNodeMeta(node.getNodeMetadataGUID());\n        namespaceNode.setNamespaceNodeMeta(namespaceNodeMeta);\n        namespaceNode.setScenarioCommonData(scenarioCommonData);\n        return namespaceNode;\n    }\n\n    @Override\n    public TreeNode parsePath(String path) {\n        GUID guid = this.distributedScenarioTree.queryGUIDByPath( path );\n        if (guid != null){\n            return this.get(guid);\n        }\n        else{\n            String[] parts = this.processPath(path).split(\"\\\\.\");\n            List<GUID> nodeByName = this.namespaceNodeManipulator.getGuidsByName(parts[parts.length - 1]);\n            for(GUID nodeGuid :nodeByName){\n                if (this.getPath(nodeGuid).equals(path)){\n                    return this.get(nodeGuid);\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public void remove(GUID guid) {\n        List<GUIDImperialTrieNode> childNodes = this.distributedScenarioTree.getChildren(guid);\n        if (childNodes == null || childNodes.isEmpty()){\n            this.removeNode(guid);\n        }\n        else {\n            for(GUIDImperialTrieNode childNode : childNodes){\n                List<GUID> parentNodes = this.distributedScenarioTree.fetchParentGuids(childNode.getGuid());\n                if (parentNodes.size() > 1){\n                    this.distributedScenarioTree.removeInheritance(childNode.getGuid(),guid);\n                }else {\n                    this.remove(childNode.getGuid());\n                }\n            }\n            this.removeNode(guid);\n        }\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    private String 
getNodeName(ImperialTreeNode node){\n        return this.namespaceNodeManipulator.getNamespaceNode(node.getGuid()).getName();\n    }\n\n    private String processPath(String path) {\n        // 使用正则表达式移除所有的括号及其内容\n        return path.replaceAll(\"\\\\(.*?\\\\)\", \"\");\n    }\n\n    private void removeNode(GUID guid){\n        GUIDImperialTrieNode node = this.distributedScenarioTree.getNode(guid);\n        this.distributedScenarioTree.purge( guid );\n        this.namespaceNodeManipulator.remove(guid);\n        this.namespaceNodeMetaManipulator.remove(node.getNodeMetadataGUID());\n        this.scenarioCommonDataManipulator.remove(node.getAttributesGUID());\n        this.distributedScenarioTree.removeCachePath(guid);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Application.java",
    "content": "package com.pinecone.hydra.service;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface Application extends ServiceFamilyMeta {\n    long getEnumId();\n\n    GUID getGuid();\n\n    String getName();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ArchService.java",
    "content": "package com.pinecone.hydra.service;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\n\npublic abstract class ArchService implements Service {\n\n    protected Identification            mServiceId;\n\n    protected ServiceElement            mServiceMetaData;\n\n    protected Map<String, Object >      mMetaDataScope;\n\n    public ArchService( Identification serviceId, ServiceElement serviceElement, Map<String, Object > metaDataScope ){\n        this.mServiceId = serviceId;\n        this.mServiceMetaData = serviceElement;\n        this.mMetaDataScope = metaDataScope;\n    }\n\n    public ArchService( Identification serviceId, ServiceElement serviceElement ){\n       this( serviceId, serviceElement, null );\n    }\n\n\n    @Override\n    public String getName() {\n        return this.mServiceMetaData.getName();\n    }\n\n    @Override\n    public String getType() {\n        return this.mServiceMetaData.getType();\n    }\n\n    @Override\n    public String getDisplayName() {\n        return this.mServiceMetaData.getName();\n    }\n\n    @Override\n    public String getFullName() {\n        return this.mServiceMetaData.getPath();\n    }\n\n\n    public GUID getGuid() {\n        return this.mServiceMetaData.getGuid();\n    }\n\n    @Override\n    public Identification getId() {\n        return this.getGuid();\n    }\n\n    @Override\n    public String getScenario() {\n        return this.mServiceMetaData.getScenario();\n    }\n\n    @Override\n    public String getPrimaryImplLang() {\n        return this.mServiceMetaData.getPrimaryImplLang();\n    }\n\n    @Override\n    public String getExtraInformation() {\n        return this.mServiceMetaData.getExtraInformation();\n    }\n\n    @Override\n    public String getLevel() {\n        return 
this.mServiceMetaData.getLevel();\n    }\n\n    @Override\n    public String getDescription() {\n        return this.mServiceMetaData.getDescription();\n    }\n\n    @Override\n    public Map<String, Object> getMetaDataScope() {\n        return this.mMetaDataScope;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ArchServiceFamilyMeta.java",
    "content": "package com.pinecone.hydra.service;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\npublic abstract class ArchServiceFamilyMeta implements ServiceFamilyMeta {\n    protected GUID   guid;\n\n    protected String name;\n\n    protected String scenario;\n\n    protected String primaryImplLang;\n\n    protected String extraInformation;\n\n    protected String szElementaryConfig;\n\n    protected Map<String, Object > elementaryConfig;\n\n\n    protected String level;\n\n    protected String description;\n\n    public ArchServiceFamilyMeta() {\n    }\n\n    public ArchServiceFamilyMeta( Map<String, Object > joEntity ) {\n        this.apply( joEntity );\n    }\n\n    protected ArchServiceFamilyMeta apply( Map<String, Object > joEntity ) {\n        String szGuid = (String) joEntity.get( \"guid\" );\n        if( szGuid != null ) {\n            this.guid = GUIDs.GUID128( (String) joEntity.get( \"guid\" ) );\n        }\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n\n        return this;\n    }\n\n\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public Identification getId() {\n        return this.getGuid();\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public String getScenario() {\n        return this.scenario;\n    }\n\n    @Override\n    public String getPrimaryImplLang() {\n        return this.primaryImplLang;\n    }\n\n    @Override\n    public String getExtraInformation() {\n        return this.extraInformation;\n    }\n\n    @Override\n    public String getLevel() {\n        return this.level;\n    }\n\n    @Override\n    public String getDescription() {\n        return 
this.description;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ProcApplication.java",
    "content": "package com.pinecone.hydra.service;\n\npublic interface ProcApplication extends Application {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ScheduleType.java",
    "content": "package com.pinecone.hydra.service;\n\npublic enum ScheduleType {\n    Automatic                  ( \"Automatic\"         ),\n    Manual                     ( \"Manual\"            ),\n    //DryRun                     ( \"DryRun\"            ),\n    Disable                    ( \"Disable\"           );\n\n    private final String value;\n\n    ScheduleType( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Service.java",
    "content": "package com.pinecone.hydra.service;\n\nimport com.pinecone.framework.util.name.Namespace;\n\nimport java.util.Map;\n\npublic interface Service extends ServiceFamilyMeta {\n    String getName();        // Service Name, e.g. WpnService\n\n    String getType();\n\n    String getDisplayName(); // Service Long Name, e.g. Windows Push Notification System Service\n\n    String getDescription();\n\n    String getFullName();\n\n    Namespace getGroupNamespace();\n\n    String getGroupName();\n\n    String getScenario() ;\n\n    String getPrimaryImplLang() ;\n\n    String getExtraInformation() ;\n\n    String getLevel() ;\n\n    Object getProcessImageObject();\n\n    Map<String, Object> getMetaDataScope();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ServiceApplication.java",
    "content": "package com.pinecone.hydra.service;\n\npublic interface ServiceApplication extends Application {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ServiceFamilyMeta.java",
    "content": "package com.pinecone.hydra.service;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.Identification;\n\npublic interface ServiceFamilyMeta extends Pinenut  {\n    //long getEnumId();\n\n    //GUID getGuid();\n\n    Identification getId() ;\n\n    String getName();\n\n    String getScenario();\n\n    String getPrimaryImplLang();\n\n    String getExtraInformation();\n\n    String getLevel();\n\n    String getDescription();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ServiceInstance.java",
    "content": "package com.pinecone.hydra.service;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.service.entity.USII;\n\nimport java.time.LocalDateTime;\n\npublic interface ServiceInstance extends Pinenut {\n    Identification getId();\n\n    Identification getServiceId();\n\n    USII getUSII();\n\n    Object getProcessObject();\n\n    Service getService();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/ServiceMeta.java",
    "content": "package com.pinecone.hydra.service;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.name.Namespace;\n\npublic interface ServiceMeta extends Pinenut {\n    GUID getGuid() ;\n\n    String getName();        // Service Name, e.g. WpnService\n\n    String getDisplayName(); // Service Long Name, e.g. Windows Push Notification System Service\n\n    String getDescription();\n\n    String getFullName();\n\n    Namespace getGroupNamespace();\n\n    String getGroupName();\n\n    String getScenario() ;\n\n    String getPrimaryImplLang() ;\n\n    String getExtraInformation() ;\n\n    String getLevel() ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Serviciom.java",
    "content": "package com.pinecone.hydra.service;\n\npublic interface Serviciom extends Service {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Servicium.java",
    "content": "package com.pinecone.hydra.service;\n\nimport com.pinecone.framework.system.executum.Processum;\n\npublic interface Servicium extends ServiceInstance {\n    @Override\n    Processum getProcessObject();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/Status.java",
    "content": "package com.pinecone.hydra.service;\n\npublic enum Status {\n    New                        ( \"New\"               ),\n    WaitingDependency          ( \"WaitingDependency\" ),\n    Running                    ( \"Running\"           ),\n    Paused                     ( \"Paused\"            ),\n    Stopped                    ( \"Stopped\"           );\n\n    private final String value;\n\n    Status( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/entity/BindUSII.java",
    "content": "package com.pinecone.hydra.service.entity;\n\nimport com.pinecone.framework.util.id.Identification;\n\npublic class BindUSII implements USII {\n    protected Long clientId;\n\n    protected Identification serviceId;\n\n    protected Identification instanceId;\n\n    public BindUSII( Long clientId, Identification serviceId, Identification instanceId ) {\n        this.clientId  = clientId;\n        this.serviceId = serviceId;\n        this.instanceId = instanceId;\n    }\n\n    public BindUSII(){}\n\n    @Override\n    public Long getClientId() {\n        return this.clientId;\n    }\n\n    @Override\n    public Identification getServiceId() {\n        return this.serviceId;\n    }\n\n    @Override\n    public Identification getInstanceId() {\n        return this.instanceId;\n    }\n\n    @Override\n    public String getFullKey() {\n        return this.serviceId + \":\" + this.instanceId + \":\" + this.clientId;\n    }\n\n    @Override\n    public boolean equals( Object obj ) {\n        if ( this == obj ) {\n            return true;\n        }\n        if ( obj instanceof USII) {\n            USII USII = (USII) obj;\n            return this.clientId.equals( USII.getClientId() ) && this.serviceId.equals( USII.getServiceId() );\n        }\n        else if ( obj instanceof Number ) {\n            return this.clientId.equals( obj );\n        }\n        else if ( obj instanceof Identification ) {\n            return this.serviceId.equals( obj );\n        }\n\n        return false;\n    }\n\n    @Override\n    public int hashCode() {\n        return this.clientId.hashCode() ^ this.serviceId.hashCode();\n    }\n\n    public static USII wrap(Long clientId, Identification serviceId, Identification instanceId ) {\n        return new BindUSII( clientId, serviceId, instanceId );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/entity/USII.java",
    "content": "package com.pinecone.hydra.service.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.Identification;\n\n/**\n * Uniform Service Instance Identifier\n */\npublic interface USII extends Pinenut {\n    Long getClientId();\n\n    Identification getServiceId();\n\n    Identification getInstanceId();\n\n    String getFullKey();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/GenericNamespaceRules.java",
    "content": "package com.pinecone.hydra.service.kom;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class GenericNamespaceRules implements NamespaceRules {\n    // 规则id\n    private long enumId;\n\n    // 规则uuid\n    private GUID guid;\n\n    // 作用域\n    private String scope;\n\n    // 名称\n    private String name;\n\n    // 规则描述\n    private String description;\n\n\n    public GenericNamespaceRules() {\n    }\n\n    public GenericNamespaceRules(long enumId, GUID guid, String scope, String name, String description) {\n        this.enumId = enumId;\n        this.guid = guid;\n        this.scope = scope;\n        this.name = name;\n        this.description = description;\n    }\n\n    /**\n     * 获取\n     * @return enumId\n     */\n    public long getEnumId() {\n        return enumId;\n    }\n\n    /**\n     * 设置\n     * @param enumId\n     */\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    /**\n     * 获取\n     * @return guid\n     */\n    public GUID getGuid() {\n        return guid;\n    }\n\n    /**\n     * 设置\n     * @param guid\n     */\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    /**\n     * 获取\n     * @return scope\n     */\n    public String getScope() {\n        return scope;\n    }\n\n    /**\n     * 设置\n     * @param scope\n     */\n    public void setScope(String scope) {\n        this.scope = scope;\n    }\n\n    /**\n     * 获取\n     * @return name\n     */\n    public String getName() {\n        return name;\n    }\n\n    /**\n     * 设置\n     * @param name\n     */\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    /**\n     * 获取\n     * @return description\n     */\n    public String getDescription() {\n        return description;\n    }\n\n    /**\n     * 设置\n     * @param description\n     */\n    public void setDescription(String description) {\n        this.description = description;\n    }\n\n    public String toString() {\n        
return \"GenericNamespaceRules{enumId = \" + enumId + \", guid = \" + guid + \", scope = \" + scope + \", name = \" + name + \", description = \" + description + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/KernelServiceConfig.java",
    "content": "package com.pinecone.hydra.service.kom;\n\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\npublic class KernelServiceConfig extends ArchKernelObjectConfig implements ServiceConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/NamespaceRules.java",
    "content": "package com.pinecone.hydra.service.kom;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface NamespaceRules extends Pinenut {\n    long getEnumId();\n    void setEnumId(long id);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    String getScope();\n    void setScope(String scope);\n\n    String getName();\n    void setName(String name);\n\n    String getDescription();\n    void setDescription(String description);\n}"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/ServiceConfig.java",
    "content": "package com.pinecone.hydra.service.kom;\n\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface ServiceConfig extends KernelObjectConfig {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/ServiceFamilyNode.java",
    "content": "package com.pinecone.hydra.service.kom;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.service.ServiceFamilyMeta;\n\npublic interface ServiceFamilyNode extends ServiceFamilyMeta {\n    long getEnumId();\n\n    void setEnumId( long id );\n\n    void setName( String name );\n\n    GUID getGuid();\n\n    void setGuid( GUID guid );\n\n    @Override\n    default Identification getId() {\n        return this.getGuid();\n    }\n\n    String getScenario();\n\n    void setScenario( String scenario );\n\n    String getPrimaryImplLang();\n\n    void setPrimaryImplLang( String primaryImplLang );\n\n    String getExtraInformation();\n\n    void setExtraInformation( String extraInformation );\n\n    String getLevel();\n\n    void setLevel( String level );\n\n    String getDescription();\n\n    void setDescription( String description );\n\n    ServiceFamilyNode apply( Map<String, Object > joEntity ) ;\n}"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/ServiceInstrument.java",
    "content": "package com.pinecone.hydra.service.kom;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;\nimport com.pinecone.hydra.system.ko.kom.ReparseKOMTree;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.util.List;\n\npublic interface ServiceInstrument extends ReparseKOMTree {\n\n    ServiceConfig KernelServiceConfig = new KernelServiceConfig();\n\n    ApplicationElement affirmApplication ( String path );\n\n    Namespace          affirmNamespace   ( String path );\n\n    ServiceElement     affirmService     ( String path );\n\n    ElementNode        queryElement      ( String path );\n\n    boolean            containsChild     ( GUID parentGuid, String childName );\n\n    void               update            ( TreeNode treeNode );\n\n    List<ServiceElement> fetchAllService();\n\n    void createServiceInstance( ServiceInstanceEntry serviceInstanceEntry);\n\n    ServiceInstanceEntry queryServiceInstance(GUID serviceId );\n\n    void updateServiceInstance( ServiceInstanceEntry element );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/ServicePathSelector.java",
    "content": "package com.pinecone.hydra.service.kom;\n\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.kom.StandardPathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic class ServicePathSelector extends StandardPathSelector {\n    public ServicePathSelector(PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans ) {\n        super( pathResolver, trieTree, dirMan, fileMans );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/UniformServiceInstrument.java",
    "content": "package com.pinecone.hydra.service.kom;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\nimport com.pinecone.hydra.service.kom.entity.GenericApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceElement;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;\nimport com.pinecone.hydra.service.kom.entity.ServiceTreeNode;\nimport com.pinecone.hydra.service.kom.entity.ServoElement;\nimport com.pinecone.hydra.service.kom.operator.GenericElementOperatorFactory;\nimport com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceInstanceManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceNodeManipulator;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree;\nimport com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition;\nimport com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport 
com.pinecone.hydra.unit.imperium.RegimentedImperialTree;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V7;\n\npublic class UniformServiceInstrument extends ArchReparseKOMTree implements ServiceInstrument {\n    //GenericDistributedScopeTree\n    protected ImperialTree                  imperialTree;\n\n    protected ServiceMasterManipulator      serviceMasterManipulator;\n\n    protected ServiceNamespaceManipulator   serviceNamespaceManipulator;\n\n    protected ApplicationNodeManipulator    applicationNodeManipulator;\n\n    protected ServiceNodeManipulator        serviceNodeManipulator;\n\n    protected ServiceInstanceManipulator    serviceInstanceManipulator;\n\n    protected List<GUIDNameManipulator >    folderManipulators;\n\n    protected List<GUIDNameManipulator >    fileManipulators;\n\n\n\n    public UniformServiceInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, ServiceInstrument parent, String name, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorProcess, masterManipulator, ServiceInstrument.KernelServiceConfig, parent, name, guidAllocator );\n\n        this.serviceMasterManipulator    = (ServiceMasterManipulator) masterManipulator;\n        this.serviceNamespaceManipulator = this.serviceMasterManipulator.getNamespaceManipulator();\n        this.applicationNodeManipulator  = this.serviceMasterManipulator.getApplicationNodeManipulator();\n        this.serviceNodeManipulator      = this.serviceMasterManipulator.getServiceNodeManipulator();\n        this.serviceInstanceManipulator  = this.serviceMasterManipulator.getServiceInstanceManipulator();\n        KOISkeletonMasterManipulator skeletonMasterManipulator = 
this.serviceMasterManipulator.getSkeletonMasterManipulator();\n        TreeMasterManipulator        treeMasterManipulator     = (TreeMasterManipulator) skeletonMasterManipulator;\n        this.imperialTree                = new RegimentedImperialTree(treeMasterManipulator);\n        this.operatorFactory             = new GenericElementOperatorFactory(this,(ServiceMasterManipulator) masterManipulator);\n\n        this.pathResolver                = new KOPathResolver( this.kernelObjectConfig );\n\n        // TODO for customize service tree architecture.\n        this.folderManipulators          = new ArrayList<>( List.of( this.serviceNamespaceManipulator, this.applicationNodeManipulator ) );\n        this.fileManipulators            = new ArrayList<>( List.of( this.applicationNodeManipulator, this.serviceNodeManipulator ) );\n        this.pathSelector                = new MultiFolderPathSelector(\n                this.pathResolver, this.imperialTree, this.folderManipulators.toArray( new GUIDNameManipulator[]{} ), this.fileManipulators.toArray( new GUIDNameManipulator[]{} )\n        );\n\n        this.mReparseKOM                 =  new GenericReparseKOMTreeAddition( this );\n    }\n\n    public UniformServiceInstrument(Processum superiorProcess, KOIMasterManipulator masterManipulator ){\n        this( superiorProcess, masterManipulator, null, ServiceInstrument.class.getSimpleName(), new GuidAllocator128V7());\n    }\n\n//    public UniformServiceInstrument( Hydrogen hydrogen ) {\n//        this.hydrogen = hydrogen;\n//    }\n\n    public UniformServiceInstrument(KOIMappingDriver driver ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator()\n        );\n    }\n\n    public UniformServiceInstrument(KOIMappingDriver driver, ServiceInstrument parent, String name ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator(),\n                parent,\n                
name,\n                null\n        );\n    }\n\n    protected ServiceTreeNode affirmTreeNodeByPath( String path, Class<? > cnSup, Class<? > nsSup ) {\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        String currentPath = \"\";\n        GUID parentGuid = GUIDs.Dummy128();\n\n        ServiceTreeNode node = this.queryElement(path);\n        if ( node != null ){\n            return node;\n        }\n\n        ServiceTreeNode ret = null;\n        for( int i = 0; i < parts.length; ++i ){\n            currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : \"\" ) + parts[ i ];\n            node = this.queryElement( currentPath );\n            if ( node == null){\n                if ( i == parts.length - 1 && cnSup != null ){\n                    ServoElement servoElement = (ServoElement) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } );\n                    servoElement.setName( parts[i] );\n                    GUID guid = this.put( servoElement );\n                    this.affirmOwnedNode( parentGuid, guid );\n                    return servoElement;\n                }\n                else {\n                    Namespace namespace = (Namespace) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } );\n                    namespace.setName( parts[i] );\n                    GUID guid = this.put( namespace );\n                    if ( i != 0 ){\n                        this.affirmOwnedNode( parentGuid, guid );\n                        parentGuid = guid;\n                    }\n                    else {\n                        parentGuid = guid;\n                    }\n\n                    ret = namespace;\n                }\n            }\n            else {\n                parentGuid = node.getGuid();\n            }\n        }\n\n        return ret;\n    }\n\n    @Override\n    public ApplicationElement affirmApplication( String path ) {\n        return (ApplicationElement) 
this.affirmTreeNodeByPath( path, GenericApplicationElement.class, GenericNamespace.class );\n    }\n\n    @Override\n    public ServiceElement affirmService( String path ) {\n        return (ServiceElement) this.affirmTreeNodeByPath( path, GenericServiceElement.class, GenericNamespace.class );\n    }\n\n    @Override\n    public ElementNode queryElement( String path ) {\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid != null ) {\n            return this.get( guid ).evinceElementNode();\n        }\n\n        return null;\n    }\n\n    @Override\n    public Namespace affirmNamespace( String path ) {\n        return ( Namespace ) this.affirmTreeNodeByPath( path, null, GenericNamespace.class );\n    }\n\n    protected boolean containsChild( GUIDNameManipulator manipulator, GUID parentGuid, String childName ) {\n        List<GUID > guids = manipulator.getGuidsByName( childName );\n        for( GUID guid : guids ) {\n            List<GUID > ps = this.imperialTree.fetchParentGuids( guid );\n            if( ps.contains( parentGuid ) ){\n                return true;\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public boolean containsChild( GUID parentGuid, String childName ) {\n        for( GUIDNameManipulator manipulator : this.fileManipulators ) {\n            if( this.containsChild( manipulator, parentGuid, childName ) ) {\n                return true;\n            }\n        }\n\n        for( GUIDNameManipulator manipulator : this.folderManipulators ) {\n            if( this.containsChild( manipulator, parentGuid, childName ) ) {\n                return true;\n            }\n        }\n        return false;\n    }\n\n\n    /**\n     * Affirm path exist in cache, if required.\n     * 确保路径存在于缓存，如果有明确实现必要的话。\n     * 对于GenericDistributedScopeTree::getPath, 默认会自动写入缓存，因此这里可以通过getPath保证路径缓存一定存在。\n     * @param guid, target guid.\n     * @return Path\n     */\n    protected void affirmPathExist( GUID guid ) {\n        
this.imperialTree.getCachePath( guid );\n    }\n\n    @Override\n    public ServiceTreeNode get( GUID guid ){\n        return (ServiceTreeNode) super.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n        TreeNodeOperator operator = this.operatorFactory.getOperator( treeNode.getMetaType() );\n        operator.update( treeNode );\n    }\n\n\n\n    @Override\n    public void remove( GUID guid ) {\n        super.remove( guid );\n    }\n\n    @Override\n    public Object queryEntityHandleByNS(String path, String szBadSep, String szTargetSep) {\n        return null;\n    }\n\n    @Override\n    public List<ServiceElement> fetchAllService() {\n        return this.serviceNodeManipulator.fetchAllService();\n    }\n\n    @Override\n    public void createServiceInstance(ServiceInstanceEntry serviceInstanceEntry) {\n        this.serviceInstanceManipulator.initServiceInstance(serviceInstanceEntry);\n    }\n\n    @Override\n    public ServiceInstanceEntry queryServiceInstance(GUID serviceId) {\n        return this.serviceInstanceManipulator.queryServiceInstance( serviceId );\n    }\n\n    @Override\n    public void updateServiceInstance(ServiceInstanceEntry element) {\n        this.serviceInstanceManipulator.updateServiceInstance( element );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ApplicationElement.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport com.pinecone.hydra.service.kom.ServiceFamilyNode;\n\npublic interface ApplicationElement extends FolderElement, ServiceFamilyNode {\n    String getDeploymentMethod();\n\n    void setDeploymentMethod( String deploymentMethod );\n\n    @Override\n    default ApplicationElement evinceApplicationElement() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ArchElementNode.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.service.ArchServiceFamilyMeta;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\npublic abstract class ArchElementNode extends ArchServiceFamilyMeta implements ElementNode {\n    protected long                       enumId;\n\n    protected GUIDImperialTrieNode distributedTreeNode;\n    protected ServiceInstrument serviceInstrument;\n\n    public ArchElementNode() {\n        super();\n    }\n\n    public ArchElementNode( Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public ArchElementNode( Map<String, Object > joEntity, ServiceInstrument serviceInstrument) {\n        super( joEntity );\n        this.apply(serviceInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public ArchElementNode( ServiceInstrument serviceInstrument) {\n        this.apply(serviceInstrument);\n    }\n\n    public void apply( ServiceInstrument serviceInstrument) {\n        this.serviceInstrument = serviceInstrument;\n        GuidAllocator guidAllocator = this.serviceInstrument.getGuidAllocator();\n        this.setGuid( guidAllocator.nextGUID() );\n    }\n\n    @Override\n    public ArchElementNode apply( Map<String, Object > joEntity ) {\n        super.apply( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n\n        return 
this;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId( long enumId ) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid( GUID guid ) {\n        this.guid = guid;\n    }\n\n    @Override\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    @Override\n    public void setScenario( String scenario ) {\n        this.scenario = scenario;\n    }\n\n    @Override\n    public void setPrimaryImplLang( String primaryImplLang ) {\n        this.primaryImplLang = primaryImplLang;\n    }\n\n    @Override\n    public void setExtraInformation( String extraInformation ) {\n        this.extraInformation = extraInformation;\n    }\n\n    @Override\n    public void setLevel( String level ) {\n        this.level = level;\n    }\n\n    @Override\n    public void setDescription( String description ) {\n        this.description = description;\n    }\n\n    @Override\n    public GUIDImperialTrieNode getDistributedTreeNode() {\n        return this.distributedTreeNode;\n    }\n\n    @Override\n    public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) {\n        this.distributedTreeNode = distributedTreeNode;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n\n    protected Collection<ElementNode > fetchChildren() {\n        Collection<GUID > guids = this.fetchChildrenGuids();\n        List<ElementNode > elementNodes = new ArrayList<>();\n        for( GUID guid : guids ){\n            ElementNode elementNode = (ElementNode) this.serviceInstrument.get( guid );\n            elementNodes.add( elementNode );\n        }\n        return elementNodes;\n    }\n\n    protected 
Collection<GUID > fetchChildrenGuids() {\n        return this.serviceInstrument.fetchChildrenGuids( this.getGuid() );\n    }\n\n    protected void addChild( ElementNode child ) {\n        GUID childId;\n        boolean bContainsChild = this.containsChild( child.getName() );\n        if( bContainsChild ) {\n            return;\n        }\n        else {\n            childId = this.serviceInstrument.put( child );\n        }\n\n\n        this.serviceInstrument.affirmOwnedNode( this.guid, childId );\n    }\n\n    protected boolean containsChild( String childName ) {\n        return this.serviceInstrument.containsChild( this.guid, childName );\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        return BeanColonist.DirectColonist.populate( this, ServoElement.UnbeanifiedKeys );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ArchServoElement.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\n\npublic abstract class ArchServoElement extends ArchElementNode implements ServoElement {\n    protected GUID                       metaGuid;\n    protected String                     path;\n    protected String                     type;\n    protected String                     alias;\n    protected String                     resourceType;\n    protected LocalDateTime              createTime;\n    protected LocalDateTime              updateTime;\n\n\n    public ArchServoElement() {\n        super();\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n    }\n\n    public ArchServoElement( Map<String, Object > joEntity ) {\n        super( joEntity );\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public ArchServoElement( Map<String, Object > joEntity, ServiceInstrument serviceInstrument) {\n        this( joEntity );\n        this.apply(serviceInstrument);\n    }\n\n    public ArchServoElement( ServiceInstrument serviceInstrument) {\n        super(serviceInstrument);\n    }\n\n    @Override\n    public ArchElementNode apply( Map<String, Object > joEntity ) {\n        super.apply( joEntity );\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n\n        return this;\n    }\n\n\n    /**\n     *  Overridden to keep keys in prior json-decode.\n     */\n    @Override\n    public GUID getGuid() {\n        return super.getGuid();\n    }\n\n    
@Override\n    public String getName() {\n        return super.getName();\n    }\n\n    @Override\n    public GUID getMetaGuid() {\n        return this.metaGuid;\n    }\n\n    @Override\n    public void setMetaGuid( GUID metaGuid ) {\n        this.metaGuid = metaGuid;\n    }\n\n    @Override\n    public String getPath() {\n        return this.path;\n    }\n\n    @Override\n    public void setPath( String path ) {\n        this.path = path;\n    }\n\n    @Override\n    public String getType() {\n        return this.type;\n    }\n\n    @Override\n    public void setType( String type ) {\n        this.type = type;\n    }\n\n    @Override\n    public String getAlias() {\n        return this.alias;\n    }\n\n    @Override\n    public void setAlias( String alias ) {\n        this.alias = alias;\n    }\n\n    @Override\n    public String getResourceType() {\n        return this.resourceType;\n    }\n\n    @Override\n    public void setResourceType( String resourceType ) {\n        this.resourceType = resourceType;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime( LocalDateTime createTime ) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime( LocalDateTime updateTime ) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this, UnbeanifiedKeys);\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/CommonMeta.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\npublic interface CommonMeta extends ElementNode {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ElementNode.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.service.kom.ServiceFamilyNode;\nimport com.pinecone.hydra.system.ko.meta.ElementObject;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic interface ElementNode extends ServiceTreeNode, ServiceFamilyNode, ElementObject {\n    default Namespace evinceNamespace() {\n        return null;\n    }\n\n    default ApplicationElement evinceApplicationElement() {\n        return null;\n    }\n\n    default ServiceElement evinceServiceElement() {\n        return null;\n    }\n\n    GUIDImperialTrieNode getDistributedTreeNode();\n\n    void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode );\n\n    JSONObject toJSONObject();\n\n    @Override\n    default ElementNode evinceElementNode(){\n        return this;\n    }\n\n    @Override\n    default String objectCategoryName() {\n        return \"Service\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/FolderElement.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FolderElement extends ElementNode {\n    Collection<ElementNode > fetchChildren();\n\n    Collection<GUID > fetchChildrenGuids();\n\n    void addChild( ElementNode child );\n\n    boolean containsChild( String childName );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericApplicationElement.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\npublic class GenericApplicationElement extends ArchServoElement implements ApplicationElement {\n    protected String                     deploymentMethod;\n\n    public GenericApplicationElement() {\n        super();\n    }\n\n    public GenericApplicationElement( Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericApplicationElement( Map<String, Object > joEntity, ServiceInstrument serviceInstrument) {\n        super( joEntity, serviceInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericApplicationElement( ServiceInstrument serviceInstrument) {\n        super(serviceInstrument);\n    }\n\n    @Override\n    public String getDeploymentMethod() {\n        return this.deploymentMethod;\n    }\n\n    @Override\n    public void setDeploymentMethod( String deploymentMethod ) {\n        this.deploymentMethod = deploymentMethod;\n    }\n\n    @Override\n    public Collection<ElementNode > fetchChildren() {\n        return super.fetchChildren();\n    }\n\n    @Override\n    public Collection<GUID > fetchChildrenGuids() {\n        return super.fetchChildrenGuids();\n    }\n\n    @Override\n    public void addChild( ElementNode child ) {\n        if( child instanceof FolderElement ) {\n            throw new IllegalArgumentException( \"Foisting `FolderElement` into application node is not accepted.\" );\n        }\n        super.addChild( child 
);\n    }\n\n    @Override\n    public boolean containsChild( String childName ) {\n        return super.containsChild( childName );\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        Collection<ElementNode > children = this.fetchChildren();\n        JSONObject jo         = BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys);\n        JSONObject joChildren = new JSONMaptron();\n\n        for( ElementNode node : children ) {\n            joChildren.put( node.getName(), node.toJSONObject() );\n        }\n        jo.put( \"services\", joChildren );\n        return jo;\n    }\n}"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericCommonMeta.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\npublic class GenericCommonMeta extends ArchElementNode implements CommonMeta {\n    public GenericCommonMeta() {\n        super();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericNamespace.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.service.kom.GenericNamespaceRules;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic class GenericNamespace extends ArchElementNode implements Namespace {\n    protected GUID                        rulesGUID;\n\n    protected GUID                        metaGuid;\n\n    protected GUIDImperialTrieNode distributedTreeNode;\n\n    protected GenericNamespaceRules       classificationRules;\n\n    protected ServiceNamespaceManipulator namespaceManipulator;\n\n\n    public GenericNamespace() {\n        super();\n    }\n\n    public GenericNamespace( Map<String, Object > joEntity ) {\n        super( joEntity );\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericNamespace( Map<String, Object > joEntity, ServiceInstrument serviceInstrument) {\n        super( joEntity, serviceInstrument);\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n    }\n\n    public GenericNamespace( ServiceInstrument serviceInstrument) {\n        super(serviceInstrument);\n    }\n\n    public GenericNamespace(ServiceInstrument serviceInstrument, ServiceNamespaceManipulator namespaceManipulator ) {\n        this(serviceInstrument);\n        this.namespaceManipulator = namespaceManipulator;\n    }\n\n    @Override\n    public GUIDImperialTrieNode getDistributedTreeNode() {\n   
     return this.distributedTreeNode;\n    }\n\n    @Override\n    public void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode ) {\n        this.distributedTreeNode = distributedTreeNode;\n    }\n\n    @Override\n    public GenericNamespaceRules getClassificationRules() {\n        return this.classificationRules;\n    }\n\n    @Override\n    public void setClassificationRules( GenericNamespaceRules classificationRules ) {\n        this.classificationRules = classificationRules;\n    }\n\n    @Override\n    public GUID getMetaGuid() {\n        return this.metaGuid;\n    }\n\n    @Override\n    public void setMetaGuid( GUID metaGuid ) {\n        this.metaGuid = metaGuid;\n    }\n\n    @Override\n    public GUID getRulesGUID() {\n        return this.rulesGUID;\n    }\n\n    @Override\n    public void setRulesGUID( GUID rulesGUID ) {\n        this.rulesGUID = rulesGUID;\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        Collection<ElementNode > children = this.fetchChildren();\n        JSONObject jo = new JSONMaptron();\n\n        for( ElementNode node : children ) {\n            jo.put( node.getName(), node.toJSONObject() );\n        }\n        return jo;\n    }\n\n    @Override\n    public JSONObject toJSONDetails() {\n        return BeanColonist.DirectColonist.populate( this, UnbeanifiedKeys);\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"guid\"        , this.getGuid()            ),\n                new KeyValue<>( \"name\"        , this.getName()            )\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.name;\n    }\n\n    @Override\n    public Collection<ElementNode > fetchChildren() {\n        return super.fetchChildren();\n    }\n\n    @Override\n    public Collection<GUID > fetchChildrenGuids() {\n        return super.fetchChildrenGuids();\n    }\n\n    @Override\n   
 public void addChild( ElementNode child ) {\n        super.addChild( child );\n    }\n\n    @Override\n    public boolean containsChild( String childName ) {\n        return super.containsChild( childName );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericServiceElement.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\n\nimport java.util.Map;\n\npublic class GenericServiceElement extends ArchServoElement implements ServiceElement {\n    protected String                     serviceType;\n\n    private void initSelf( Map<String, Object > joEntity ) {\n        BeanMapDecoder.BasicDecoder.decode( this, joEntity );\n        if ( this.szElementaryConfig != null ) {\n            this.elementaryConfig = (JSONObject)JSON.parse( this.szElementaryConfig );\n        }\n    }\n\n    public GenericServiceElement() {\n        super();\n    }\n\n    public GenericServiceElement( Map<String, Object > joEntity ) {\n        super( joEntity );\n        this.initSelf( joEntity );\n    }\n\n    public GenericServiceElement( Map<String, Object > joEntity, ServiceInstrument serviceInstrument) {\n        super( joEntity, serviceInstrument);\n        this.initSelf( joEntity );\n    }\n\n    public GenericServiceElement( ServiceInstrument serviceInstrument) {\n        super(serviceInstrument);\n    }\n\n    @Override\n    public String getServiceType() {\n        return this.serviceType;\n    }\n\n    @Override\n    public void setServiceType( String serviceType ) {\n        this.serviceType = serviceType;\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/GenericServiceInstanceEntity.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;\n\nimport java.time.LocalDateTime;\n\npublic class GenericServiceInstanceEntity implements ServiceInstanceEntry {\n    protected GUID mGuid;\n\n    protected GUID mServiceGuid;\n\n    protected String mszStatus;\n\n    protected LocalDateTime mLatestStartTime;\n\n    protected LocalDateTime mLatestEndTime;\n\n    protected String mErrorCause;\n\n    protected int mnRunCount;\n\n    protected GUID mDeployGuid;\n\n    protected String mIp;\n\n    @Override\n    public void setGuid( GUID guid ) {\n        this.mGuid = guid;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mGuid;\n    }\n\n    @Override\n    public void setServiceGuid( GUID guid ) {\n        this.mServiceGuid = guid;\n    }\n\n    @Override\n    public GUID getServiceGuid() {\n        return this.mServiceGuid;\n    }\n\n    @Override\n    public String getStatus(){\n        return this.mszStatus;\n    }\n\n    @Override\n    public void setStatus( String status ){\n        this.mszStatus = status;\n    }\n\n    @Override\n    public LocalDateTime getLatestStartTime(){\n        return this.mLatestStartTime;\n    }\n\n    @Override\n    public void setLatestStartTime( LocalDateTime latestStartTime ){\n        this.mLatestStartTime = latestStartTime;\n    }\n\n    @Override\n    public LocalDateTime getLatestEndTime(){\n        return this.mLatestEndTime;\n    }\n\n    @Override\n    public void setLatestEndTime( LocalDateTime latestEndTime ){\n        this.mLatestEndTime = latestEndTime;\n    }\n\n    @Override\n    public String getErrorCause(){\n        return this.mErrorCause;\n    }\n\n    @Override\n    public void setErrorCause( String errorCause ){\n        this.mErrorCause = errorCause;\n    }\n\n    @Override\n    public int getRunCount(){\n        return this.mnRunCount;\n    }\n\n    
@Override\n    public void setRunCount( int runCount ){\n        this.mnRunCount = runCount;\n    }\n\n    @Override\n    public GUID getDeployGuid() {\n        return this.mDeployGuid;\n    }\n\n    @Override\n    public void setDeployGuid( GUID deployGuid ) {\n        this.mDeployGuid = deployGuid;\n    }\n\n    @Override\n    public String getIp() {\n        return this.mIp;\n    }\n\n    @Override\n    public void setIp( String ip ) {\n        this.mIp = ip;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/Namespace.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport java.util.Set;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.service.kom.GenericNamespaceRules;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\n\npublic interface Namespace extends FolderElement {\n    Set<String > UnbeanifiedKeys = Set.of( \"distributedTreeNode\", \"classificationRules\" );\n\n    long getEnumId();\n\n    void setEnumId( long id );\n\n    GUID getGuid();\n\n    void setGuid( GUID guid );\n\n    GUID getMetaGuid();\n\n    void setMetaGuid( GUID metaGuid );\n\n    String getName();\n\n    void setName( String name );\n\n    GUID getRulesGUID();\n\n    void setRulesGUID( GUID rulesGUID );\n\n    GenericNamespaceRules getClassificationRules();\n\n    void setClassificationRules( GenericNamespaceRules classificationRules );\n\n    GUIDImperialTrieNode getDistributedTreeNode();\n\n    void setDistributedTreeNode( GUIDImperialTrieNode distributedTreeNode );\n\n    @Override\n    default Namespace evinceNamespace() {\n        return this;\n    }\n\n    JSONObject toJSONDetails();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ServiceElement.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\npublic interface ServiceElement extends ServoElement {\n    @Override\n    default ServiceElement evinceServiceElement() {\n        return this;\n    }\n\n    String getServiceType();\n\n    void setServiceType( String serviceType );\n}"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ServiceInstanceEntry.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface ServiceInstanceEntry extends Pinenut {\n    void setGuid( GUID guid );\n\n    GUID getGuid();\n\n    void setServiceGuid( GUID guid );\n\n    GUID getServiceGuid();\n\n    String getStatus();\n\n    void setStatus( String status );\n\n    LocalDateTime getLatestStartTime();\n\n\n    void setLatestStartTime( LocalDateTime latestStartTime );\n\n    LocalDateTime getLatestEndTime();\n\n    void setLatestEndTime( LocalDateTime latestEndTime );\n\n    String getErrorCause();\n\n    void setErrorCause( String errorCause );\n\n    int getRunCount();\n\n    void setRunCount( int runCount );\n\n    GUID getDeployGuid();\n\n    void setDeployGuid( GUID deployGuid );\n\n    String getIp();\n\n    void setIp( String ip );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ServiceTreeNode.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface ServiceTreeNode extends TreeNode {\n    String getName();\n\n    default String getMetaType() {\n        return this.className().replace(\"Generic\",\"\");\n    }\n\n    default ServiceTreeNode evinceTreeNode(){\n        return this;\n    }\n\n    default ElementNode evinceElementNode(){\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/entity/ServoElement.java",
    "content": "package com.pinecone.hydra.service.kom.entity;\n\nimport java.time.LocalDateTime;\nimport java.util.Set;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.ServiceFamilyNode;\n\npublic interface ServoElement extends ElementNode, ServiceFamilyNode {\n    Set<String > UnbeanifiedKeys = Set.of( \"distributedTreeNode\" );\n\n    long getEnumId();\n    void setEnumId( long id );\n\n    GUID getGuid();\n    void setGuid( GUID guid );\n\n    GUID getMetaGuid();\n    void setMetaGuid( GUID metaGuid );\n\n    String getName();\n    void setName( String name );\n\n    String getPath();\n    void setPath( String path );\n\n    String getType();\n    void setType( String type );\n\n    String getAlias();\n    void setAlias( String alias );\n\n    String getResourceType();\n    void setResourceType( String resourceType );\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime( LocalDateTime updateTime );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/marshaling/ServiceInstrumentDecoder.java",
    "content": "package com.pinecone.hydra.service.kom.marshaling;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\n\npublic interface ServiceInstrumentDecoder extends Pinenut {\n    default ElementNode decode( Object val, GUID parentGUID ) {\n        if ( val instanceof Map ) {\n            Map map = (Map) val;\n            if( map.isEmpty() ) {\n                return null;\n            }\n            else if( map.size() > 1 ) {\n                throw new IllegalArgumentException( \"Root element should has at last 1.\" );\n            }\n\n            Map.Entry kv = (Map.Entry) map.entrySet().iterator().next();\n            return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );\n        }\n\n        return null;\n    }\n\n    ElementNode decode( String key, Object val, GUID parentGUID );\n\n    default ElementNode decode( Map.Entry kv, GUID parentGUID ) {\n        return this.decode( kv.getKey().toString(), kv.getValue(), parentGUID );\n    }\n\n    default ElementNode decode( Object val ) {\n        return this.decode( val, null );\n    }\n\n    default ElementNode decode( String key, Object val ) {\n        return this.decode( key, val, null );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/marshaling/ServiceInstrumentEncoder.java",
    "content": "package com.pinecone.hydra.service.kom.marshaling;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\n\npublic interface ServiceInstrumentEncoder extends Pinenut {\n    Object encode( ElementNode node );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/marshaling/ServiceJSONDecoder.java",
    "content": "package com.pinecone.hydra.service.kom.marshaling;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\nimport com.pinecone.hydra.service.kom.entity.FolderElement;\nimport com.pinecone.hydra.service.kom.entity.GenericApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceElement;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\n\npublic class ServiceJSONDecoder implements ServiceInstrumentDecoder {\n    protected ServiceInstrument instrument;\n\n    public ServiceJSONDecoder(ServiceInstrument instrument ) {\n        this.instrument = instrument;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public ElementNode    decode( String szName, Object o, GUID parentGuid ) {\n        if ( o instanceof Map ) {\n            return (ElementNode) this.instrument.get( this.decodeJSONObject( szName, (Map<String, Object>) o, parentGuid ).getGuid() );\n        }\n\n        throw new IllegalArgumentException( \"Elements of `ServersInstrument` should all be object.\" );\n    }\n\n    protected Namespace   newNamespace( String szName, Map<String, Object > jo ) {\n        Namespace ns = new GenericNamespace( jo, this.instrument );\n        ns.setName( szName );\n\n        return ns;\n    }\n\n    protected Object[]    affirmNSExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        Namespace ns = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceNamespace() == null ) {\n                    throw new 
IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be namespace.\", szName )\n                    );\n                }\n\n                ns = rootE.evinceNamespace();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            if( parentNode instanceof Namespace ) {\n                Collection<ElementNode> destChildren = parentNode.evinceNamespace().fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof Namespace ) {\n                            ns = (Namespace) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"<ServiceInstrument> Existed child-destination [%s] should be namespace.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n        }\n\n\n        GUID currentGuid;\n        if( ns == null ) {\n            ns = this.newNamespace( szName, jo );\n            currentGuid  = this.instrument.put( ns );\n            this.instrument.affirmOwnedNode( parentGuid, currentGuid );\n        }\n        else {\n            currentGuid = ns.getGuid();\n        }\n        return new Object[] { ns, currentGuid };\n    }\n\n    protected Object[]    affirmAppExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        ApplicationElement app = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceApplicationElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( 
\"Existed child-destination [%s] should be `ApplicationElement`.\", szName )\n                    );\n                }\n\n                app = rootE.evinceApplicationElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            if( parentNode instanceof Namespace ) {\n                Collection<ElementNode> destChildren = parentNode.evinceNamespace().fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof ApplicationElement ) {\n                            app = (ApplicationElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `ApplicationElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n        }\n\n\n\n        ApplicationElement neo ;\n        if( app == null ) {\n            neo = new GenericApplicationElement( jo, this.instrument );\n            neo.setName( szName );\n        }\n        else {\n            neo = app;\n        }\n        return new Object[] { app, neo };\n    }\n\n    protected Object[]    affirmSerExisted( String szName, GUID parentGuid, Map<String, Object > jo ) {\n        ServiceElement ser = null;\n\n        if( parentGuid == null ) {\n            ElementNode rootE = this.instrument.queryElement( szName );\n            if( rootE != null ) {\n                if( rootE.evinceServiceElement() == null ) {\n                    throw new IllegalArgumentException(\n                            String.format( \"Existed child-destination [%s] should be `ServiceElement`.\", szName )\n                    );\n                }\n\n            
    ser = rootE.evinceServiceElement();\n            }\n        }\n        else {\n            ElementNode parentNode = (ElementNode)this.instrument.get( parentGuid );\n            Collection<ElementNode> destChildren;\n            if( parentNode instanceof FolderElement ) {\n                destChildren = ( (FolderElement) parentNode ).fetchChildren();\n                for( ElementNode node : destChildren ) {\n                    if( szName.equals( node.getName() ) ) {\n                        if( node instanceof ServiceElement ) {\n                            ser = (ServiceElement) node;\n                            break;\n                        }\n                        else {\n                            throw new IllegalArgumentException(\n                                    String.format( \"Existed child-destination [%s] should be `ServiceElement`.\", szName )\n                            );\n                        }\n                    }\n                }\n            }\n            else {\n                throw new IllegalStateException(\n                        String.format( \"Parent of `ServiceElement` [%s] should be `FolderElement`.\", szName )\n                );\n            }\n        }\n\n\n\n        ServiceElement neo ;\n        if( ser == null ) {\n            neo = new GenericServiceElement( jo, this.instrument );\n            neo.setName( szName );\n        }\n        else {\n            neo = ser;\n        }\n        return new Object[] { ser, neo };\n    }\n\n    protected Object[]    decodeExternalElements( String szMetaType, String szName, GUID parentGuid, Map<String, Object > jo ) throws IllegalArgumentException {\n        throw new IllegalArgumentException( \"Unknown metaType '\" + szMetaType + \"'.\" );\n    }\n\n    protected void        decodeChildren ( Map jo, GUID currentGuid ) {\n        for ( Object o : jo.entrySet() ) {\n            Map.Entry kv = (Map.Entry) o;\n            Object   val = kv.getValue();\n            if( val 
instanceof Map ) {\n                this.decode( kv.getKey().toString(), val, currentGuid );\n            }\n        }\n    }\n\n    protected ElementNode decodeJSONObject( String szName, Map<String, Object > jo, GUID parentGuid ) {\n        String szMetaType = (String) jo.get( \"metaType\" );\n        boolean isNamespace = szMetaType == null || szMetaType.equals( Namespace.class.getSimpleName() );\n        ElementNode elementNode;\n        GUID currentGuid;\n\n        if ( isNamespace ) {\n            Object[] pair = this.affirmNSExisted( szName, parentGuid, jo );\n            Namespace     ns = (Namespace) pair[ 0 ];\n            currentGuid      = (GUID)      pair[ 1 ];\n\n            this.decodeChildren( jo, currentGuid );\n\n            elementNode = ns;\n        }\n        else {\n            Object[] pair;\n            boolean bIsFolderElement = false;\n            if( szMetaType.equals( ApplicationElement.class.getSimpleName() ) ) {\n                pair = this.affirmAppExisted( szName, parentGuid, jo );\n                bIsFolderElement = true;\n            }\n            else if( szMetaType.equals( ServiceElement.class.getSimpleName() ) ) {\n                pair = this.affirmSerExisted( szName, parentGuid, jo );\n            }\n            else {\n                try{\n                    pair = this.decodeExternalElements( szMetaType, szName, parentGuid, jo );\n                }\n                catch ( RuntimeException e ) {\n                    throw new IllegalArgumentException( e );\n                }\n            }\n\n            ElementNode          arc = (ElementNode) pair[ 0 ];\n            ElementNode          neo = (ElementNode) pair[ 1 ];\n\n            if( arc == null ) {\n                currentGuid = this.instrument.put( neo );\n                this.instrument.affirmOwnedNode( parentGuid, currentGuid );\n            }\n            else {\n                currentGuid = arc.getGuid();\n                this.instrument.update( neo );\n          
  }\n\n            if( bIsFolderElement ) {\n                Object services = jo.get( \"services\" );\n                if( services instanceof Map ) {\n                    Map joSer = (Map) services;\n                    this.decodeChildren( joSer, currentGuid );\n                }\n            }\n\n            elementNode = neo;\n        }\n\n        return elementNode;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/marshaling/ServiceJSONEncoder.java",
    "content": "package com.pinecone.hydra.service.kom.marshaling;\n\n\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\n\npublic class ServiceJSONEncoder implements ServiceInstrumentEncoder {\n    protected ServiceInstrument instrument;\n\n    public ServiceJSONEncoder(ServiceInstrument instrument ) {\n        this.instrument = instrument;\n    }\n\n    @Override\n    public Object encode( ElementNode node ) {\n        return node.toJSONObject();\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ApplicationElementOperator.java",
    "content": "package com.pinecone.hydra.service.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.GenericApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.service.kom.source.ApplicationMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.util.List;\n\npublic class ApplicationElementOperator extends ArchElementOperator implements ElementOperator {\n    protected ApplicationNodeManipulator        applicationNodeManipulator;\n    protected ApplicationMetaManipulator        applicationMetaManipulator;\n\n    public ApplicationElementOperator(ElementOperatorFactory factory ) {\n        this( factory.getServiceMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public ApplicationElementOperator(ServiceMasterManipulator masterManipulator, ServiceInstrument serviceInstrument){\n        super( masterManipulator, serviceInstrument);\n        this.applicationNodeManipulator = masterManipulator.getApplicationNodeManipulator();\n        this.applicationMetaManipulator = masterManipulator.getApplicationElementManipulator();\n    }\n\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericApplicationElement applicationElement = (GenericApplicationElement) treeNode;\n\n        GuidAllocator guidAllocator = this.serviceInstrument.getGuidAllocator();\n        GUID applicationNodeGUID = 
guidAllocator.nextGUID();\n        applicationElement.setGuid( applicationNodeGUID );\n        this.applicationNodeManipulator.insert( applicationElement );\n\n\n        GUID descriptionGUID = guidAllocator.nextGUID();\n        if( applicationElement.getMetaGuid() == null ){\n            applicationElement.setMetaGuid( descriptionGUID );\n        }\n        this.applicationMetaManipulator.insert( applicationElement );\n\n\n        //将应用元信息存入元信息表\n        this.nodeMetaManipulator.insert( applicationElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setNodeMetadataGUID(descriptionGUID);\n        node.setGuid(applicationNodeGUID);\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return applicationNodeGUID;\n    }\n\n\n    @Override\n    public void purge( GUID guid ) {\n        //namespace节点需要递归删除其拥有节点若其引用节点，没有其他引用则进行清理\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren(guid);\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        if ( !childNodes.isEmpty() ){\n            List<GUID > subordinates = this.imperialTree.getSubordinates(guid);\n            if ( !subordinates.isEmpty() ){\n                for ( GUID subordinateGuid : subordinates ){\n                    this.purge( subordinateGuid );\n                }\n            }\n            childNodes = this.imperialTree.getChildren( guid );\n            for( GUIDImperialTrieNode childNode : childNodes ){\n                List<GUID > parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid());\n                if ( parentNodes.size() > 1 ){\n                    this.imperialTree.removeInheritance(childNode.getGuid(),guid);\n                }\n                else {\n                    this.purge( childNode.getGuid() );\n                }\n            }\n        }\n\n        if ( 
node.getType().getObjectName().equals( GenericNamespace.class.getName() ) ){\n            this.removeNode(guid);\n        }\n        else {\n            UOI uoi = node.getType();\n            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );\n            if( metaType == null ) {\n                TreeNode newInstance = (TreeNode)uoi.newInstance( new Class<? >[]{ ServiceInstrument.class }, this.serviceInstrument);\n                metaType = newInstance.getMetaType();\n            }\n\n            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );\n            operator.purge( guid );\n        }\n    }\n\n    @Override\n    public ApplicationElement get( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        ApplicationElement applicationElement;\n        if( node.getNodeMetadataGUID() != null ){\n            applicationElement = this.applicationMetaManipulator.getApplicationElement( node.getNodeMetadataGUID(), this.serviceInstrument);\n        }\n        else {\n            applicationElement = new GenericApplicationElement();\n        }\n\n        this.applyCommonMeta( applicationElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) );\n\n        applicationElement.setName( this.applicationNodeManipulator.getApplicationNode(guid).getName() );\n        applicationElement.setGuid(applicationElement.getGuid());\n        return applicationElement;\n    }\n\n    @Override\n    public ApplicationElement get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public ApplicationElement getAsRootDepth( GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n        GenericApplicationElement applicationElement = (GenericApplicationElement) treeNode;\n        this.applicationNodeManipulator.update( applicationElement );\n        this.applicationMetaManipulator.update( 
applicationElement );\n        this.nodeMetaManipulator.update( applicationElement );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    protected void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath(guid);\n        this.applicationMetaManipulator.remove( node.getAttributesGUID() );\n        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );\n        this.applicationNodeManipulator.remove( node.getGuid( ));\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ArchElementOperator.java",
    "content": "package com.pinecone.hydra.service.kom.operator;\n\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.CommonMeta;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\nimport com.pinecone.hydra.service.kom.source.NodeMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\npublic abstract class ArchElementOperator implements ElementOperator {\n    protected ServiceInstrument serviceInstrument;\n    protected ImperialTree                  imperialTree;\n    protected NodeMetaManipulator nodeMetaManipulator;\n    protected ServiceMasterManipulator      serviceMasterManipulator;\n    protected ElementOperatorFactory        factory;\n\n    public ArchElementOperator( ElementOperatorFactory factory ){\n        this( factory.getServiceMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n    public ArchElementOperator( ServiceMasterManipulator masterManipulator, ServiceInstrument serviceInstrument){\n        this.imperialTree = serviceInstrument.getMasterTrieTree();\n        this.serviceInstrument = serviceInstrument;\n        this.nodeMetaManipulator = masterManipulator.getNodeMetaManipulator();\n        this.serviceMasterManipulator = masterManipulator;\n        //this.factory = new GenericServiceOperatorFactory(servicesTree,masterManipulator);\n    }\n\n    public ElementOperatorFactory getOperatorFactory() {\n        return this.factory;\n    }\n\n    protected void applyCommonMeta( ElementNode ele, CommonMeta commonMeta ){\n        if( commonMeta != null ) {\n            ele.setGuid             ( commonMeta.getGuid()             );\n            ele.setScenario         ( commonMeta.getScenario()         );\n            ele.setPrimaryImplLang  ( commonMeta.getPrimaryImplLang()  );\n            ele.setExtraInformation ( commonMeta.getExtraInformation() );\n            
ele.setLevel            ( commonMeta.getLevel()            );\n            ele.setDescription      ( commonMeta.getDescription()      );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ElementOperator.java",
    "content": "package com.pinecone.hydra.service.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface ElementOperator extends TreeNodeOperator {\n    @Override\n    ElementNode get( GUID guid );\n\n    @Override\n    ElementNode get( GUID guid, int depth );\n\n    @Override\n    ElementNode getAsRootDepth( GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ElementOperatorFactory.java",
    "content": "package com.pinecone.hydra.service.kom.operator;\n\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface ElementOperatorFactory extends OperatorFactory {\n    String DefaultServiceNode     =  ServiceElement.class.getSimpleName();\n    String DefaultNamespace       =  Namespace.class.getSimpleName();\n    String DefaultApplicationNode =  ApplicationElement.class.getSimpleName();\n\n    void register( String typeName, TreeNodeOperator functionalNodeOperation );\n\n    void registerMetaType( Class<?> clazz, String metaType );\n\n    void registerMetaType( String classFullName, String metaType );\n\n    String getMetaType( String classFullName );\n\n    ElementOperator getOperator(String typeName );\n\n    ServiceInstrument getServicesTree();\n\n    ServiceMasterManipulator getServiceMasterManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/GenericElementOperatorFactory.java",
    "content": "package com.pinecone.hydra.service.kom.operator;\n\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.service.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.service.kom.entity.GenericApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceElement;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.TreeMap;\n\npublic class GenericElementOperatorFactory implements ElementOperatorFactory {\n    protected ServiceMasterManipulator      serviceMasterManipulator;\n    protected ServiceInstrument serviceInstrument;\n    protected Map<String, TreeNodeOperator> registerer = new HashMap<>();\n\n    protected Map<String, String >             metaTypeMap = new TreeMap<>();\n\n    protected void registerDefaultMetaType( Class<?> genericType ) {\n        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace(\"Generic\",\"\") );\n    }\n\n    protected void registerDefaultMetaTypes() {\n        this.registerDefaultMetaType( GenericNamespace.class );\n        this.registerDefaultMetaType( GenericServiceElement.class );\n        this.registerDefaultMetaType( GenericApplicationElement.class );\n    }\n\n    public GenericElementOperatorFactory(ServiceInstrument serviceInstrument, ServiceMasterManipulator serviceMasterManipulator ){\n        this.serviceInstrument = serviceInstrument;\n        this.serviceMasterManipulator = serviceMasterManipulator;\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultServiceNode,\n                new ServiceElementOperator( this )\n        );\n\n        this.registerer.put(\n                ElementOperatorFactory.DefaultApplicationNode,\n                new ApplicationElementOperator(this)\n        );\n\n        this.registerer.put(\n                
ElementOperatorFactory.DefaultNamespace,\n                new NamespaceOperator(this)\n        );\n\n        this.registerDefaultMetaTypes();\n    }\n    @Override\n    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {\n        this.registerer.put( typeName, functionalNodeOperation );\n    }\n\n    @Override\n    public void registerMetaType( Class<?> clazz, String metaType ){\n        this.registerMetaType( clazz.getName(), metaType );\n    }\n\n    @Override\n    public void registerMetaType( String classFullName, String metaType ){\n        this.metaTypeMap.put( classFullName, metaType );\n    }\n\n    @Override\n    public ServiceInstrument getServicesTree() {\n        return this.serviceInstrument;\n    }\n\n    @Override\n    public ServiceMasterManipulator getServiceMasterManipulator() {\n        return this.serviceMasterManipulator;\n    }\n\n    @Override\n    public String getMetaType( String classFullName ) {\n        return this.metaTypeMap.get( classFullName );\n    }\n\n    @Override\n    public ElementOperator getOperator(String typeName ) {\n        //Debug.trace( this.registerer.toString() );\n        return (ElementOperator) this.registerer.get( typeName );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/NamespaceOperator.java",
    "content": "package com.pinecone.hydra.service.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.service.kom.GenericNamespaceRules;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.GenericApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.service.kom.source.NamespaceRulesManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.util.List;\n\npublic class NamespaceOperator extends ArchElementOperator implements ElementOperator {\n    protected ServiceNamespaceManipulator   namespaceManipulator;\n    protected NamespaceRulesManipulator     namespaceRulesManipulator;\n\n    public NamespaceOperator( ElementOperatorFactory factory ) {\n        this( factory.getServiceMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public NamespaceOperator( ServiceMasterManipulator masterManipulator, ServiceInstrument serviceInstrument){\n        super( masterManipulator, serviceInstrument);\n        this.namespaceManipulator = masterManipulator.getNamespaceManipulator();\n        this.namespaceRulesManipulator = masterManipulator.getNamespaceRulesManipulator();\n    }\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericNamespace ns = ( GenericNamespace ) treeNode;\n\n        //存节点基础信息\n        GuidAllocator          guidAllocator = this.serviceInstrument.getGuidAllocator();\n        GUID              namespaceRulesGuid = 
ns.getGuid();\n        GenericNamespaceRules namespaceRules = ns.getClassificationRules();\n        if ( namespaceRules!= null ){\n            namespaceRules.setGuid( namespaceRulesGuid );\n        }\n        else {\n            namespaceRulesGuid = null;\n        }\n\n        GUID namespaceGuid = guidAllocator.nextGUID();\n        ns.setGuid( namespaceGuid );\n        ns.setRulesGUID( namespaceRulesGuid );\n        this.namespaceManipulator.insert( ns );\n\n        //存元信息\n        GUID metadataGUID = guidAllocator.nextGUID();\n        ns.setMetaGuid( metadataGUID );\n        this.nodeMetaManipulator.insertNS( ns );\n\n\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setBaseDataGUID( namespaceRulesGuid );\n        node.setGuid( namespaceGuid );\n        node.setNodeMetadataGUID( metadataGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return namespaceGuid;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        //namespace节点需要递归删除其拥有节点若其引用节点，没有其他引用则进行清理\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren(guid);\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        if ( !childNodes.isEmpty() ){\n            List<GUID > subordinates = this.imperialTree.getSubordinates(guid);\n            if ( !subordinates.isEmpty() ){\n                for ( GUID subordinateGuid : subordinates ){\n                    this.purge( subordinateGuid );\n                }\n            }\n            childNodes = this.imperialTree.getChildren( guid );\n            for( GUIDImperialTrieNode childNode : childNodes ){\n                List<GUID > parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid());\n                if ( parentNodes.size() > 1 ){\n                    this.imperialTree.removeInheritance(childNode.getGuid(),guid);\n                }\n                else {\n           
         this.purge( childNode.getGuid() );\n                }\n            }\n        }\n\n        if ( node.getType().getObjectName().equals(GenericNamespace.class.getName()) ||  node.getType().getObjectName().equals(GenericApplicationElement.class.getName())){\n            this.removeNode(guid);\n        }\n        else {\n            UOI uoi = node.getType();\n            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );\n            if( metaType == null ) {\n                TreeNode newInstance = (TreeNode)uoi.newInstance( new Class<? >[]{ ServiceInstrument.class }, this.serviceInstrument);\n                metaType = newInstance.getMetaType();\n            }\n\n            ElementOperator operator = this.getOperatorFactory().getOperator( metaType );\n            operator.purge( guid );\n        }\n    }\n\n    @Override\n    public Namespace get( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        GenericNamespace                      namespace = new GenericNamespace( this.serviceInstrument);\n        GenericNamespaceRules            namespaceRules = this.namespaceRulesManipulator.getNamespaceRules( node.getAttributesGUID() );\n        GUIDImperialTrieNode guidDistributedTrieNode = this.imperialTree.getNode( node.getGuid() );\n\n        if ( namespaceRules != null ){\n            namespace.setRulesGUID( namespaceRules.getGuid() );\n            namespace.setClassificationRules( namespaceRules );\n        }\n\n        GUID metaGuid = guidDistributedTrieNode.getNodeMetadataGUID();\n        namespace.setDistributedTreeNode( guidDistributedTrieNode );\n        namespace.setName( this.namespaceManipulator.getNamespace( guid ).getName() );\n        this.applyCommonMeta( namespace, this.nodeMetaManipulator.getNodeCommonMeta( metaGuid ) ); // GUID / MetaGUID difference.\n        namespace.setGuid( guid );\n        namespace.setMetaGuid( metaGuid );\n\n        return namespace;\n    }\n\n    
@Override\n    public Namespace get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public Namespace getAsRootDepth( GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        GenericNamespace ns = ( GenericNamespace ) nodeWideData;\n        this.namespaceManipulator.update( ns );\n        GenericNamespaceRules classificationRules = ns.getClassificationRules();\n        this.namespaceRulesManipulator.update( classificationRules );\n        this.nodeMetaManipulator.update( ns );\n    }\n\n    @Override\n    public void updateName( GUID guid, String name ) {\n\n    }\n\n    protected void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.namespaceManipulator.remove( node.getGuid() );\n        this.namespaceRulesManipulator.remove( node.getNodeMetadataGUID() );\n        this.nodeMetaManipulator.remove( node.getAttributesGUID() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/operator/ServiceElementOperator.java",
    "content": "package com.pinecone.hydra.service.kom.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceElement;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceNodeManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\npublic class ServiceElementOperator extends ArchElementOperator implements ElementOperator {\n    protected ServiceNodeManipulator  serviceNodeManipulator;\n    protected ServiceMetaManipulator  serviceMetaManipulator;\n\n    public ServiceElementOperator( ElementOperatorFactory factory ) {\n        this( factory.getServiceMasterManipulator(),factory.getServicesTree() );\n        this.factory = factory;\n    }\n\n    public ServiceElementOperator( ServiceMasterManipulator masterManipulator, ServiceInstrument serviceInstrument){\n        super( masterManipulator, serviceInstrument);\n       this.serviceNodeManipulator = masterManipulator.getServiceNodeManipulator();\n       this.serviceMetaManipulator = masterManipulator.getServiceMetaManipulator();\n\n    }\n\n\n    @Override\n    public GUID insert( TreeNode treeNode ) {\n        GenericServiceElement serviceElement = (GenericServiceElement) treeNode;\n\n        //将信息写入数据库\n        //将节点信息存入应用节点表\n        GuidAllocator guidAllocator = this.serviceInstrument.getGuidAllocator();\n        GUID serviceNodeGUID = guidAllocator.nextGUID();\n        serviceElement.setGuid(serviceNodeGUID);\n        this.serviceNodeManipulator.insert( serviceElement );\n\n        //将应用节点基础信息存入信息表\n        GUID 
metaGUID = guidAllocator.nextGUID();\n        if ( serviceElement.getMetaGuid() == null ){\n            serviceElement.setMetaGuid( metaGUID );\n        }\n        this.serviceMetaManipulator.insert( serviceElement );\n\n\n        //将应用元信息存入元信息表\n       this.nodeMetaManipulator.insert( serviceElement );\n\n\n        //将节点信息存入主表\n        GUIDImperialTrieNode node = new GUIDImperialTrieNode();\n        node.setNodeMetadataGUID( metaGUID );\n        node.setGuid( serviceNodeGUID );\n        node.setType( UOIUtils.createLocalJavaClass( treeNode.getClass().getName() ) );\n        this.imperialTree.insert( node );\n        return serviceNodeGUID;\n    }\n\n    @Override\n    public void purge( GUID guid ) {\n        this.removeNode( guid );\n    }\n\n    @Override\n    public ServiceElement get( GUID guid ) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        ServiceElement serviceElement = new GenericServiceElement();\n        if( node.getNodeMetadataGUID() != null ){\n            serviceElement = this.serviceMetaManipulator.getServiceMeta( node.getNodeMetadataGUID() );\n        }\n\n        this.applyCommonMeta( serviceElement, this.nodeMetaManipulator.getNodeCommonMeta( guid ) );\n\n        serviceElement.setDistributedTreeNode(node);\n        serviceElement.setGuid( guid );\n        serviceElement.setName( this.serviceNodeManipulator.getServiceNode(guid).getName() );\n\n        return serviceElement;\n    }\n\n    @Override\n    public ServiceElement get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public ServiceElement getAsRootDepth( GUID guid ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void update( TreeNode nodeWideData ) {\n        GenericServiceElement serviceElement = (GenericServiceElement) nodeWideData;\n        this.serviceNodeManipulator.update( serviceElement );\n        this.serviceMetaManipulator.update( serviceElement );\n        
this.nodeMetaManipulator.update( serviceElement );\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.serviceNodeManipulator.remove( node.getGuid() );\n        this.serviceMetaManipulator.remove( node.getAttributesGUID() );\n        this.nodeMetaManipulator.remove( node.getNodeMetadataGUID() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ApplicationMetaManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\n\npublic interface ApplicationMetaManipulator extends Pinenut {\n    void insert( ApplicationElement applicationElement );\n\n    void remove( GUID guid );\n\n    ApplicationElement getApplicationElement( GUID guid, ServiceInstrument serviceInstrument);\n\n    void update( ApplicationElement applicationElement );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ApplicationNodeManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface ApplicationNodeManipulator extends GUIDNameManipulator {\n    void insert( ApplicationElement applicationElement );\n\n    void remove( GUID guid);\n\n    ApplicationElement getApplicationNode(GUID guid);\n\n    void update( ApplicationElement applicationElement );\n\n    List<ApplicationElement> fetchApplicationNodeByName( String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/NamespaceRulesManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.GenericNamespaceRules;\n\npublic interface NamespaceRulesManipulator {\n    void insert(GenericNamespaceRules classificationRules);\n    void remove(GUID guid);\n    GenericNamespaceRules getNamespaceRules(GUID guid);\n    void update(GenericNamespaceRules classificationRules);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/NodeMetaManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.ServiceFamilyNode;\nimport com.pinecone.hydra.service.kom.entity.CommonMeta;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\n\npublic interface NodeMetaManipulator extends Pinenut {\n\n    void insert( ServiceFamilyNode node );\n\n    void insertNS( Namespace node );\n\n    void remove( GUID guid );\n\n    CommonMeta getNodeCommonMeta( GUID guid );\n\n    void update( ServiceFamilyNode node );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceInstanceManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;\n\npublic interface ServiceInstanceManipulator extends Pinenut {\n    void initServiceInstance( ServiceInstanceEntry element );\n\n    ServiceInstanceEntry queryServiceInstance( GUID instanceId );\n\n    void updateServiceInstance( ServiceInstanceEntry element );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceMasterManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\n\npublic interface ServiceMasterManipulator extends KOIMasterManipulator {\n    TrieTreeManipulator getTrieTreeManipulator() ;\n\n    NodeMetaManipulator getNodeMetaManipulator();\n\n    ApplicationNodeManipulator getApplicationNodeManipulator();\n\n    ApplicationMetaManipulator getApplicationElementManipulator();\n\n    ServiceNodeManipulator getServiceNodeManipulator();\n\n    ServiceMetaManipulator getServiceMetaManipulator();\n\n    ServiceNamespaceManipulator getNamespaceManipulator();\n\n    ServiceInstanceManipulator getServiceInstanceManipulator();\n\n    NamespaceRulesManipulator getNamespaceRulesManipulator();\n\n    TireOwnerManipulator getTireOwnerManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceMetaManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\n\npublic interface ServiceMetaManipulator {\n    void insert( ServiceElement serviceElement );\n\n    void remove( GUID guid );\n\n    void update( ServiceElement serviceElement );\n\n    ServiceElement getServiceMeta( GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceNamespaceManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface ServiceNamespaceManipulator extends GUIDNameManipulator {\n    void insert( Namespace ns );\n\n    void remove( GUID guid );\n\n    Namespace getNamespace( GUID guid );\n\n    void update( Namespace ns );\n\n    List<Namespace > fetchNamespaceNodeByName( String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-service/src/main/java/com/pinecone/hydra/service/kom/source/ServiceNodeManipulator.java",
    "content": "package com.pinecone.hydra.service.kom.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface ServiceNodeManipulator extends GUIDNameManipulator {\n    //ServiceNode的CRUD\n    void insert( ServiceElement serviceNode );\n\n    void remove(GUID UUID);\n\n    ServiceElement getServiceNode(GUID UUID);\n\n    void update(ServiceElement serviceNode);\n\n    List<ServiceElement> fetchServiceNodeByName(String name);\n\n    @Override\n    List<GUID> getGuidsByName(String name);\n\n    @Override\n    List<GUID> getGuidsByNameID(String name, GUID guid);\n\n    List<ServiceElement> fetchAllService();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-framework-storage</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        
<dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulf-lib-oltp-rdb</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture-storage</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/ArchFileObjectMeta.java",
    "content": "package com.pinecone.hydra.storage;\n\npublic abstract class ArchFileObjectMeta implements CheckedFile {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/ArchStorageConfig.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.util.Map;\n\npublic abstract class ArchStorageConfig extends ArchKernelObjectConfig implements StorageConfig {\n    protected String mszLocalHostGuid = StorageConstants.LocalhostGUID.toString();\n\n    protected String mszDefaultVolumeGuid   ;\n\n    protected String mszDefaultTempFilePath ;\n    protected ArchStorageConfig(){\n        super();\n    }\n\n    public ArchStorageConfig( Map<String, Object> config ){\n        super( config );\n        this.mszLocalHostGuid       = (String) config.getOrDefault(\"LocalHostGuid\", StorageConstants.LocalhostGUID.toString());\n        this.mszDefaultVolumeGuid   = (String) config.get(\"DefaultVolumeGuid\");\n        this.mszDefaultTempFilePath = (String) config.get(\"DefaultTempFilePath\");\n    }\n\n    @Override\n    public GUID getLocalHostGuid() {\n        return GUIDs.GUID128(this.mszLocalHostGuid);\n    }\n\n    @Override\n    public String getDefaultVolumeGuid() {\n        return this.mszDefaultVolumeGuid;\n    }\n\n    @Override\n    public String getDefaultTempFilePath() {\n        return this.mszDefaultTempFilePath;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/CheckedFile.java",
    "content": "package com.pinecone.hydra.storage;\n\npublic interface CheckedFile extends UFile {\n\n    long getChecksum();\n\n    void setChecksum(long checksum);\n\n    int getParityCheck();\n\n    void setParityCheck(int parityCheck);\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/RandomAccessChanface.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.hydra.storage.io.Chanface;\n\nimport java.io.IOException;\n\npublic interface RandomAccessChanface extends Chanface {\n    void mark(int readlimit);\n    void reset() throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/ReadChannelRecalled.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class ReadChannelRecalled implements Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageConfig.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface StorageConfig extends KernelObjectConfig {\n    GUID getLocalHostGuid();\n\n    String getDefaultVolumeGuid();\n\n    String getDefaultTempFilePath();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageConstants.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\npublic final class StorageConstants {\n    public static final String PathSeparator            = \"/\";\n    public static final String period                   = \".\";\n    public static final String StorageVersionSignature  = \"Titan\";\n    public static final GUID             LocalhostGUID  = GUIDs.GUID128( \"00000000-0000-0000-0000-000000000000\" );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageExportIORequest.java",
    "content": "package com.pinecone.hydra.storage;\n\npublic interface StorageExportIORequest extends StorageInstructRequest {\n    String getSourceName(); // 具体存在磁盘的为 / I/O寻址  Source => Address\n    void setSourceName( String sourceName );\n\n    long getCrc32();\n    void setCrc32( long crc32 );\n\n    Number getSize(); // 欲存储的声明大小\n    void setSize( Number size );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageIOResponse.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\n\nimport java.util.zip.CRC32;\n\npublic interface StorageIOResponse extends StorageInstructResponse {\n    GUID getObjectGuid();\n    void setObjectGuid( GUID objectGuid );\n\n    GUID getBottomGuid();\n    void setBottomGuid( GUID bottomGuid );\n\n    long getChecksum();\n    void setChecksum( long checksum );\n\n    long getParityCheck();\n    void setParityCheck( long parityCheck );\n\n    CRC32 getCre32();\n    void setCrc32( CRC32 crc32 );\n\n    String getSourceName();\n    void setSourceName( String name );\n\n    Cluster toCluster();\n    FileNode toFileNode();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageInstructRequest.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface StorageInstructRequest extends Pinenut {\n    GUID getStorageObjectGuid(); // 存储单位的标识（指针）\n\n    void setStorageObjectGuid( GUID storageObjectGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageInstructResponse.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface StorageInstructResponse extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageNaming.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface StorageNaming extends Pinenut {\n    String naming( String objectName, String identity );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/StorageReceiveIORequest.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.zip.CRC32;\n\npublic interface StorageReceiveIORequest extends Pinenut {\n    String getName();\n    void setName( String name );\n\n    Number getSize();\n    void setSize( Number size );\n\n    GUID getStorageObjectGuid();\n    void setStorageObjectGuid( GUID storageObjectGuid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/TitanStorageExportIORequest.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class TitanStorageExportIORequest implements StorageExportIORequest {\n    private String sourceName;\n    private long crc32;\n    private Number size;\n    private GUID   storageGuid;\n\n    @Override\n    public String getSourceName() {\n        return this.sourceName;\n    }\n\n    @Override\n    public void setSourceName(String sourceName) {\n        this.sourceName = sourceName;\n    }\n\n    @Override\n    public long getCrc32() {\n        return this.crc32;\n    }\n\n    @Override\n    public void setCrc32(long crc32) {\n        this.crc32 = crc32;\n    }\n\n    @Override\n    public Number getSize() {\n        return this.size;\n    }\n\n    @Override\n    public void setSize(Number size) {\n        this.size = size;\n    }\n\n    @Override\n    public GUID getStorageObjectGuid() {\n        return this.storageGuid;\n    }\n\n    @Override\n    public void setStorageObjectGuid(GUID storageObjectGuid) {\n        this.storageGuid = storageObjectGuid;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/TitanStorageIOResponse.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\n\nimport java.util.zip.CRC32;\n\npublic class TitanStorageIOResponse implements StorageIOResponse {\n    private GUID objectGuid;\n    private long checksum;\n    private long parityCheck;\n    private CRC32 crc32;\n    private String sourceName;\n    private GUID bottomGuid;\n\n    @Override\n    public GUID getObjectGuid() {\n        return this.objectGuid;\n    }\n\n    @Override\n    public void setObjectGuid(GUID objectGuid) {\n        this.objectGuid = objectGuid;\n    }\n\n    @Override\n    public long getChecksum() {\n        return this.checksum;\n    }\n\n    @Override\n    public void setChecksum(long checksum) {\n        this.checksum = checksum;\n    }\n\n    @Override\n    public long getParityCheck() {\n        return this.parityCheck;\n    }\n\n    @Override\n    public void setParityCheck(long parityCheck) {\n        this.parityCheck = parityCheck;\n    }\n\n    @Override\n    public CRC32 getCre32() {\n        return this.crc32;\n    }\n\n    @Override\n    public void setCrc32(CRC32 crc32) {\n        this.crc32 = crc32;\n    }\n\n    @Override\n    public String getSourceName() {\n        return this.sourceName;\n    }\n\n    @Override\n    public void setSourceName(String sourceName) {\n        this.sourceName = sourceName;\n    }\n\n    @Override\n    public GUID getBottomGuid() {\n        return this.bottomGuid;\n    }\n\n    @Override\n    public void setBottomGuid(GUID bottomGuid) {\n        this.bottomGuid = bottomGuid;\n    }\n\n    @Override\n    public Cluster toCluster() {\n        return null;\n    }\n\n    @Override\n    public FileNode toFileNode() {\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/TitanStorageNaming.java",
    "content": "package com.pinecone.hydra.storage;\n\npublic class TitanStorageNaming implements StorageNaming{\n    @Override\n    public String naming( String objectName, String identity ) {\n        return String.format( \"%s_%s.storage\", objectName, identity ); // TODO! CONST\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/TitanStorageReceiveIORequest.java",
    "content": "package com.pinecone.hydra.storage;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class TitanStorageReceiveIORequest implements StorageReceiveIORequest {\n    private String name;\n    private Number size;\n    private GUID   storageObjectGuid;\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public Number getSize() {\n        return this.size;\n    }\n\n    @Override\n    public void setSize(Number size) {\n        this.size = size;\n    }\n\n    @Override\n    public GUID getStorageObjectGuid() {\n        return this.storageObjectGuid;\n    }\n\n    @Override\n    public void setStorageObjectGuid(GUID storageObjectGuid) {\n        this.storageObjectGuid = storageObjectGuid;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/BucketInstrument.java",
    "content": "package com.pinecone.hydra.storage.bucket;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.entity.Bucket;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.entity.SiteNode;\nimport com.pinecone.hydra.storage.bucket.source.SiteManipulator;\n\nimport java.util.List;\n\npublic interface BucketInstrument extends Pinenut {\n    GUID createBucket(Bucket bucket);\n\n    void removeBucket( GUID bucketGuid );\n\n    void removeBucketByAccountAndBucketName( GUID accountGuid, String bucketName );\n\n    Bucket queryBucketByBucketGuid( GUID bucketGuid );\n\n    List<Bucket> queryBucketsByUserGuid( GUID userGuid );\n\n    SiteManipulator getSiteManipulator();\n\n    GUID createSite(Site site);\n\n    void removeSite( GUID siteGuid );\n\n    void removeSite( String siteName );\n\n    Site querySite( GUID siteGuid );\n\n    List<Site> listSite();\n\n    GUID createSiteNode( SiteNode siteNode );\n\n    void removeSiteNode( GUID siteNodeGuid );\n\n    SiteNode querySiteNode( GUID siteNodeGuid );\n\n    List<SiteNode> querySiteNodeBySiteGuid( GUID siteGuid );\n\n    void updateSiteNode( SiteNode siteNode );\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/TitanBucketInstrument.java",
    "content": "package com.pinecone.hydra.storage.bucket;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.entity.Bucket;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.entity.SiteNode;\nimport com.pinecone.hydra.storage.bucket.source.BucketManipulator;\nimport com.pinecone.hydra.storage.bucket.source.BucketMasterManipulator;\nimport com.pinecone.hydra.storage.bucket.source.SiteManipulator;\nimport com.pinecone.hydra.storage.bucket.source.SiteNodeManipulator;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.time.LocalDateTime;\nimport java.util.List;\n\npublic class TitanBucketInstrument implements BucketInstrument {\n    protected Hydrogen                  hydrogen;\n\n    protected BucketMasterManipulator   masterManipulator;\n\n    protected BucketManipulator         bucketManipulator;\n\n    protected SiteManipulator           siteManipulator;\n\n    protected SiteNodeManipulator       siteNodeManipulator;\n\n    protected GuidAllocator             guidAllocator;\n\n    public TitanBucketInstrument(Hydrogen hydrogen, KOIMasterManipulator masterManipulator, String name ){\n        this.hydrogen               = hydrogen;\n        this.masterManipulator      = (BucketMasterManipulator) masterManipulator;\n        this.guidAllocator          = GUIDs.newGuidAllocator();\n\n        this.bucketManipulator      = this.masterManipulator.getBucketManipulator();\n        this.siteManipulator        = this.masterManipulator.getSiteManipulator();\n        this.siteNodeManipulator    = this.masterManipulator.getSiteNodeManipulator();\n    }\n\n    public TitanBucketInstrument( Hydrogen hydrogen, 
KOIMasterManipulator masterManipulator ){\n        this( hydrogen, masterManipulator, KOMFileSystem.class.getSimpleName() );\n    }\n\n    public TitanBucketInstrument(KOIMappingDriver driver ) {\n        this(\n                driver.getSystem(),\n                driver.getMasterManipulator()\n        );\n    }\n\n    @Override\n    public GUID createBucket(Bucket bucket) {\n        GUID guid = this.guidAllocator.nextGUID();\n        bucket.setBucketGuid( guid );\n        this.bucketManipulator.insert( bucket );\n        return guid;\n    }\n\n    @Override\n    public void removeBucket(GUID bucketGuid) {\n        this.bucketManipulator.remove( bucketGuid );\n    }\n\n    @Override\n    public void removeBucketByAccountAndBucketName(GUID accountGuid, String bucketName) {\n\n    }\n\n    @Override\n    public Bucket queryBucketByBucketGuid(GUID bucketGuid) {\n        return this.bucketManipulator.queryBucketByBucketGuid( bucketGuid );\n    }\n\n    @Override\n    public List<Bucket> queryBucketsByUserGuid(GUID userGuid) {\n        return this.bucketManipulator.queryBucketsByUserGuid( userGuid );\n    }\n\n    @Override\n    public SiteManipulator getSiteManipulator() {\n        return this.siteManipulator;\n    }\n\n    @Override\n    public GUID createSite(Site site) {\n        GUID guid = this.guidAllocator.nextGUID();\n        site.setSiteGuid(guid);\n        site.setCreateTime(LocalDateTime.now());\n        this.siteManipulator.insert(site);\n        return guid;\n    }\n\n    @Override\n    public void removeSite(GUID siteGuid) {\n        this.siteManipulator.remove(siteGuid);\n    }\n\n    @Override\n    public void removeSite(String siteName) {\n        this.siteManipulator.removeByName( siteName );\n    }\n\n    @Override\n    public Site querySite(GUID siteGuid) {\n        return this.siteManipulator.querySite(siteGuid);\n    }\n\n    @Override\n    public List<Site> listSite() {\n        return this.siteManipulator.listSite();\n    }\n\n    @Override\n    
public GUID createSiteNode(SiteNode siteNode) {\n        if ( siteNode.getNodeGuid() == null ){\n            siteNode.setNodeGuid( this.guidAllocator.nextGUID() );\n        }\n\n        this.siteNodeManipulator.insert( siteNode );\n        return siteNode.getNodeGuid();\n    }\n\n    @Override\n    public void removeSiteNode(GUID siteNodeGuid) {\n        this.siteNodeManipulator.remove( siteNodeGuid );\n    }\n\n    @Override\n    public SiteNode querySiteNode(GUID siteNodeGuid) {\n        return this.siteNodeManipulator.querySiteNode( siteNodeGuid );\n    }\n\n    @Override\n    public List<SiteNode> querySiteNodeBySiteGuid( GUID siteGuid ) {\n        return this.siteNodeManipulator.querySiteNodeBySiteGuid( siteGuid );\n    }\n\n    @Override\n    public void updateSiteNode(SiteNode siteNode) {\n        this.siteNodeManipulator.update( siteNode );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/Bucket.java",
    "content": "package com.pinecone.hydra.storage.bucket.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface Bucket extends Pinenut {\n    int getEnumId();\n\n    String getBucketName();\n    void setBucketName( String bucketName );\n\n    GUID getBucketGuid();\n    void setBucketGuid( GUID bucketGuid );\n\n    GUID getUserGuid();\n    void setUserGuid( GUID userGuid );\n\n    GUID getMountPoint();\n    void setMountPoint( GUID mountPoint );\n\n    LocalDateTime  getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/GenericBucket.java",
    "content": "package com.pinecone.hydra.storage.bucket.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.time.LocalDateTime;\n\npublic class GenericBucket implements Bucket{\n    protected int enumId;\n\n    protected String bucketName;\n\n    protected LocalDateTime createTime;\n\n    protected GUID bucketGuid;\n\n    protected GUID userGuid;\n\n    protected GUID mountPoint;\n\n    protected GuidAllocator guidAllocator;\n\n    public GenericBucket(){\n        this.guidAllocator = GUIDs.newGuidAllocator();\n        this.bucketGuid = this.guidAllocator.nextGUID();\n    }\n\n    @Override\n    public int getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public String getBucketName() {\n        return this.bucketName;\n    }\n\n    @Override\n    public void setBucketName(String bucketName) {\n        this.bucketName = bucketName;\n    }\n\n    @Override\n    public GUID getBucketGuid() {\n        return this.bucketGuid;\n    }\n\n    @Override\n    public void setBucketGuid(GUID bucketGuid) {\n        this.bucketGuid = bucketGuid;\n    }\n\n    @Override\n    public GUID getUserGuid() {\n        return this.userGuid;\n    }\n\n    @Override\n    public void setUserGuid(GUID userGuid) {\n        this.userGuid = userGuid;\n    }\n\n    @Override\n    public GUID getMountPoint() {\n        return this.mountPoint;\n    }\n\n    @Override\n    public void setMountPoint(GUID mountPoint) {\n        this.mountPoint = mountPoint;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/GenericSite.java",
    "content": "package com.pinecone.hydra.storage.bucket.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\nimport java.time.LocalDateTime;\n\npublic class GenericSite implements Site{\n    private long enumId;\n\n    private String siteName;\n\n    private LocalDateTime createTime;\n\n    private GUID siteGuid;\n\n    private GUID mountPointGuid;\n\n    public GenericSite(){}\n\n    public GenericSite(long enumId, String siteName, LocalDateTime createTime, GUID siteGuid, GUID mountPointGuid) {\n        this.enumId = enumId;\n        this.siteName = siteName;\n        this.createTime = createTime;\n        this.siteGuid = siteGuid;\n        this.mountPointGuid = mountPointGuid;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public String getSiteName() {\n        return this.siteName;\n    }\n\n    @Override\n    public void setSiteName(String siteName) {\n        this.siteName = siteName;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public GUID getSiteGuid() {\n        return this.siteGuid;\n    }\n\n    @Override\n    public void setSiteGuid(GUID siteGuid) {\n        this.siteGuid = siteGuid;\n    }\n\n    @Override\n    public GUID getMountPointGuid() {\n        return this.mountPointGuid;\n    }\n\n    @Override\n    public void setMountPointGuid(GUID mountPointGuid) {\n        this.mountPointGuid = mountPointGuid;\n    }\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n  
  }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/GenericSiteNode.java",
    "content": "package com.pinecone.hydra.storage.bucket.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic class GenericSiteNode implements SiteNode{\n    protected long enumId;\n\n    protected String nodeName;\n\n    protected GUID nodeGuid;\n\n    protected GUID siteGuid;\n\n    protected int state;\n\n    protected int isEnabled;\n\n    protected GUID relatedService;\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public String getNodeName() {\n        return this.nodeName;\n    }\n\n    @Override\n    public void setNodeName(String nodeName) {\n        this.nodeName = nodeName;\n    }\n\n    @Override\n    public GUID getNodeGuid() {\n        return this.nodeGuid;\n    }\n\n    @Override\n    public void setNodeGuid(GUID nodeGuid) {\n        this.nodeGuid = nodeGuid;\n    }\n\n    @Override\n    public GUID getSiteGuid() {\n        return this.siteGuid;\n    }\n\n    @Override\n    public void setSiteGuid(GUID siteGuid) {\n        this.siteGuid = siteGuid;\n    }\n\n    @Override\n    public int getState() {\n        return this.state;\n    }\n\n    @Override\n    public void setState(int state) {\n        this.state = state;\n    }\n\n    @Override\n    public int getIsEnabled() {\n        return this.isEnabled;\n    }\n\n    @Override\n    public void setIsEnabled(int isEnabled) {\n        this.isEnabled = isEnabled;\n    }\n\n    @Override\n    public GUID getRelatedService() {\n        return this.relatedService;\n    }\n\n    @Override\n    public void setRelatedService(GUID relatedService) {\n        this.relatedService = relatedService;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        
return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/Site.java",
    "content": "package com.pinecone.hydra.storage.bucket.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface Site extends Pinenut {\n    long getEnumId();\n    void setEnumId( long enumId);\n\n    String getSiteName();\n    void setSiteName( String siteName );\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    GUID getSiteGuid();\n    void setSiteGuid( GUID siteGuid );\n\n    GUID getMountPointGuid();\n    void setMountPointGuid( GUID mountPointGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/entity/SiteNode.java",
    "content": "package com.pinecone.hydra.storage.bucket.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface SiteNode extends Pinenut {\n    long getEnumId();\n    void setEnumId( long enumId );\n\n    String getNodeName();\n    void setNodeName( String nodeName );\n\n    GUID getNodeGuid();\n    void setNodeGuid( GUID nodeGuid );\n\n    GUID getSiteGuid();\n    void setSiteGuid( GUID siteGuid );\n\n    int getState();\n    void setState( int state );\n\n    int getIsEnabled();\n    void setIsEnabled( int isEnabled );\n\n    GUID getRelatedService();\n    void setRelatedService( GUID relatedService );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/source/BucketManipulator.java",
    "content": "package com.pinecone.hydra.storage.bucket.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.entity.Bucket;\n\nimport java.util.List;\n\npublic interface BucketManipulator extends Pinenut {\n    void insert( Bucket bucket );\n    void remove( GUID bucketGuid );\n\n    void removeByAccountAndBucketName( GUID accountGuid, String bucketName );\n    Bucket queryBucketByBucketGuid( GUID bucketGuid );\n    List<Bucket> queryBucketsByUserGuid( GUID userGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/source/BucketMasterManipulator.java",
    "content": "package com.pinecone.hydra.storage.bucket.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface BucketMasterManipulator extends KOIMasterManipulator {\n    BucketManipulator getBucketManipulator();\n\n    SiteManipulator   getSiteManipulator();\n\n    SiteNodeManipulator getSiteNodeManipulator();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/source/SiteManipulator.java",
    "content": "package com.pinecone.hydra.storage.bucket.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\n\nimport java.util.List;\n\npublic interface SiteManipulator extends Pinenut {\n    void insert(Site site );\n\n    void remove( GUID siteGuid );\n\n    void removeByName( String siteName );\n\n    Site querySite( GUID siteGuid );\n\n    Site querySiteByName( String siteName );\n\n    List<Site> listSite();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/bucket/source/SiteNodeManipulator.java",
    "content": "package com.pinecone.hydra.storage.bucket.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.entity.SiteNode;\n\nimport java.util.List;\n\npublic interface SiteNodeManipulator extends Pinenut {\n    void insert(SiteNode siteNode );\n\n    void remove( GUID siteNodeGuid );\n\n    SiteNode querySiteNode( GUID siteNodeGuid );\n\n    List<SiteNode> querySiteNodeBySiteGuid( GUID siteGuid );\n\n    void update( SiteNode siteNode );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/ClusterSegmentNaming.java",
    "content": "package com.pinecone.hydra.storage.file;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface ClusterSegmentNaming extends Pinenut {\n    String naming( String fileName, long segId, String crc3 );\n\n    String naming (String fileName, GUID frameGuid, int threadId );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/FileConstants.java",
    "content": "package com.pinecone.hydra.storage.file;\n\npublic final class FileConstants {\n    public static final Number DefaultClusterSize = 10 * 1024 * 1024L; // 10 MB\n\n    public static final String StorageVersionSignature = \"Generic\";\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/FileSystemConfig.java",
    "content": "package com.pinecone.hydra.storage.file;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.StorageConfig;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface FileSystemConfig extends StorageConfig {\n    String getVersionSignature();\n\n    Number getClusterSize();\n\n    GUID getLocalhostGUID();\n\n    Number getmTinyFileStripSizing();\n\n    long getPathQueryExpiryTimeHotMil();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/KOFSClusterSegmentNaming.java",
    "content": "package com.pinecone.hydra.storage.file;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class KOFSClusterSegmentNaming implements ClusterSegmentNaming {\n    @Override\n    public String naming( String fileName,long segId,String crc3 ){\n        return String.format( \"%s_seg%d_%s.frame\", fileName, segId, crc3 );\n    }\n\n    @Override\n    public String naming(String fileName, GUID frameGuid, int threadId) {\n        return String.format( \"%s_%s-%d.strip\", fileName, frameGuid.toString(), threadId );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/KOMFileSystem.java",
    "content": "package com.pinecone.hydra.storage.file;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.StorageConfig;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.file.entity.ClusterPage;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.RemoteCluster;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.storage.file.transmit.exporter.FileExportEntity;\nimport com.pinecone.hydra.storage.file.transmit.receiver.FileReceiveEntity;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.system.ko.kom.ReparseKOMTree;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.TreeMap;\n\npublic interface KOMFileSystem extends ReparseKOMTree {\n    FileSystemConfig  KernelFileSystemConfig = new KernelFileSystemConfig();\n\n    @Override\n    String getPath( GUID guid );\n\n    @Override\n    String getFullName( GUID guid );\n\n    @Override\n    GUID put( TreeNode treeNode );\n\n    @Override\n    FileTreeNode get(GUID guid );\n\n    @Override\n    FileTreeNode get( GUID guid, int depth );\n\n    void update( FileTreeNode node);\n\n    @Override\n    FileTreeNode getAsRootDepth( GUID guid );\n\n    FileNode getFileNode(GUID guid );\n\n    Folder getFolder(GUID guid );\n\n    @Override\n    GUID queryGUIDByPath( String path );\n\n    @Override\n    GUID queryGUIDByFN  ( String fullName );\n\n    @Override\n    FileSystemConfig 
getConfig();\n\n\n    //todo update方法\n\n    @Override\n    void remove( GUID guid );\n\n    @Override\n    void removeReparseLink( GUID guid );\n\n\n    @Override\n    List<TreeNode > getChildren( GUID guid );\n\n    @Override\n    void rename( GUID guid, String name );\n\n    default void rename( String path, String name ) {\n        this.rename( this.assertPath( path ), name );\n    }\n\n    @Override\n    default GUID assertPath( String path, String pathType ) throws IllegalArgumentException {\n        GUID guid      = this.queryGUIDByPath( path );\n        if( guid == null ) {\n            throw new IllegalArgumentException( \"Undefined \" + pathType + \" '\" + path + \"'\" );\n        }\n\n        return guid;\n    }\n\n    @Override\n    default GUID assertPath( String path ) throws IllegalArgumentException {\n        return this.assertPath( path, \"path\" );\n    }\n\n    List<TreeNode > getAllTreeNode();\n\n\n\n    /** 断言，确保节点唯一拥有关系*/\n    @Override\n    void affirmOwnedNode( GUID parentGuid, GUID childGuid  );\n\n    FileNode  affirmFileNode( String path );\n\n    Folder    affirmFolder( String path);\n\n    @Override\n    void newHardLink    ( GUID sourceGuid, GUID targetGuid );\n\n    /** set affinityParentGuid for child.*/\n    void setDataAffinityGuid ( GUID childGuid, GUID affinityParentGuid  );\n\n    default void setDataAffinity ( String childPath, String parentPath ) {\n        GUID childGuid      = this.assertPath( childPath );\n        GUID parentGuid     = this.assertPath( parentPath );\n        if( childGuid == parentGuid ) {\n            throw new IllegalArgumentException( \"Cyclic path detected '\" + childPath + \"'\" );\n        }\n\n        this.setDataAffinityGuid( childGuid, parentGuid );\n    }\n\n    Object querySelector                  ( String szSelector );\n\n    ElementNode queryElement(String path);\n\n    @Override\n    void remove(String path);\n\n    @Override\n    EntityNode queryNode(String path);\n\n    @Override\n    
ReparseLinkNode queryReparseLink(String path);\n\n    List<TreeNode> selectByName(String name);\n\n    void moveTo(String sourcePath, String destinationPath);\n\n    void move(String sourcePath, String destinationPath);\n\n    void copy(String sourcePath, String destinationPath, VolumeManager volumeManager) throws IOException;\n\n    void directCopy( String sourcePath, String destinationPath ) throws IOException;\n\n    @Override\n    List<FileTreeNode> fetchRoot();\n\n    Object querySelectorJ(String szSelector);\n\n    List querySelectorAll(String szSelector);\n\n    FSNodeAllotment getFSNodeAllotment();\n\n    TreeMap<Long, Cluster > getClustersByFileGuid( GUID guid );\n\n    List<RemoteCluster> fetchClustersPageByFileGuid( GUID fileGuid, long offset, int pageSize );\n\n    ClusterPage fetchClustersByFileGuid( GUID fileGuid, int pageSize );\n\n    ClusterPage fetchClustersByFileGuid( GUID fileGuid );\n\n    Cluster getLastCluster(GUID guid );\n\n    void setFolderVolumeMapping(GUID folderGuid, GUID volumeGuid );\n    GUID getMappingVolume(GUID folderGuid );\n\n    GUID getMappingVolume(String path );\n\n    Cluster getClusterByFileWithId(GUID fileGuid, long segId );\n\n\n    void receive(  FileReceiveEntity entity ) throws IOException;\n    void receive( FileReceiveEntity entity, Number offset, Number endSize )throws IOException;\n    void randomReceive( FileReceiveEntity entity, Number offset, Number endSize ) throws  IOException;\n\n    void export( FileExportEntity entity ) throws IOException;\n    void export( FileExportEntity entity, Number offset, Number endSize );\n\n    FileMasterManipulator  getFileMasterManipulator();\n\n    void updateCluster( FileNode fileNode, long segId );\n\n    void deleteCluster( FileNode fileNode, long segId );\n\n    long countFileCluster( GUID fileGuid );\n\n    void renameFile( String filePath, String newFileName );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/KernelFileSystemConfig.java",
    "content": "package com.pinecone.hydra.storage.file;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.ArchStorageConfig;\nimport com.pinecone.hydra.storage.StorageConstants;\nimport com.pinecone.hydra.storage.file.cache.DefaultCacheConstants;\nimport com.pinecone.hydra.storage.volume.VolumeConstants;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.util.Map;\n\npublic class KernelFileSystemConfig extends ArchStorageConfig implements FileSystemConfig {\n    protected String mszVersionSignature        = FileConstants.StorageVersionSignature;\n    protected Number mnClusterSize              = FileConstants.DefaultClusterSize;\n    protected GUID   mLocalhostGUID             = StorageConstants.LocalhostGUID;\n    protected Number mTinyFileStripSizing       = VolumeConstants.TinyFileStripSizing;\n    protected long mPathQueryExpiryTimeHotMil   = DefaultCacheConstants.PathQueryExpiryTimeHotMil;\n\n    public KernelFileSystemConfig() {\n        super();\n    }\n\n    public KernelFileSystemConfig( Map<String, Object> config ) {\n        super( config );\n        this.mszVersionSignature           = (String) config.getOrDefault(\"VersionSignature\", FileConstants.StorageVersionSignature);\n        this.mnClusterSize                 = (Number) config.getOrDefault(\"ClusterSize\", FileConstants.DefaultClusterSize);\n        this.mLocalhostGUID                = GUIDs.GUID128( String.valueOf(config.getOrDefault(\"LocalhostGUID\", StorageConstants.LocalhostGUID)) );\n        this.mTinyFileStripSizing          = (Number) config.getOrDefault(\"TinyFileStripSizing\", VolumeConstants.TinyFileStripSizing);\n        this.mPathQueryExpiryTimeHotMil    = ((Number) config.getOrDefault(\"PathQueryExpiryTimeHotMil\", DefaultCacheConstants.PathQueryExpiryTimeHotMil)).longValue();\n    }\n\n\n    @Override\n    public String getVersionSignature() {\n        return this.mszVersionSignature;\n    }\n\n    public Number getClusterSize() {\n        
return this.mnClusterSize;\n    }\n\n    public GUID getLocalhostGUID() {\n        return this.mLocalhostGUID;\n    }\n\n    @Override\n    public Number getmTinyFileStripSizing() {\n        return this.mTinyFileStripSizing;\n    }\n\n    @Override\n    public long getPathQueryExpiryTimeHotMil() {\n        return this.mPathQueryExpiryTimeHotMil       ;\n    }\n\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/UniformObjectFileSystem.java",
    "content": "package com.pinecone.hydra.storage.file;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.storage.StorageConstants;\nimport com.pinecone.hydra.storage.file.cache.DefaultCacheConstants;\nimport com.pinecone.hydra.storage.file.external.ExternalFileSystemInstrument;\nimport com.pinecone.hydra.storage.file.external.KenExternalFileSystemInstrument;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.file.entity.ClusterPage;\nimport com.pinecone.hydra.storage.file.entity.ClusterPage64;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.GenericFSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.entity.GenericFileNode;\nimport com.pinecone.hydra.storage.file.entity.GenericFolder;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.file.entity.RemoteCluster;\nimport com.pinecone.hydra.storage.file.operator.FileSystemOperator;\nimport com.pinecone.hydra.storage.file.operator.FileSystemOperatorFactory;\nimport com.pinecone.hydra.storage.file.operator.GenericFileSystemOperatorFactory;\nimport com.pinecone.hydra.storage.file.source.FileSystemAttributeManipulator;\nimport com.pinecone.hydra.storage.file.source.FileManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMetaManipulator;\nimport com.pinecone.hydra.storage.file.source.FolderManipulator;\nimport 
com.pinecone.hydra.storage.file.source.FolderMetaManipulator;\nimport com.pinecone.hydra.storage.file.source.FolderVolumeMappingManipulator;\nimport com.pinecone.hydra.storage.file.source.LocalClusterManipulator;\nimport com.pinecone.hydra.storage.file.source.RemoteClusterManipulator;\nimport com.pinecone.hydra.storage.file.source.SymbolicManipulator;\nimport com.pinecone.hydra.storage.file.source.SymbolicMetaManipulator;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.transmit.exporter.FileExportEntity;\nimport com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;\nimport com.pinecone.hydra.storage.file.transmit.receiver.FileReceiveEntity;\nimport com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.io.TitanOutputStreamChanface;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.ArchReparseKOMTree;\nimport com.pinecone.hydra.system.ko.kom.GenericReparseKOMTreeAddition;\nimport com.pinecone.hydra.system.ko.kom.StandardPathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.slime.map.indexable.IndexableMapQuerier;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.io.File;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Objects;\nimport 
java.util.TreeMap;\n\n/**\n *  Pinecone Ursus For Java UniformObjectFileSystem\n *  Author: Ken, Harald.E (Dragon King)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Uniform Object File System (Ken`s OFS / KOFS)\n *  Uniform Distribute Object Storage File System\n *  Supported TB-PB-ZB Level Big Data Storage\n *\n *  *****************************************************************************************\n */\npublic class UniformObjectFileSystem extends ArchReparseKOMTree implements KOMFileSystem {\n    protected FSNodeAllotment                         fsNodeAllotment;\n\n    protected FileSystemAttributeManipulator          fileSystemAttributeManipulator;\n    protected FileManipulator                         fileManipulator;\n    protected FileMasterManipulator                   fileMasterManipulator;\n    protected FileMetaManipulator                     fileMetaManipulator;\n    protected FolderManipulator                       folderManipulator;\n    protected FolderMetaManipulator                   folderMetaManipulator;\n    protected LocalClusterManipulator                 localClusterManipulator;\n    protected RemoteClusterManipulator                remoteClusterManipulator;\n    protected SymbolicManipulator                     symbolicManipulator;\n    protected SymbolicMetaManipulator                 symbolicMetaManipulator;\n    protected FolderVolumeMappingManipulator          folderVolumeMappingManipulator;\n\n    protected IndexableMapQuerier<String, String >    globalPathGuidCacheQuerier;\n\n    protected ExternalFileSystemInstrument directFileSystemAccessor;\n\n\n    public UniformObjectFileSystem(\n            Processum superiorProcess, KOIMasterManipulator masterManipulator, KOMFileSystem parent,\n            String name, IndexableMapQuerier<String, String > globalPathGuidCacheQuerier,\n            FileSystemConfig 
fileSystemConfig, @Nullable GuidAllocator guidAllocator\n    ){\n        // Phase [1] Construct system.\n        super( superiorProcess, masterManipulator, fileSystemConfig, parent, name, guidAllocator );\n\n        // Phase [2] Construct fundamentals.\n        this.fileMasterManipulator         = (FileMasterManipulator) masterManipulator;\n        this.pathResolver                  =  new KOPathResolver( this.kernelObjectConfig );\n\n        // Phase [3] Construct manipulators.\n        this.operatorFactory                 =  new GenericFileSystemOperatorFactory( this, (FileMasterManipulator) masterManipulator );\n        this.fileSystemAttributeManipulator  =  this.fileMasterManipulator.getAttributeManipulator();\n        this.fileManipulator                 =  this.fileMasterManipulator.getFileManipulator();\n        this.fileMetaManipulator             =  this.fileMasterManipulator.getFileMetaManipulator();\n        this.folderManipulator               =  this.fileMasterManipulator.getFolderManipulator();\n        this.folderMetaManipulator           =  this.fileMasterManipulator.getFolderMetaManipulator();\n        this.localClusterManipulator         =  this.fileMasterManipulator.getLocalClusterManipulator();\n        this.remoteClusterManipulator        =  this.fileMasterManipulator.getRemoteClusterManipulator();\n        this.symbolicManipulator             =  this.fileMasterManipulator.getSymbolicManipulator();\n        this.symbolicMetaManipulator         =  this.fileMasterManipulator.getSymbolicMetaManipulator();\n        this.folderVolumeMappingManipulator  =  this.fileMasterManipulator.getFolderVolumeRelationManipulator();\n\n        // Phase [4] Construct selectors.\n        this.pathSelector                    =  new StandardPathSelector(\n                this.pathResolver, this.imperialTree, this.folderManipulator, new GUIDNameManipulator[] { this.fileManipulator }\n        );\n        // Warning: ReparseKOMTreeAddition must be constructed only 
after `pathSelector` has been constructed.\n        this.mReparseKOM                     =  new GenericReparseKOMTreeAddition( this );\n\n        // Phase [5] Construct misc.\n//        this.propertyTypeConverter         =  new DefaultPropertyConverter();\n//        this.textValueTypeConverter        =  new DefaultTextValueConverter();\n        this.fsNodeAllotment                 =  new GenericFSNodeAllotment(this.fileMasterManipulator,this);\n        this.globalPathGuidCacheQuerier      =  globalPathGuidCacheQuerier;\n\n        this.directFileSystemAccessor = new KenExternalFileSystemInstrument(this);\n    }\n\n//    public GenericKOMFileSystem( Hydrogen hydrogen ) {\n//        this.hydrogen = hydrogen;\n//    }\n\n    public UniformObjectFileSystem( Processum superiorProcess, KOIMasterManipulator masterManipulator, KOMFileSystem parent, String name,FileSystemConfig fileSystemConfig ) {\n        this( superiorProcess, masterManipulator, parent, name, null,fileSystemConfig, null );\n    }\n\n    public UniformObjectFileSystem( Processum superiorProcess, KOIMasterManipulator masterManipulator, FileSystemConfig fileSystemConfig ){\n        this( superiorProcess, masterManipulator, null, KOMFileSystem.class.getSimpleName(),fileSystemConfig );\n    }\n\n    public UniformObjectFileSystem( Processum superiorProcess, KOIMasterManipulator masterManipulator, IndexableMapQuerier<String, String > globalPathGuidCacheQuerier, FileSystemConfig fileSystemConfig  ){\n        this( superiorProcess, masterManipulator, null, KOMFileSystem.class.getSimpleName(), globalPathGuidCacheQuerier,fileSystemConfig, null );\n    }\n\n    public UniformObjectFileSystem( KOIMappingDriver driver, KOMFileSystem parent, String name, FileSystemConfig fileSystemConfig ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator(),\n                parent,\n                name,\n                fileSystemConfig\n        );\n    }\n\n    public 
UniformObjectFileSystem( KOIMappingDriver driver,FileSystemConfig fileSystemConfig ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator(),\n                fileSystemConfig\n        );\n    }\n\n    public UniformObjectFileSystem( KOIMappingDriver driver, IndexableMapQuerier<String, String > globalPathGuidCacheQuerier, FileSystemConfig fileSystemConfig  ) {\n        this(\n                driver.getSuperiorProcess(),\n                driver.getMasterManipulator(),\n                globalPathGuidCacheQuerier,\n                fileSystemConfig\n        );\n    }\n\n\n\n    protected void apply( IndexableMapQuerier<String, String > globalPathGuidCacheQuerier ) {\n        this.globalPathGuidCacheQuerier = globalPathGuidCacheQuerier;\n    }\n\n\n    @Override\n    public FileTreeNode get(GUID guid, int depth ) {\n        return (FileTreeNode) super.get( guid, depth );\n    }\n\n    @Override\n    public FileMasterManipulator getFileMasterManipulator() {\n        return this.fileMasterManipulator;\n    }\n\n    @Override\n    public FileTreeNode get( GUID guid ) {\n        return (FileTreeNode) super.get( guid );\n    }\n\n    @Override\n    public void update(FileTreeNode node) {\n        TreeNodeOperator operator = this.operatorFactory.getOperator(node.getMetaType());\n        operator.update( node );\n    }\n\n    @Override\n    public FileTreeNode getAsRootDepth( GUID guid ) {\n        return (FileTreeNode) super.getAsRootDepth( guid );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public List<FileTreeNode > fetchRoot() {\n        return (List) super.fetchRoot();\n    }\n\n\n\n    @Override\n    public FileSystemConfig getConfig() {\n        return (FileSystemConfig) super.getConfig();\n    }\n\n    public FileSystemOperatorFactory getOperatorFactory() {\n        return (FileSystemOperatorFactory) this.operatorFactory;\n    }\n\n    @Override\n    public FileNode getFileNode(GUID guid) 
{\n        return ( FileNode ) this.get( guid );\n    }\n\n    @Override\n    public Folder getFolder(GUID guid) {\n        return ( Folder ) this.get( guid );\n    }\n\n    @Override\n    public void remove(String path) {\n        String key = DefaultCacheConstants.FilePathCacheNS + path;\n        this.globalPathGuidCacheQuerier.erase(key);\n        super.remove(path);\n    }\n\n    @Override\n    public void remove( GUID guid ){\n        super.remove( guid );\n        this.remoteClusterManipulator.remove( guid );\n        this.localClusterManipulator.remove( guid );\n    }\n\n    @Override\n    public List<TreeNode> getAllTreeNode() {\n        List<GUID> nameSpaceNodes = this.fileManipulator.dumpGuid();\n        List<GUID> confNodes      = this.folderManipulator.dumpGuid();\n        ArrayList<TreeNode> treeNodes = new ArrayList<>();\n        for (GUID guid : nameSpaceNodes){\n            TreeNode treeNode = this.get(guid);\n            treeNodes.add(treeNode);\n        }\n        for ( GUID guid : confNodes ){\n            TreeNode treeNode = this.get(guid);\n            treeNodes.add(treeNode);\n        }\n        return treeNodes;\n    }\n\n    protected FileTreeNode affirmTreeNodeByPath( String path, Class<? > cnSup, Class<? > nsSup ) {\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        String currentPath = \"\";\n        GUID parentGuid = GUIDs.Dummy128();\n\n        FileTreeNode node = this.queryElement( path );\n        if( node != null ) {\n            return node;\n        }\n\n        FileTreeNode ret = null;\n        for( int i = 0; i < parts.length; ++i ){\n            currentPath = currentPath + ( i > 0 ? 
this.getConfig().getPathNameSeparator() : \"\" ) + parts[ i ];\n            node = this.queryElement( currentPath );\n            if ( node == null){\n                if ( i == parts.length - 1 && cnSup != null ){\n                    FileNode fileNode = (FileNode) this.dynamicFactory.optNewInstance( cnSup, new Object[]{ this } );\n                    fileNode.setName( parts[i] );\n                    GUID guid = this.put( fileNode );\n                    this.affirmOwnedNode( parentGuid, guid );\n                    return fileNode;\n                }\n                else {\n                    Folder folder = (Folder) this.dynamicFactory.optNewInstance( nsSup, new Object[]{ this } );\n                    folder.setName(parts[i]);\n                    GUID guid = this.put(folder);\n                    if ( i != 0 ){\n                        this.affirmOwnedNode( parentGuid, guid );\n                        parentGuid = guid;\n                    }\n                    else {\n                        parentGuid = guid;\n                    }\n\n                    ret = folder;\n                }\n            }\n            else {\n                parentGuid = node.getGuid();\n            }\n        }\n\n        return ret;\n    }\n\n    @Override\n    public FileNode affirmFileNode(String path) {\n        FileNode fileNode = (FileNode) this.affirmTreeNodeByPath(path, GenericFileNode.class, GenericFolder.class);\n        this.initVolume( path );\n        return fileNode;\n    }\n\n    @Override\n    public Folder affirmFolder(String path) {\n        Folder folder = (Folder) this.affirmTreeNodeByPath(path, null, GenericFolder.class);\n        this.initVolume( path );\n        return folder;\n    }\n\n    @Override\n    public void setDataAffinityGuid( GUID childGuid, GUID affinityParentGuid ) {\n\n    }\n\n\n    @Override\n    public GUID queryGUIDByPath( String path ) {\n        FileSystemConfig config = this.getConfig();\n        if ( 
this.globalPathGuidCacheQuerier != null ) {\n            String key = DefaultCacheConstants.FilePathCacheNS + path;\n            String szGUID = this.globalPathGuidCacheQuerier.get( key );\n            if ( StringUtils.isNoneEmpty( szGUID ) ) {\n                return GUIDs.GUID128( szGUID );\n            }\n        }\n        GUID guid =  super.queryGUIDByPath( path ); // Into OLTP-RDB\n        if ( this.globalPathGuidCacheQuerier != null ) {\n            String key = DefaultCacheConstants.FilePathCacheNS + path;\n            this.globalPathGuidCacheQuerier.insert( key, guid.toString(), config.getPathQueryExpiryTimeHotMil() );\n        }\n        return guid;\n    }\n\n    @Override\n    public ElementNode queryElement(String path) {\n        GUID guid = this.queryGUIDByPath( path );\n        if( guid != null ) {\n            return (ElementNode) this.get( guid );\n        }\n        return this.directFileSystemAccessor.queryElement(path);\n    }\n\n    @Override\n    public List<TreeNode> selectByName(String name) {\n        return null;\n    }\n\n    @Override\n    public void moveTo(String sourcePath, String destinationPath) {\n        GUID[] pair = this.assertCopyMove( sourcePath, destinationPath );\n        GUID sourceGuid      = pair[ 0 ];\n        GUID destinationGuid = pair[ 1 ];\n\n        this.imperialTree.moveTo( sourceGuid, destinationGuid );\n        this.imperialTree.removeCachePath( sourceGuid );\n    }\n\n    @Override\n    public void move(String sourcePath, String destinationPath) {\n        GUID sourceGuid         = this.assertPath( sourcePath, \"source\" );\n\n        List<String > sourParts = this.pathResolver.resolvePathParts( sourcePath );\n        List<String > destParts = this.pathResolver.resolvePathParts( destinationPath );\n\n        String szLastDestTarget = destParts.get( destParts.size() - 1 );\n        sourcePath      = sourcePath.trim();\n        destinationPath = destinationPath.trim();\n\n        //   Case1: Move 
\"game/terraria/npc\"   => \"game/minecraft/npc\", which has the same dest name.\n        // Case1-1: Move \"game/terraria/npc/\"  => \"game/minecraft/npc/\"\n        // Case1-2: Move \"game/terraria/npc/.\" => \"game/minecraft/npc/.\"\n        if(\n                sourParts.get( sourParts.size() - 1 ).equals( szLastDestTarget ) || szLastDestTarget.equals( \".\" ) ||\n                        ( sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) )\n        ) {\n            destParts.remove( destParts.size() - 1 );\n            String szParentPath = this.pathResolver.assemblePath( destParts );\n            destParts.add( szLastDestTarget );\n\n            // Move to, which has the same name or explicit current dir `.`.\n            this.moveTo( sourcePath, szParentPath );\n        }\n        // Case 2: \"game/terraria/npc\" => \"game/minecraft/character/\" || \"game/minecraft/character/.\"\n        //    game/terraria/npc => game/minecraft/character/npc\n        else if ( !sourcePath.endsWith( this.getConfig().getPathNameSeparator() ) && (\n                destinationPath.endsWith( this.getConfig().getPathNameSeparator() ) || destinationPath.endsWith( \".\" ) )\n        ) {\n            Folder target = this.affirmFolder( destinationPath );\n            this.imperialTree.moveTo( sourceGuid, target.getGuid() );\n        }\n        // Case3: Move \"game/terraria/npc\" => \"game/minecraft/character\", move all children therein.\n        //    game/terraria/npc/f1 => game/minecraft/character/f1\n        //    game/terraria/npc/f2 => game/minecraft/character/f2\n        //    etc.\n        else {\n            //  Case3-1: Is config or other none namespace node.\n            //           Move \"game/terraria/file\" => \"game/minecraft/dir\".\n            //  Case3-2: \"game/terraria/npc/\" => \"game/minecraft/character\"\n            // Eq.Case2: Move \"game/terraria/npc\" => 
\"game/minecraft/character\",\n            if( !this.folderManipulator.isFolder( sourceGuid ) ) {\n                Folder target = this.affirmFolder( destinationPath );\n                this.imperialTree.moveTo( sourceGuid, target.getGuid() );\n            }\n            else {\n                List<TreeNode > children = this.getChildren( sourceGuid );\n                if( !children.isEmpty() ) {\n                    Folder target = this.affirmFolder( destinationPath );\n                    for( TreeNode node : children ) {\n                        this.imperialTree.moveTo( node.getGuid(), target.getGuid() );\n                    }\n                }\n            }\n\n            this.imperialTree.removeTreeNodeOnly( sourceGuid );\n        }\n\n        this.imperialTree.removeCachePath( sourceGuid );\n    }\n\n    @Override\n    public void copy(String sourcePath, String destinationPath, VolumeManager volumeManager) throws  IOException {\n        ElementNode elementNode = this.queryElement(destinationPath);\n        this.copy(sourcePath,elementNode,volumeManager);\n    }\n\n    @Override\n    public void directCopy(String sourcePath, String destinationPath) throws IOException {\n        this.directFileSystemAccessor.copy( sourcePath,destinationPath );\n    }\n\n    private void copy(String sourcePath, FileTreeNode fileTreeNode, VolumeManager volumeManager ) throws IOException {\n        if( fileTreeNode instanceof Folder ){\n            List<TreeNode> children = this.getChildren(fileTreeNode.getGuid());\n            for(TreeNode child : children){\n                FileTreeNode childFileTreeNode = this.get(child.getGuid());\n                this.copy(sourcePath + StorageConstants.PathSeparator + fileTreeNode.getName(), childFileTreeNode,volumeManager);\n            }\n        }\n        else {\n            String name = fileTreeNode.getName();\n            String[] split = name.split(StorageConstants.period);\n//            File tempFile = 
File.createTempFile(split[0], StorageConstants.PathSeparator + split[1]);\n            File tempFile = new File(this.getConfig().getDefaultTempFilePath()+name);\n            if(!tempFile.createNewFile()){\n                throw new IOException( \"Creating file compromised, what :\" + tempFile.toPath() );\n            }\n            FileOutputStream fileOutputStream = new FileOutputStream(tempFile);\n            TitanOutputStreamChanface outputStreamChanface = new TitanOutputStreamChanface(fileOutputStream);\n            TitanFileExportEntity64 exportEntity64 = new TitanFileExportEntity64(this, volumeManager,\n                    (FileNode) fileTreeNode, outputStreamChanface);\n            exportEntity64.export();\n\n            FileNode fileNode = this.fsNodeAllotment.newFileNode();\n            FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ);\n            TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );\n            fileNode.setDefinitionSize( tempFile.length() );\n            fileNode.setName( tempFile.getName() );\n            String destDirPath = sourcePath + StorageConstants.PathSeparator + name;\n            TitanFileReceiveEntity64 receiveEntity64 = new TitanFileReceiveEntity64(this, destDirPath,\n                    fileNode, titanFileChannelKChannel, volumeManager);\n            this.receive( receiveEntity64 );\n\n            tempFile.delete();\n            fileOutputStream.close();\n            channel.close();\n        }\n    }\n\n    @Override\n    public Object querySelectorJ( String szSelector ) {\n        return null;\n    }\n\n\n    @Override\n    public TreeMap<Long, Cluster> getClustersByFileGuid( GUID guid ) {\n        TreeMap< Long, Cluster> frameMap = new TreeMap<>();\n\n        List<RemoteCluster> remoteClusters = this.remoteClusterManipulator.fetchRemoteClusterByFileGuid( guid );\n        for( RemoteCluster remoteCluster : remoteClusters ){\n            if( 
remoteCluster.getDeviceGuid().equals( this.getConfig().getLocalHostGuid() )){\n                LocalCluster localCluster = this.localClusterManipulator.getLocalClusterByGuid( remoteCluster.getSegGuid() );\n                frameMap.put( localCluster.getSegId(), localCluster );\n            }\n            else {\n                //todo 远程获取逻辑\n            }\n        }\n\n        return frameMap;\n    }\n\n\n    @Override\n    public List<RemoteCluster > fetchClustersPageByFileGuid( GUID fileGuid, long offset, int pageSize ) {\n        return this.remoteClusterManipulator.fetchRemoteClusterByFileGuid( fileGuid, offset, pageSize );\n    }\n\n    @Override\n    public ClusterPage fetchClustersByFileGuid( GUID fileGuid, int pageSize ) {\n        return new ClusterPage64(this, this.remoteClusterManipulator, this.localClusterManipulator, fileGuid, pageSize );\n    }\n\n    @Override\n    public ClusterPage fetchClustersByFileGuid( GUID fileGuid ) {\n        return new ClusterPage64( this,this.remoteClusterManipulator, this.localClusterManipulator, fileGuid );\n    }\n\n\n    @Override\n    public FSNodeAllotment getFSNodeAllotment() {\n        return this.fsNodeAllotment;\n    }\n\n    @Override\n    public Object querySelector(String szSelector) {\n        return null;\n    }\n\n    @Override\n    public List querySelectorAll(String szSelector) {\n        return null;\n    }\n\n\n    @Override\n    public Cluster getLastCluster(GUID guid) {\n        RemoteCluster remoteCluster = this.remoteClusterManipulator.getLastCluster(guid);\n        if ( remoteCluster.getDeviceGuid().equals( this.getConfig().getLocalHostGuid() )){\n            return this.localClusterManipulator.getLocalClusterByGuid(remoteCluster.getSegGuid());\n        }\n        else {\n            //todo 远端获取方法\n        }\n        return null;\n    }\n\n    private String getNodeName(ImperialTreeNode node ){\n        UOI type = node.getType();\n        TreeNode newInstance = (TreeNode)type.newInstance();\n       
 TreeNodeOperator operator = this.getOperatorFactory().getOperator(newInstance.getMetaType());\n        TreeNode treeNode = operator.get(node.getGuid());\n        return treeNode.getName();\n    }\n\n    private boolean allNonNull( List<?> list ) {\n        return list.stream().noneMatch( Objects::isNull );\n    }\n\n    protected GUID[] assertCopyMove ( String sourcePath, String destinationPath ) throws IllegalArgumentException {\n        GUID sourceGuid      = this.queryGUIDByPath( sourcePath );\n        if( sourceGuid == null ) {\n            throw new IllegalArgumentException( \"Undefined source '\" + sourcePath + \"'\" );\n        }\n\n        GUID destinationGuid = this.queryGUIDByPath( destinationPath );\n        if( !this.folderManipulator.isFolder( destinationGuid ) ){\n            throw new IllegalArgumentException( \"Illegal destination '\" + destinationPath + \"', should be namespace.\" );\n        }\n\n        if( destinationGuid == null ) {\n            throw new IllegalArgumentException( \"Undefined destination '\" + destinationPath + \"'\" );\n        }\n\n        if( sourceGuid == destinationGuid ) {\n            throw new IllegalArgumentException( \"Cyclic path detected '\" + sourcePath + \"'\" );\n        }\n\n        return new GUID[] { sourceGuid, destinationGuid };\n    }\n\n    @Override\n    public void receive( FileReceiveEntity entity) throws IOException {\n        entity.receive();\n    }\n\n    @Override\n    public void receive( FileReceiveEntity entity, Number offset, Number endSize) throws IOException {\n        entity.receive(offset, endSize );\n    }\n\n    @Override\n    public void randomReceive(FileReceiveEntity entity, Number offset, Number endSize) throws  IOException {\n        entity.randomReceive( offset,endSize );\n    }\n\n    @Override\n    public void export( FileExportEntity entity ) throws  IOException {\n        entity.export();\n    }\n\n    @Override\n    public void export( FileExportEntity entity, Number offset, 
Number endSize ) {\n\n    }\n\n    @Override\n    public void setFolderVolumeMapping(GUID folderGuid, GUID volumeGuid) {\n        this.folderVolumeMappingManipulator.insert( folderGuid, volumeGuid );\n    }\n\n    @Override\n    public GUID getMappingVolume(GUID folderGuid) {\n        return this.folderVolumeMappingManipulator.getVolumeGuid( folderGuid );\n    }\n\n    @Override\n    public GUID getMappingVolume(String path) {\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        GUID currentVolumeGuid = null;\n        String currentPath = \"\";\n        for( int i = 0; i < parts.length - 1; i++ ){\n            currentPath = currentPath + ( i > 0 ? this.getConfig().getPathNameSeparator() : \"\" ) + parts[ i ];\n            ElementNode elementNode = this.queryElement(currentPath);\n            Folder folder = this.getFolder(elementNode.getGuid());\n            GUID relationVolume = folder.getRelationVolume();\n            if ( relationVolume != null ){\n                currentVolumeGuid = relationVolume;\n            }\n        }\n        return currentVolumeGuid;\n    }\n\n    @Override\n    public Cluster getClusterByFileWithId(GUID fileGuid, long segId) {\n        return this.localClusterManipulator.getClusterByFileWithId( fileGuid,segId );\n    }\n\n    @Override\n    public void updateCluster(FileNode fileNode, long segId) {\n\n    }\n\n    @Override\n    public void deleteCluster(FileNode fileNode, long segId) {\n        this.remoteClusterManipulator.removeClusterByFileWithId( fileNode.getGuid(), segId );\n        this.localClusterManipulator.removeClusterByFileWithId( fileNode.getGuid(), segId );\n    }\n\n    @Override\n    public long countFileCluster(GUID fileGuid) {\n        return this.remoteClusterManipulator.countFileClusters( fileGuid );\n    }\n\n    @Override\n    public void renameFile(String filePath, String newFileName) {\n        ElementNode elementNode = this.queryElement(filePath);\n        elementNode.setName( 
newFileName );\n\n        FileSystemOperator operator = (FileSystemOperator)this.operatorFactory.getOperator(elementNode.getMetaType());\n        operator.rename( elementNode.getGuid(), newFileName );\n    }\n\n    private void initVolume(String path ){\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        Folder root = this.getFolder(this.queryGUIDByPath(parts[0]));\n        if( root.getRelationVolume() == null ){\n            root.applyVolume( GUIDs.GUID128( this.getConfig().getDefaultVolumeGuid() ) );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/Verification.java",
    "content": "package com.pinecone.hydra.storage.file;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.zip.CRC32;\n\n//todo 改成接口\npublic class Verification implements Pinenut {\n    private CRC32 crc32;\n\n    private long checksum;\n\n    private int parityCheck;\n\n\n    public Verification() {\n    }\n\n    public Verification(CRC32 crc32, long checksum, int parityCheck) {\n        this.crc32 = crc32;\n        this.checksum = checksum;\n        this.parityCheck = parityCheck;\n    }\n\n\n    public CRC32 getCrc32() {\n        return crc32;\n    }\n\n\n    public void setCrc32(CRC32 crc32) {\n        this.crc32 = crc32;\n    }\n\n\n    public long getChecksum() {\n        return checksum;\n    }\n\n\n    public void setChecksum(long checksum) {\n        this.checksum = checksum;\n    }\n\n\n    public int getParityCheck() {\n        return parityCheck;\n    }\n\n\n    public void setParityCheck(int parityCheck) {\n        this.parityCheck = parityCheck;\n    }\n\n    public String toString() {\n        return \"Verification{crc32 = \" + crc32 + \", checksum = \" + checksum + \", parityCheck = \" + parityCheck + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/builder/ComponentUOFSBuilder.java",
    "content": "package com.pinecone.hydra.storage.file.builder;\n\nimport com.pinecone.framework.unit.BitSet64;\nimport com.pinecone.hydra.storage.file.FileSystemConfig;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.UniformObjectFileSystem;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\n\npublic class ComponentUOFSBuilder implements UOFSBuilder {\n    public static long DEFAULT_GENERATE_FEATURE = 0L;\n\n    protected UOFSComponentor[]  mComponentorIndex = new UOFSComponentor[ Feature.featuresSize() ];\n\n    protected KOIMappingDriver   mKOIMappingDriver;\n\n    protected long               mFeatureValues = DEFAULT_GENERATE_FEATURE;\n\n    protected FileSystemConfig   mFileSystemConfig;\n\n    public ComponentUOFSBuilder ( KOIMappingDriver driver, FileSystemConfig config ) {\n        this.mKOIMappingDriver = driver;\n        this.mFileSystemConfig = config;\n    }\n\n\n\n\n    @Override\n    public UOFSBuilder registerComponentor( UOFSComponentor componentor ) {\n        int i = componentor.getFeature().ordinal();\n        this.mComponentorIndex[ i ] = componentor;\n        this.mFeatureValues = BitSet64.setBit( this.mFeatureValues, i );\n        return this;\n    }\n\n    @Override\n    public KOMFileSystem buildByRegistered() {\n        return this.build( this.mFeatureValues );\n    }\n\n    @Override\n    public KOMFileSystem build( Feature... 
features ) {\n        long featureValues = DEFAULT_GENERATE_FEATURE;\n\n        for ( int i = 0; i < features.length; ++i ) {\n            Feature feature = features[ i ];\n            featureValues = Feature.config( featureValues, feature, true );\n        }\n\n        return this.build( featureValues );\n    }\n\n    @Override\n    public KOMFileSystem build( long featureValues ) {\n        KOMFileSystem fs = new UniformObjectFileSystem( this.mKOIMappingDriver, this.mFileSystemConfig );\n\n        for ( int i = 0; i < Feature.featuresSize(); ++i ) {\n            if ( ( featureValues & (1L << i) ) != 0 ) {\n                this.mComponentorIndex[ i ].apply( fs );\n            }\n        }\n\n        return fs;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/builder/Feature.java",
    "content": "package com.pinecone.hydra.storage.file.builder;\n\npublic enum Feature {\n    EnableGlobalCache\n    ;\n\n    public final long mask = 1 << this.ordinal();\n\n    private Feature() {\n    }\n\n    public final long getMask() {\n        return this.mask;\n    }\n\n    public static boolean isEnabled( long features, Feature feature ) {\n        return (features & feature.mask) != 0;\n    }\n\n    public static long config( long features, Feature feature, boolean state ) {\n        if ( state ) {\n            features |= feature.mask;\n        }\n        else {\n            features &= ~feature.mask;\n        }\n\n        return features;\n    }\n\n    public static long of( Feature[] features ) {\n        if ( features == null ) {\n            return 0L;\n        }\n        else {\n            long value = 0L;\n\n            for ( int i = 0; i < features.length; ++i ) {\n                Feature feature = features[ i ];\n                value |= feature.mask;\n            }\n\n            return value;\n        }\n    }\n\n    public static int featuresSize() {\n        return Feature.values().length;\n    }\n}\n\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/builder/UOFSBuilder.java",
    "content": "package com.pinecone.hydra.storage.file.builder;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\n\npublic interface UOFSBuilder extends Pinenut {\n    KOMFileSystem build( Feature ...features );\n\n    KOMFileSystem build( long featureValues );\n\n    KOMFileSystem buildByRegistered();\n\n\n    UOFSBuilder registerComponentor( UOFSComponentor componentor );\n\n }\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/builder/UOFSComponentor.java",
    "content": "package com.pinecone.hydra.storage.file.builder;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\n\npublic interface UOFSComponentor extends Pinenut {\n    void apply( KOMFileSystem fs );\n\n    Feature getFeature();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/cache/DefaultCacheConstants.java",
    "content": "package com.pinecone.hydra.storage.file.cache;\n\nimport java.util.concurrent.TimeUnit;\n\npublic final class DefaultCacheConstants {\n\n    public static final String FilePathCacheNS = \"FILE_PATH_CACHE_NS_\";\n\n    public static final long PathQueryExpiryTimeHotMil = TimeUnit.HOURS.toMillis( 4 );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/cache/FileSystemCacheConfig.java",
    "content": "package com.pinecone.hydra.storage.file.cache;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface FileSystemCacheConfig extends Pinenut {\n    String getRedisHost();\n\n    int getRedisPort();\n\n    int getRedisTimeOut();\n\n    String getRedisPassword();\n\n    int getRedisDatabase();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/cache/MappedFileSystemCacheConfig.java",
    "content": "package com.pinecone.hydra.storage.file.cache;\n\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic class MappedFileSystemCacheConfig implements FileSystemCacheConfig {\n    //protected Map<String, Object >  protoConfig;\n\n    protected JSONObject            protoConfig;\n\n    protected String                redisHost;\n\n    protected int                   redisPort;\n\n    protected int                   redisTimeOut;\n\n    protected String                redisPassword;\n\n    protected int                   redisDatabase;\n\n    public MappedFileSystemCacheConfig( JSONObject protoConfig ){\n        this.protoConfig = protoConfig;\n        this.redisHost = this.protoConfig.optString(\"redisHost\");\n        this.redisPort = this.protoConfig.optInt(\"redisPort\", 6379);\n        this.redisTimeOut = this.protoConfig.optInt(\"redisTimeOut\",2000);\n        this.redisPassword = this.protoConfig.optString(\"redisPassword\");\n        this.redisDatabase = this.protoConfig.optInt( \"redisDatabase\" );\n    }\n\n\n    @Override\n    public String getRedisHost() {\n        return this.redisHost;\n    }\n\n    @Override\n    public int getRedisPort() {\n        return this.redisPort;\n    }\n\n    @Override\n    public int getRedisTimeOut() {\n        return this.redisTimeOut;\n    }\n\n    @Override\n    public String getRedisPassword() {\n        return this.redisPassword;\n    }\n\n    @Override\n    public int getRedisDatabase() {\n        return this.redisDatabase;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ArcReparseSemanticNode.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\n\nimport java.time.LocalDateTime;\n\npublic abstract class ArcReparseSemanticNode extends ArchElementNode implements ReparseSemanticNode {\n    protected String                  reparsedPoint;\n\n    protected KOMFileSystem           fileSystem;\n\n    public ArcReparseSemanticNode(){\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n    }\n\n    public ArcReparseSemanticNode( KOMFileSystem fileSystem ) {\n        this();\n        this.fileSystem = fileSystem;\n        GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator();\n        this.setGuid( guidAllocator.nextGUID() );\n    }\n\n    @Override\n    public String getReparsedPoint() {\n        return this.reparsedPoint;\n    }\n\n    @Override\n    public void setReparsedPoint(String reparsedPoint) {\n        this.reparsedPoint = reparsedPoint;\n    }\n\n    @Override\n    public KOMFileSystem parentFileSystem() {\n        return this.fileSystem;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ArchCluster.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class ArchCluster implements Cluster {\n    private long enumId;\n    private GUID fileGuid;\n    private GUID segGuid;\n    private long segId;\n    private long crc32;\n    private long size;\n\n    public ArchCluster() {\n    }\n\n    public ArchCluster(long enumId, GUID fileGuid, GUID segGuid, long segId, long crc32, long size) {\n        this.enumId = enumId;\n        this.fileGuid = fileGuid;\n        this.segGuid = segGuid;\n        this.segId = segId;\n        this.crc32 = crc32;\n        this.size = size;\n    }\n\n    @Override\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    @Override\n    public GUID getFileGuid() {\n        return fileGuid;\n    }\n\n\n    @Override\n    public void setFileGuid(GUID fileGuid) {\n        this.fileGuid = fileGuid;\n    }\n\n    @Override\n    public GUID getSegGuid() {\n        return this.segGuid;\n    }\n\n    @Override\n    public void setSegGuid(GUID segGuid) {\n        this.segGuid = segGuid;\n    }\n\n\n    @Override\n    public long getSegId() {\n        return segId;\n    }\n\n\n    @Override\n    public void setSegId(long segId) {\n        this.segId = segId;\n    }\n\n\n    @Override\n    public long getCrc32() {\n        return this.crc32;\n    }\n\n\n    @Override\n    public void setCrc32( long crc32 ) {\n        this.crc32 = crc32;\n    }\n\n\n    @Override\n    public long getSize() {\n        return size;\n    }\n\n\n    @Override\n    public void setSize(long size) {\n        this.size = size;\n    }\n\n    @Override\n    public void remove() {\n\n    }\n\n    @Override\n    public void save() {\n\n    }\n\n    public String toString() {\n        return \"ArchCluster{enumId = \" + enumId + \", fileGuid = \" + fileGuid + \", segGuid = \" + segGuid + \", segId = \" + 
segId + \", crc32 = \" + crc32 + \", size = \" + size + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ArchElementNode.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic abstract class ArchElementNode implements ElementNode {\n    protected long                    enumId;\n    protected GUID                    guid;\n    protected LocalDateTime           createTime;\n    protected LocalDateTime           updateTime;\n    protected String                  name;\n\n\n    protected FileSystemAttributes fileSystemAttributes;\n\n    @Override\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    @Override\n    public GUID getGuid() {\n        return guid;\n    }\n\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return createTime;\n    }\n\n\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return updateTime;\n    }\n\n\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n\n    @Override\n    public FileSystemAttributes getAttributes() {\n        return fileSystemAttributes;\n    }\n\n\n    @Override\n    public void setAttributes( FileSystemAttributes fileSystemAttributes ) {\n        this.fileSystemAttributes = fileSystemAttributes;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/Cluster.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface Cluster extends Pinenut {\n    long getEnumId();\n\n    void setEnumId(long enumId);\n\n    GUID getFileGuid();\n    void setFileGuid(GUID fileGuid);\n\n    GUID getSegGuid();\n    void setSegGuid(GUID segGuid);\n\n    long getSegId();\n    void setSegId(long segId);\n\n    long getCrc32();\n    void setCrc32(long crc32);\n\n    long getSize();\n    void setSize(long size);\n    void save();\n    void remove();\n\n    default LocalCluster evinceLocalCluster(){\n        return null;\n    }\n    default RemoteCluster evinceRemoteCluster(){\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ClusterPage.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ClusterPage extends Pinenut {\n    int getPageSize() ;\n\n    int getPageSum() ;\n\n    long getCurrentPage() ;\n\n    long getClusters() ;\n\n    Cluster getCluster( long segId ) ;\n\n    LocalCluster getLocalCluster( long segId );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ClusterPage64.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.source.LocalClusterManipulator;\nimport com.pinecone.hydra.storage.file.source.RemoteClusterManipulator;\n\nimport java.util.List;\n\npublic class ClusterPage64 implements ClusterPage {\n    protected KOMFileSystem           komFileSystem;\n\n    protected RemoteClusterManipulator remoteClusterManipulator;\n\n    protected LocalClusterManipulator localClusterManipulator;\n\n    protected List<Cluster > mCurrClusterPage;\n\n    protected int mnPageSize;\n\n    protected int mnPageSum;\n\n    protected int mnCurrPageAt;\n\n    protected long mnClusters;\n\n    protected GUID fileGuid;\n\n    public ClusterPage64(KOMFileSystem fileSystem,\n            RemoteClusterManipulator remoteClusterManipulator, LocalClusterManipulator localClusterManipulator,\n            GUID fileGuid, int pageSize\n    ) {\n        this.komFileSystem            = fileSystem;\n        this.fileGuid                 = fileGuid;\n        this.localClusterManipulator  = localClusterManipulator;\n        this.remoteClusterManipulator = remoteClusterManipulator;\n        this.mnPageSize               = pageSize;\n        this.mnClusters               = remoteClusterManipulator.countFileClusters( fileGuid );\n        this.mnCurrPageAt             = 0;\n        this.mnPageSum                = (int) Math.ceil( (double) this.mnClusters / this.mnPageSize );\n        this.mCurrClusterPage         = this.loadClusterPage(0);\n    }\n\n    public ClusterPage64(KOMFileSystem fileSystem, RemoteClusterManipulator remoteClusterManipulator, LocalClusterManipulator localClusterManipulator, GUID fileGuid ) {\n        this( fileSystem,remoteClusterManipulator, localClusterManipulator, fileGuid, 10 );\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected List<Cluster> loadClusterPage( int pageIndex ) {\n  
      return (List) this.remoteClusterManipulator.fetchRemoteClusterByFileGuid(\n                this.fileGuid, (long) pageIndex * this.mnPageSize, this.mnPageSize\n        );\n    }\n\n\n    @Override\n    public int getPageSize() {\n        return this.mnPageSize;\n    }\n\n    @Override\n    public int getPageSum() {\n        return this.mnPageSum;\n    }\n\n    @Override\n    public long getCurrentPage() {\n        return this.mnCurrPageAt;\n    }\n\n    @Override\n    public long getClusters() {\n        return this.mnClusters;\n    }\n\n    @Override\n    public Cluster getCluster( long segId ) {\n        if ( segId >= this.mnClusters ) {\n            return null;\n        }\n\n        if ( !this.isInCurrentPage( segId ) ) {\n            this.mnCurrPageAt     = this.calculatePageIndex( segId );\n            this.mCurrClusterPage = this.loadClusterPage(this.mnCurrPageAt);\n        }\n\n        return this.findClusterInPage(segId);\n    }\n\n\n    @Override\n    public LocalCluster getLocalCluster( long segId ) {\n        Cluster cluster = this.getCluster( segId );\n        if ( cluster instanceof LocalCluster ) {\n            return (LocalCluster) cluster;\n        }\n        else if ( cluster instanceof RemoteCluster ) {\n            RemoteCluster remoteCluster = (RemoteCluster) cluster;\n            if( remoteCluster.getDeviceGuid().equals( this.komFileSystem.getConfig().getLocalHostGuid() )) {\n                return this.localClusterManipulator.getLocalClusterByGuid( remoteCluster.getSegGuid() );\n            }\n        }\n\n        return null;\n    }\n\n    protected boolean isInCurrentPage( long segId ) {\n        return segId >= this.mCurrClusterPage.get(0).getSegId() && segId <= this.mCurrClusterPage.get(this.mCurrClusterPage.size() - 1).getSegId();\n    }\n\n    protected Cluster findClusterInPage( long segId ) {\n        int offset = (int) (segId % this.mnPageSize);\n\n        if ( offset < 0 || offset >= this.mCurrClusterPage.size() ) {\n           
 return null;\n        }\n\n        return this.mCurrClusterPage.get( offset );\n    }\n\n    protected int calculatePageIndex( long segId ) {\n        // pageIndex = segId / pageSize (向下取整)\n        return (int) (segId / this.mnPageSize);\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/DirectlyExternalSymbolic.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\npublic class DirectlyExternalSymbolic {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ElementNode.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.system.ko.meta.ElementObject;\n\nimport java.time.LocalDateTime;\n\npublic interface ElementNode extends FileTreeNode, ElementObject {\n\n    long getEnumId();\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    LocalDateTime getCreateTime();\n\n    LocalDateTime getUpdateTime();\n\n    String getName();\n    void setName(String name);\n\n    FileSystemAttributes getAttributes();\n    void setAttributes( FileSystemAttributes attributes );\n\n    KOMFileSystem parentFileSystem();\n\n    @Override\n    default String objectCategoryName() {\n        return \"Storage\";\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ExternalSymbolic.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator;\n\npublic interface ExternalSymbolic extends Symbolic {\n    void apply( ExternalSymbolicManipulator externalSymbolicManipulator );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FSNodeAllotment.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FSNodeAllotment extends Pinenut {\n     Folder              newFolder();\n     Folder              newFolder( String name );\n\n     FileNode            newFileNode();\n     FileNode            newFileNode( String name, long definitionSize, boolean crc32Xor, boolean integrityCheckEnable, boolean disableCluster);\n     FileNode            newFileNode( String name, long definitionSize );\n     FileNode            newFileNode( String name, boolean crc32Xor, boolean integrityCheckEnable, boolean disableCluster);\n\n\n     LocalCluster newLocalCluster();\n     LocalCluster newLocalCluster(GUID fileGuid, int segId, String sourceName, long crc32, long size, long fileStartOffset );\n     LocalCluster newLocalCluster(GUID fileGuid, int segId, String sourceName );\n\n     RemoteCluster newRemoteCluster();\n     RemoteCluster newRemoteCluster(GUID fileGuid, int segId, long crc32, long size );\n     RemoteCluster newRemoteCluster(GUID fileGuid, int segId );\n     Symbolic            newSymbolic();\n     SymbolicMeta        newSymbolicMeta();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FileMeta.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FileMeta extends Pinenut {\n    long getEnumId();\n    void setEnumId(long enumId);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FileNode.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.CheckedFile;\n\nimport java.time.LocalDateTime;\nimport java.util.TreeMap;\n\npublic interface FileNode extends ElementNode, CheckedFile {\n    LocalDateTime getDeletedTime();\n    void setDeletedTime(LocalDateTime deletedTime);\n\n    long getChecksum();\n    void setChecksum(long checksum);\n\n    int getParityCheck();\n    void setParityCheck(int parityCheck);\n\n\n    void copyValueTo(GUID destinationGuid );\n    void copyTo    (GUID destinationGuid);\n\n    FileMeta getFileMeta();\n    void startDistribution(FileMeta fileMeta);\n    GUID getDataAffinityGuid();\n\n    boolean getIsUploadSuccessful();\n    void setIsUploadSuccessful( boolean isUploadSuccessful );\n    TreeMap<Long, Cluster> getClusters();\n\n    @Override\n    default FileNode evinceFileNode() {\n        return this;\n    }\n\n    void removeCluster();\n\n    long getPhysicalSize();\n    void setPhysicalSize(long physicalSize);\n\n    long getLogicSize();\n    void setLogicSize(long logicSize);\n\n    long getDefinitionSize();\n    void setDefinitionSize(long definitionSize);\n\n    long getCrc32Xor();\n\n    void setCrc32Xor( long crc32Xor );\n\n    boolean getIntegrityCheckEnable();\n    void setIntegrityCheckEnable(boolean integrityCheckEnable);\n\n    boolean getDisableCluster();\n\n    void setDisableCluster(boolean disableCluster);\n\n    boolean isUploadSuccess();\n\n    String getPath();\n\n    void setPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FileSystemAttributes.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.Set;\n\npublic interface FileSystemAttributes extends Pinenut ,Map<String, String > {\n    GUID getGuid();\n\n    void setGuid( GUID guid );\n\n    String getAttribute( String key );\n\n    void setAttribute( String key, String value );\n\n    Map<String, String > getAttributes();\n\n    void setAttributes( Map<String, String > attributes );\n\n    ElementNode parentElement();\n\n    @Override\n    default boolean isEmpty() {\n        return this.getAttributes().isEmpty();\n    }\n\n    @Override\n    default int size() {\n        return this.getAttributes().size();\n    }\n\n    @Override\n    default boolean containsKey( Object key ) {\n        return this.getAttributes().containsKey( key );\n    }\n\n\n    @Override\n    default boolean containsValue( Object value ) {\n        return this.getAttributes().containsValue(value);\n    }\n\n    @Override\n    default String get( Object key ) {\n        return this.getAttributes().get(key);\n    }\n\n    @Override\n    default Set<String > keySet() {\n        return this.getAttributes().keySet();\n    }\n\n    @Override\n    default Collection<String > values() {\n        return this.getAttributes().values();\n    }\n\n    @Override\n    default Set<Map.Entry<String, String >> entrySet() {\n        return this.getAttributes().entrySet();\n    }\n\n\n    String insert( String key, String value ) ;\n\n    String update( String key, String value ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FileTreeNode.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic interface FileTreeNode extends TreeNode {\n    default FileNode evinceFileNode(){\n        return null;\n    }\n\n    default Folder evinceFolder(){\n        return null;\n    }\n\n    default Symbolic evinceSymbolic() {\n        return null;\n    }\n\n\n    void setName(String s);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/Folder.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\n\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic interface Folder extends ElementNode {\n    void setNodeAttribute(FileSystemAttributes attributes);\n\n    FolderMeta getFolderMeta();\n    void setFolderMeta(FolderMeta folderMeta);\n\n    Map<String, FileTreeNode> getChildren();\n\n    List<GUID > fetchChildrenGuids();\n\n    void setChildrenGuids( List<GUID> contentGuids, int depth );\n\n    List<FileTreeNode > listItem();\n\n\n    void put ( String key, FileTreeNode val );\n\n    void remove ( String key );\n\n    void put ( ElementNode child );\n\n    Folder createFolder( String name );\n\n    ExternalSymbolic createExternalSymbolic( String name, String reparsedPoint );\n\n    KOMFileSystem getFileTree();\n\n    boolean containsKey  ( String key );\n\n\n    boolean isEmpty();\n\n    @Override\n    default Folder evinceFolder() {\n        return this;\n    }\n\n    Set<String > keySet();\n\n    Set<Map.Entry<String,FileTreeNode>> entrySet();\n\n    void copyTo(GUID destinationGuid);\n    void copyNamespaceMetaTo(GUID destinationGuid);\n    long TotalFolderSize();\n\n    void applyVolume( GUID volumeGuid );\n\n    GUID getRelationVolume();\n\n    String getPath();\n\n    void setPath( String path );\n\n    Integer getSyncState();\n\n    void setSyncState( Integer syncState );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/FolderMeta.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FolderMeta extends Pinenut {\n    long getEnumId();\n    void setEnumId(long enumId);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericExternalSymbolic.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator;\n\npublic class GenericExternalSymbolic extends ArcReparseSemanticNode implements ExternalSymbolic {\n    private SymbolicMeta                    symbolicMeta;\n\n    private ExternalSymbolicManipulator     externalSymbolicManipulator;\n\n\n    public GenericExternalSymbolic() {\n        super();\n    }\n\n    public GenericExternalSymbolic( KOMFileSystem fileSystem ) {\n        super( fileSystem );\n    }\n\n    @Override\n    public SymbolicMeta getSymbolicMeta() {\n        return this.symbolicMeta;\n    }\n\n    @Override\n    public void setSymbolicMeta(SymbolicMeta symbolicMeta) {\n        this.symbolicMeta = symbolicMeta;\n    }\n\n    @Override\n    public void create() {\n        this.externalSymbolicManipulator.insert( this );\n    }\n\n    @Override\n    public void remove() {\n        this.externalSymbolicManipulator.remove( this.guid );\n        this.symbolicMeta.remove();\n    }\n\n    @Override\n    public void apply(ExternalSymbolicManipulator externalSymbolicManipulator) {\n        this.externalSymbolicManipulator = externalSymbolicManipulator;\n    }\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFSNodeAllotment.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\npublic class GenericFSNodeAllotment implements FSNodeAllotment {\n    private FileMasterManipulator       fileMasterManipulator;\n    private KOMFileSystem               fileSystem;\n    private GuidAllocator               guidAllocator;\n\n    public GenericFSNodeAllotment(FileMasterManipulator fileMasterManipulator, KOMFileSystem fileSystem){\n        this.fileMasterManipulator = fileMasterManipulator;\n        this.fileSystem = fileSystem;\n        this.guidAllocator   = fileSystem.getGuidAllocator();\n    }\n    @Override\n    public Folder newFolder(){\n        GenericFolder folder = new GenericFolder(fileSystem, fileMasterManipulator.getFolderManipulator());\n        folder.setGuid( guidAllocator.nextGUID() );\n        return folder;\n    }\n\n\n\n    @Override\n    public Folder newFolder(String name) {\n        GenericFolder folder = new GenericFolder(fileSystem, fileMasterManipulator.getFolderManipulator());\n        folder.setName( name );\n        folder.setGuid( guidAllocator.nextGUID() );\n        return folder;\n    }\n\n    @Override\n    public FileNode newFileNode(){\n        GenericFileNode fileNode = new GenericFileNode(fileSystem, fileMasterManipulator.getFileManipulator());\n        fileNode.setGuid( guidAllocator.nextGUID() );\n        return fileNode;\n    }\n    @Override\n    public FileNode newFileNode(String name, long definitionSize, boolean crc32Xor, boolean integrityCheckEnable, boolean disableCluster) {\n        GenericFileNode fileNode = new GenericFileNode(fileSystem, fileMasterManipulator.getFileManipulator());\n        fileNode.setGuid( guidAllocator.nextGUID() );\n        fileNode.setName( name );\n        //fileNode.setCrc32Xor( crc32Xor 
);\n        fileNode.setDefinitionSize( definitionSize );\n        fileNode.setIntegrityCheckEnable( integrityCheckEnable );\n        fileNode.setDisableCluster( disableCluster );\n        return fileNode;\n    }\n\n    @Override\n    public FileNode newFileNode(String name, long definitionSize) {\n        GenericFileNode fileNode = new GenericFileNode(fileSystem, fileMasterManipulator.getFileManipulator());\n        fileNode.setName( name );\n        fileNode.setDefinitionSize( definitionSize );\n        fileNode.setGuid( guidAllocator.nextGUID() );\n        return fileNode;\n    }\n\n    @Override\n    public FileNode newFileNode(String name, boolean crc32Xor, boolean integrityCheckEnable, boolean disableCluster) {\n        GenericFileNode fileNode = new GenericFileNode(fileSystem, fileMasterManipulator.getFileManipulator());\n        fileNode.setName( name );\n        //fileNode.setCrc32Xor( crc32Xor );\n        fileNode.setDisableCluster( disableCluster );\n        fileNode.setIntegrityCheckEnable( integrityCheckEnable );\n        return fileNode;\n    }\n\n\n\n    @Override\n    public LocalCluster newLocalCluster(){\n        GenericLocalCluster frame = new GenericLocalCluster(fileMasterManipulator.getLocalClusterManipulator());\n        frame.setSegGuid( guidAllocator.nextGUID() );\n        frame.setLocalClusterManipulator( this.fileMasterManipulator.getLocalClusterManipulator() );\n        return frame;\n    }\n    @Override\n    public LocalCluster newLocalCluster(GUID fileGuid, int segId, String sourceName, long crc32, long size, long fileStartOffset) {\n        GenericLocalCluster frame = new GenericLocalCluster(fileMasterManipulator.getLocalClusterManipulator());\n        frame.setSegGuid( guidAllocator.nextGUID() );\n        frame.setSegId( segId );\n        frame.setSourceName( sourceName );\n        frame.setCrc32( crc32 );\n        frame.setSize( size );\n        frame.setFileGuid( fileGuid );\n        return frame;\n    }\n\n    @Override\n    
public LocalCluster newLocalCluster(GUID fileGuid, int segId, String sourceName) {\n        GenericLocalCluster frame = new GenericLocalCluster(fileMasterManipulator.getLocalClusterManipulator());\n        frame.setFileGuid( fileGuid );\n        frame.setSegId( segId );\n        frame.setSourceName( sourceName );\n        frame.setSegGuid( guidAllocator.nextGUID() );\n        return frame;\n    }\n\n\n\n    @Override\n    public RemoteCluster newRemoteCluster(){\n        GenericRemoteCluster frame = new GenericRemoteCluster(fileMasterManipulator.getRemoteClusterManipulator());\n        frame.setSegGuid( guidAllocator.nextGUID() );\n        return frame;\n    }\n    @Override\n    public RemoteCluster newRemoteCluster(GUID fileGuid, int segId, long crc32, long size) {\n        GenericRemoteCluster frame = new GenericRemoteCluster(fileMasterManipulator.getRemoteClusterManipulator());\n        frame.setSegGuid( guidAllocator.nextGUID() );\n        frame.setFileGuid( fileGuid );\n        frame.setSegId( segId );\n        frame.setCrc32( crc32 );\n        frame.setSize( size );\n        return frame;\n    }\n\n    @Override\n    public RemoteCluster newRemoteCluster(GUID fileGuid, int segId) {\n        GenericRemoteCluster frame = new GenericRemoteCluster(fileMasterManipulator.getRemoteClusterManipulator());\n        frame.setFileGuid( fileGuid );\n        frame.setSegGuid( guidAllocator.nextGUID() );\n        frame.setSegId( segId );\n        return frame;\n    }\n\n    @Override\n    public Symbolic newSymbolic() {\n        return new GenericSymbolic(this.fileMasterManipulator.getSymbolicManipulator());\n    }\n\n    @Override\n    public SymbolicMeta newSymbolicMeta() {\n        return new GenericSymbolicMeta(this.fileMasterManipulator.getSymbolicMetaManipulator());\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFileMeta.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class GenericFileMeta implements FileMeta {\n    private long enumId;\n    private GUID guid;\n\n    public GenericFileMeta() {\n    }\n\n    public GenericFileMeta(long enumId, GUID guid) {\n        this.enumId = enumId;\n        this.guid = guid;\n    }\n\n\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    public GUID getGuid() {\n        return guid;\n    }\n\n\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    public String toString() {\n        return \"GenericFileMeta{enumId = \" + enumId + \", guid = \" + guid + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFileNode.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.source.FileManipulator;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.time.LocalDateTime;\nimport java.util.TreeMap;\n\npublic class GenericFileNode extends ArchElementNode implements FileNode{\n    private LocalDateTime               deletedTime;\n    private long                        checksum;\n    private int                         parityCheck;\n    private FileMeta                    fileMeta;\n\n    private KOMFileSystem               fileSystem;\n    private FileManipulator             fileManipulator;\n    private TreeMap<Long, Cluster>      clusters = new TreeMap<>();\n    private boolean                     isUploadSuccessful;\n    private long                        physicalSize;\n    private long                        logicSize;\n\n    private long                        definitionSize;\n    private long                        crc32Xor;\n    private boolean                     integrityCheckEnable;\n    private boolean                     disableCluster;\n\n    private String                      path;\n\n    @Override\n    public boolean getIsUploadSuccessful() {\n        return this.isUploadSuccessful;\n    }\n\n    @Override\n    public void setIsUploadSuccessful(boolean isUploadSuccessful) {\n        this.isUploadSuccessful = isUploadSuccessful;\n    }\n\n\n    @Override\n    public TreeMap<Long, Cluster> getClusters() {\n        return this.fileSystem.getClustersByFileGuid( this.guid );\n    }\n\n\n    public GenericFileNode() {\n    }\n\n    public GenericFileNode( KOMFileSystem fileSystem ) {\n        this.fileSystem = fileSystem;\n        GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator();\n        this.setGuid( guidAllocator.nextGUID() 
);\n        this.setCreateTime( LocalDateTime.now() );\n    }\n    public GenericFileNode( KOMFileSystem fileSystem, FileManipulator fileManipulator ) {\n        this(fileSystem);\n        this.fileManipulator = fileManipulator;\n    }\n\n\n    public void apply( KOMFileSystem fileSystem ) {\n        this.fileSystem = fileSystem;\n    }\n\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    public GUID getGuid() {\n        return guid;\n    }\n\n\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n\n    public LocalDateTime getCreateTime() {\n        return createTime;\n    }\n\n\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n\n    public LocalDateTime getUpdateTime() {\n        return updateTime;\n    }\n\n\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n\n    public String getName() {\n        return name;\n    }\n\n    @Override\n    public FileSystemAttributes getAttributes() {\n        return this.fileSystemAttributes;\n    }\n\n    @Override\n    public KOMFileSystem parentFileSystem() {\n        return fileSystem;\n    }\n\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n\n    public LocalDateTime getDeletedTime() {\n        return deletedTime;\n    }\n\n\n    public void setDeletedTime(LocalDateTime deletedTime) {\n        this.deletedTime = deletedTime;\n    }\n\n\n    public long getChecksum() {\n        return checksum;\n    }\n\n\n    public void setChecksum(long checksum) {\n        this.checksum = checksum;\n    }\n\n\n    public int getParityCheck() {\n        return parityCheck;\n    }\n\n\n    public void setParityCheck(int parityCheck) {\n        this.parityCheck = parityCheck;\n    }\n\n\n    @Override\n    public void copyValueTo(GUID destinationGuid) {\n\n    }\n\n    
@Override\n    public void copyTo(GUID destinationGuid) {\n\n    }\n\n\n    public FileMeta getFileMeta() {\n        return fileMeta;\n    }\n\n\n    public void startDistribution(FileMeta fileMeta) {\n        this.fileMeta = fileMeta;\n    }\n\n    @Override\n    public GUID getDataAffinityGuid() {\n        return null;\n    }\n\n\n    public FileSystemAttributes getAttribute() {\n        return fileSystemAttributes;\n    }\n\n\n    public void setAttribute(FileSystemAttributes fileSystemAttributes) {\n        this.fileSystemAttributes = fileSystemAttributes;\n    }\n\n    @Override\n    public void removeCluster() {\n        if ( this.clusters == null || this.clusters.isEmpty() ){\n            this.clusters = this.fileSystem.getClustersByFileGuid( this.guid );\n        }\n        for ( Cluster cluster : this.clusters.values() ){\n            cluster.remove();\n        }\n    }\n\n    @Override\n    public long getPhysicalSize() {\n        return this.physicalSize;\n    }\n\n    @Override\n    public void setPhysicalSize(long physicalSize) {\n        this.physicalSize = physicalSize;\n    }\n\n    @Override\n    public long getLogicSize() {\n        return this.logicSize;\n    }\n\n    @Override\n    public void setLogicSize(long logicSize) {\n        this.logicSize = logicSize;\n    }\n\n    @Override\n    public long getDefinitionSize() {\n        return this.definitionSize;\n    }\n\n    @Override\n    public void setDefinitionSize(long definitionSize) {\n        this.definitionSize = definitionSize;\n    }\n\n    @Override\n    public long getCrc32Xor() {\n        return this.crc32Xor;\n    }\n\n    @Override\n    public void setCrc32Xor( long crc32Xor ) {\n        this.crc32Xor = crc32Xor;\n    }\n\n    @Override\n    public boolean getIntegrityCheckEnable() {\n        return this.integrityCheckEnable;\n    }\n\n    @Override\n    public void setIntegrityCheckEnable(boolean integrityCheckEnable) {\n        this.integrityCheckEnable = integrityCheckEnable;\n   
 }\n\n    @Override\n    public boolean getDisableCluster() {\n        return this.disableCluster;\n    }\n\n    @Override\n    public boolean isUploadSuccess() {\n        if ( this.physicalSize == this.definitionSize ){\n            return true;\n        }\n        return false;\n    }\n\n    @Override\n    public Number size() {\n        return this.physicalSize;\n    }\n\n    @Override\n    public void setDisableCluster(boolean disableCluster) {\n        this.disableCluster = disableCluster;\n    }\n\n    @Override\n    public String getPath() {\n        return this.path;\n    }\n\n    @Override\n    public void setPath(String path) {\n        this.path = path;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFolder.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.source.FolderManipulator;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic class GenericFolder extends ArchElementNode implements Folder{\n    private FileSystemAttributes        attributes;\n    private FolderMeta                  folderMeta;\n    private KOMFileSystem               fileSystem;\n    private FolderManipulator           folderManipulator;\n    private String                      path;\n    private Integer                     syncState;\n\n    public GenericFolder() {\n    }\n\n    public GenericFolder( KOMFileSystem fileSystem ) {\n        this.fileSystem = fileSystem;\n        GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator();\n        this.setGuid( guidAllocator.nextGUID() );\n        this.setCreateTime( LocalDateTime.now() );\n\n        this.folderManipulator = fileSystem.getFileMasterManipulator().getFolderManipulator();\n    }\n\n    public GenericFolder( KOMFileSystem fileSystem, FolderManipulator folderManipulator ) {\n        this(fileSystem);\n        this.folderManipulator = folderManipulator;\n    }\n\n\n    public void apply(KOMFileSystem fileSystem ) {\n        this.fileSystem = fileSystem;\n    }\n\n    @Override\n    public KOMFileSystem parentFileSystem() {\n        return this.fileSystem;\n    }\n\n\n    @Override\n    public void setNodeAttribute(FileSystemAttributes attributes) {\n        this.attributes = attributes;\n    }\n\n    @Override\n    public FolderMeta getFolderMeta() {\n        return this.folderMeta;\n    }\n\n    @Override\n    
public void setFolderMeta(FolderMeta folderMeta) {\n        this.folderMeta = folderMeta;\n    }\n\n    @Override\n    public Map<String, FileTreeNode> getChildren() {\n        return null;\n    }\n\n    @Override\n    public List<GUID> fetchChildrenGuids() {\n        return null;\n    }\n\n    @Override\n    public void setChildrenGuids( List<GUID> contentGuids, int depth ) {\n\n    }\n\n    @Override\n    public List<FileTreeNode> listItem() {\n        ArrayList<FileTreeNode> fileTreeNodes = new ArrayList<>();\n        List<TreeNode> children = this.fileSystem.getChildren(this.guid);\n        for( TreeNode node : children ){\n//            if( node instanceof ExternalSymbolic ){\n//                ExternalSymbolic externalSymbolic = (ExternalSymbolic) node;\n//                String reparsedPoint = externalSymbolic.getReparsedPoint();\n//                File file = new File(reparsedPoint);\n//                if( file.isDirectory() ){\n//                    GenericExternalFolder externalFolder = new GenericExternalFolder(file);\n//                    fileTreeNodes.add(externalFolder);\n//                }else {\n//                    GenericExternalFile externalFile = new GenericExternalFile(file);\n//                    fileTreeNodes.add(externalFile);\n//                }\n//            }else {\n//                FileTreeNode fileTreeNode = this.fileSystem.get(node.getGuid());\n//                fileTreeNodes.add( fileTreeNode );\n//            }\n            FileTreeNode fileTreeNode = this.fileSystem.get(node.getGuid());\n            fileTreeNodes.add( fileTreeNode );\n        }\n        return fileTreeNodes;\n    }\n\n    @Override\n    public void put( String key, FileTreeNode val ) {\n\n    }\n\n    @Override\n    public void remove( String key ) {\n\n    }\n\n\n    @Override\n    public void put( ElementNode child ) {\n        this.fileSystem.put( child );\n        this.fileSystem.affirmOwnedNode( this.guid, child.getGuid() );\n    }\n\n    @Override\n    
public Folder createFolder( String name ) {\n        Folder neo = new GenericFolder( this.fileSystem );\n        neo.setName( name );\n\n        this.put( neo );\n        return neo;\n    }\n\n    @Override\n    public ExternalSymbolic createExternalSymbolic( String name, String reparsedPoint ) {\n        ExternalSymbolic neo = new GenericExternalSymbolic( this.fileSystem );\n        neo.setName( name );\n        neo.setReparsedPoint( reparsedPoint );\n        this.put( neo );\n        return neo;\n    }\n\n    @Override\n    public KOMFileSystem getFileTree() {\n        return this.fileSystem;\n    }\n\n    @Override\n    public long TotalFolderSize() {\n        long size = 0;\n        List<TreeNode> children = this.fileSystem.getChildren(this.guid);\n        for( TreeNode node : children ){\n            if ( node instanceof Folder ){\n                Folder folder = (Folder) node;\n                size += folder.TotalFolderSize();\n            }\n            else if( node instanceof FileNode ){\n                FileNode file = (FileNode) node;\n                size += file.size().longValue();\n            }\n        }\n        return size;\n    }\n\n    @Override\n    public boolean containsKey(String key) {\n        return false;\n    }\n\n//    @Override\n//    public Number size() {\n//        long size = 0;\n//        List<TreeNode> children = this.fileSystem.getChildren(this.guid);\n//        for( TreeNode node : children ){\n//            ElementNode elementNode = (ElementNode) node;\n//            size += elementNode.size().longValue();\n//        }\n//        return size;\n//    }\n\n    @Override\n    public boolean isEmpty() {\n        return false;\n    }\n\n    @Override\n    public Set<String> keySet() {\n        return null;\n    }\n\n    @Override\n    public Set<Map.Entry<String, FileTreeNode>> entrySet() {\n        return null;\n    }\n\n    @Override\n    public void copyTo(GUID destinationGuid) {\n\n    }\n\n    @Override\n    public void 
copyNamespaceMetaTo(GUID destinationGuid) {\n\n    }\n\n    @Override\n    public String getPath() {\n        return this.path;\n    }\n\n    @Override\n    public void setPath(String path) {\n        this.path = path;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public void applyVolume(GUID volumeGuid) {\n        this.fileSystem.setFolderVolumeMapping( this.guid, volumeGuid );\n    }\n\n    @Override\n    public GUID getRelationVolume() {\n        return this.fileSystem.getMappingVolume( this.guid );\n    }\n\n    @Override\n    public Integer getSyncState() {\n        return this.syncState;\n    }\n\n    @Override\n    public void setSyncState(Integer syncState) {\n        this.syncState = syncState;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericFolderMeta.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class GenericFolderMeta implements FolderMeta{\n    private long enumId;\n    private GUID guid;\n\n    public GenericFolderMeta() {\n    }\n\n    public GenericFolderMeta(long enumId, GUID guid) {\n        this.enumId = enumId;\n        this.guid = guid;\n    }\n\n\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    public GUID getGuid() {\n        return guid;\n    }\n\n\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    public String toString() {\n        return \"GenericFolderMeta{enumId = \" + enumId + \", guid = \" + guid + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericLocalCluster.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.hydra.storage.file.source.LocalClusterManipulator;\n\nimport java.time.LocalDateTime;\n\npublic class GenericLocalCluster extends ArchCluster implements LocalCluster {\n    private LocalDateTime               createTime;\n    private LocalDateTime               updateTime;\n    private String                      sourceName;\n    private LocalClusterManipulator       localClusterManipulator;\n    private long                        definitionSize;\n    private long                        fileStartOffset;\n\n    @Override\n    public long getDefinitionSize() {\n        return this.definitionSize;\n    }\n\n    @Override\n    public void setDefinitionSize(long definitionSize) {\n        this.definitionSize = definitionSize;\n    }\n\n    public GenericLocalCluster() {\n    }\n\n    public GenericLocalCluster(LocalDateTime createTime, LocalDateTime updateTime, String sourceName) {\n        this.createTime = createTime;\n        this.updateTime = updateTime;\n        this.sourceName = sourceName;\n    }\n\n    public GenericLocalCluster(LocalClusterManipulator localClusterManipulator ) {\n        this.localClusterManipulator = localClusterManipulator;\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n    }\n\n\n    public LocalDateTime getCreateTime() {\n        return createTime;\n    }\n\n\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public long getFileStartOffset() {\n        return this.fileStartOffset;\n    }\n\n    @Override\n    public void setFileStartOffset(long fileStartOffset) {\n        this.fileStartOffset = fileStartOffset;\n    }\n\n    public LocalDateTime getUpdateTime() {\n        return updateTime;\n    }\n\n\n    @Override\n    public void setLocalClusterManipulator(LocalClusterManipulator localClusterManipulator) {\n        
this.localClusterManipulator = localClusterManipulator;\n    }\n\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n\n    public String getSourceName() {\n        return sourceName;\n    }\n\n\n    public void setSourceName(String sourceName) {\n        this.sourceName = sourceName;\n    }\n\n    @Override\n    public void save() {\n        LocalCluster frame = this.localClusterManipulator.getClusterByFileWithId(this.getFileGuid(), this.getSegId());\n        if( frame == null ){\n            this.localClusterManipulator.insert(this);\n        }else {\n            this.localClusterManipulator.update( this );\n        }\n    }\n\n    @Override\n    public void remove() {\n        this.localClusterManipulator.remove( this.getSegGuid() );\n    }\n    public String toString() {\n        return \"GenericLocalCluster{createTime = \" + createTime + \", updateTime = \" + updateTime + \", sourceName = \" + sourceName + \"}\";\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericLocalClusterMeta.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.Map;\n\npublic class GenericLocalClusterMeta implements LocalClusterMeta{\n    private long                                enumId;\n    private GUID                                guid;\n    private String                              key;\n    private String                              value;\n    protected Map<String, String > metas = new LinkedTreeMap<>();\n\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public String getKey() {\n        return this.key;\n    }\n\n    @Override\n    public void setKey(String key) {\n        this.key = key;\n    }\n\n    @Override\n    public String getValue() {\n        return this.value;\n    }\n\n    @Override\n    public void setValue(String value) {\n        this.value = value;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericRemoteCluster.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.source.RemoteClusterManipulator;\n\npublic class GenericRemoteCluster extends ArchCluster implements RemoteCluster {\n    private GUID                    deviceGuid;\n    private RemoteClusterManipulator  frameManipulator;\n\n    public GenericRemoteCluster() {\n    }\n\n    public GenericRemoteCluster(GUID deviceGuid) {\n        this.deviceGuid = deviceGuid;\n    }\n\n    public GenericRemoteCluster(RemoteClusterManipulator remoteClusterManipulator ) {\n        this.frameManipulator = remoteClusterManipulator;\n\n    }\n\n    public GUID getDeviceGuid() {\n        return deviceGuid;\n    }\n\n\n    public void setDeviceGuid(GUID deviceGuid) {\n        this.deviceGuid = deviceGuid;\n    }\n\n    @Override\n    public void setRemoteClusterManipulator(RemoteClusterManipulator remoteClusterManipulator) {\n        this.frameManipulator = remoteClusterManipulator;\n    }\n\n    public String toString() {\n        return \"GenericRemoteCluster{deviceGuid = \" + deviceGuid + \"}\";\n    }\n\n    @Override\n    public void save() {\n        RemoteCluster cluster = this.frameManipulator.getClusterByFileWithId(this.getFileGuid(), this.getSegId());\n        if( cluster == null ){\n            this.frameManipulator.insert(this);\n        }\n    }\n    @Override\n    public void remove() {\n        this.frameManipulator.remove( this.getSegGuid() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericSymbolic.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.hydra.storage.file.source.SymbolicManipulator;\n\npublic class GenericSymbolic extends ArcReparseSemanticNode implements Symbolic{\n    private SymbolicMeta            symbolicMeta;\n    private SymbolicManipulator     symbolicManipulator;\n\n    public GenericSymbolic( SymbolicManipulator symbolicManipulator ) {\n        this.symbolicManipulator = symbolicManipulator;\n    }\n\n\n\n    public SymbolicMeta getSymbolicMeta() {\n        return symbolicMeta;\n    }\n\n\n    public void setSymbolicMeta(SymbolicMeta symbolicMeta) {\n        this.symbolicMeta = symbolicMeta;\n    }\n\n    @Override\n    public void create() {\n        this.symbolicManipulator.insert(this);\n    }\n\n    @Override\n    public void remove() {\n        this.symbolicManipulator.remove(this.guid);\n        this.symbolicMeta.remove();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/GenericSymbolicMeta.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.source.SymbolicMetaManipulator;\n\npublic class GenericSymbolicMeta implements SymbolicMeta{\n    private long enumId;\n    private GUID guid;\n    private SymbolicMetaManipulator symbolicMetaManipulator;\n\n\n    public GenericSymbolicMeta() {\n    }\n\n    public GenericSymbolicMeta( SymbolicMetaManipulator symbolicMetaManipulator ) {\n        this.symbolicMetaManipulator = symbolicMetaManipulator;\n    }\n\n    public GenericSymbolicMeta(long enumId, GUID guid) {\n        this.enumId = enumId;\n        this.guid = guid;\n    }\n\n\n    public long getEnumId() {\n        return enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    public GUID getGuid() {\n        return guid;\n    }\n\n\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public void save() {\n        this.symbolicMetaManipulator.insert(this);\n    }\n\n    @Override\n    public void remove() {\n        this.symbolicMetaManipulator.remove(this.guid);\n    }\n\n    public String toString() {\n        return \"GenericSymbolicMeta{enumId = \" + enumId + \", guid = \" + guid + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/LocalCluster.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.hydra.storage.file.source.LocalClusterManipulator;\n\nimport java.time.LocalDateTime;\n\npublic interface LocalCluster extends Cluster {\n    LocalDateTime getCreateTime();\n    void setCreateTime(LocalDateTime createTime);\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime(LocalDateTime updateTime);\n\n    String getSourceName();\n    void setSourceName(String sourceName);\n\n    @Override\n    default LocalCluster evinceLocalCluster() {\n        return this;\n    }\n\n    void setLocalClusterManipulator(LocalClusterManipulator localClusterManipulator);\n\n    long getDefinitionSize();\n    void setDefinitionSize( long definitionSize );\n\n    long getFileStartOffset();\n    void setFileStartOffset( long fileStartOffset );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/LocalClusterMeta.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface LocalClusterMeta {\n    long getEnumId();\n    void setEnumId(long enumId);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    String getKey();\n    void setKey(String key);\n\n    String getValue();\n    void setValue( String value );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/RemoteCluster.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.source.RemoteClusterManipulator;\n\npublic interface RemoteCluster extends Cluster {\n    GUID getDeviceGuid();\n    void setDeviceGuid(GUID deviceGuid);\n    void setRemoteClusterManipulator(RemoteClusterManipulator remoteClusterManipulator);\n\n    @Override\n    default RemoteCluster evinceRemoteCluster() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/ReparseSemanticNode.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface ReparseSemanticNode extends ElementNode {\n    long getEnumId();\n    void setEnumId(long enumId);\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime( LocalDateTime updateTime );\n\n    String getName();\n    void setName( String name );\n\n    String getReparsedPoint();\n    void setReparsedPoint( String reparsedPoint );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/Symbolic.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\npublic interface Symbolic extends ReparseSemanticNode {\n    SymbolicMeta  getSymbolicMeta();\n\n    void setSymbolicMeta( SymbolicMeta symbolicMeta );\n\n    void create();\n\n    void remove();\n\n    @Override\n    default Symbolic evinceSymbolic() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/entity/SymbolicMeta.java",
    "content": "package com.pinecone.hydra.storage.file.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface SymbolicMeta extends Pinenut {\n\n    long getEnumId();\n    void setEnumId( long enumId );\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    void save();\n    void remove();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ArchNativeExternalFileObject.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport java.io.File;\nimport java.net.URI;\nimport java.time.Instant;\nimport java.time.LocalDateTime;\nimport java.time.ZoneId;\n\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ArchElementNode;\n\npublic abstract class ArchNativeExternalFileObject extends ArchElementNode implements ExternalFileObject {\n    protected File      mNativeFile;\n\n    public ArchNativeExternalFileObject( File file ) {\n        this.mNativeFile  = file;\n        this.name         = file.getName();\n        long lastModified = file.lastModified();\n        this.updateTime   = LocalDateTime.ofInstant(Instant.ofEpochMilli(lastModified), ZoneId.systemDefault());\n        this.createTime   = this.updateTime;\n    }\n\n    @Override\n    public KOMFileSystem parentFileSystem() {\n        return null;\n    }\n\n    public File getNativeFile() {\n        return this.mNativeFile;\n    }\n\n    @Override\n    public URI toURI() {\n        return this.mNativeFile.toURI();\n    }\n\n    public String getURI() {\n        return this.toURI().toString();\n    }\n\n    @Override\n    public String getPath() {\n        return this.mNativeFile.getPath();\n    }\n\n    @Override\n    public boolean delete() {\n        return this.mNativeFile.delete();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalFile.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport com.pinecone.hydra.storage.UFile;\n\nimport java.io.File;\nimport java.net.URI;\n\npublic interface ExternalFile extends ExternalFileObject, UFile {\n\n    File getNativeFile();\n\n    URI toURI();\n\n    String getName();\n\n    String getPath();\n\n    boolean delete();\n\n    default boolean exists() {\n        return this.getNativeFile().exists();\n    }\n\n    @Override\n    default Object getNativeHandle() {\n        return this.getNativeFile();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalFileObject.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport java.net.URI;\n\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\n\npublic interface ExternalFileObject extends ElementNode {\n\n    URI toURI();\n\n    @Override\n    String getName();\n\n    String getPath();\n\n    boolean delete();\n\n    Object getNativeHandle();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalFileSystemInstrument.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport com.pinecone.framework.system.regime.Instrument;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.ExternalSymbolic;\n\nimport java.io.IOException;\n\npublic interface ExternalFileSystemInstrument extends Instrument {\n\n    void insertExternalSymbolic( ExternalSymbolic externalSymbolic );\n\n    void createExternalSymbolic( String folderPath, String externalSymbolicName,String reparsedPoint );\n\n    ElementNode queryElement( String path );\n\n    void copy( String sourcePath, String destinationPath ) throws IOException;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalFolder.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\n\nimport java.io.File;\nimport java.net.URI;\nimport java.util.List;\n\npublic interface ExternalFolder extends ExternalFileObject {\n\n    File getNativeFile();\n\n    URI toURI();\n\n    String getName();\n\n    String getPath();\n\n    String[] list();\n\n    File[]   listFiles();\n\n    List<FileTreeNode> listItem();\n\n    boolean delete();\n\n    @Override\n    default Object getNativeHandle() {\n        return this.getNativeFile();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/ExternalSymbolicSelector.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport com.pinecone.hydra.system.ko.kom.ReparsePointSelector;\n\npublic interface ExternalSymbolicSelector extends ReparsePointSelector {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/GenericNativeExternalFile.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport java.io.File;\n\npublic class GenericNativeExternalFile extends ArchNativeExternalFileObject implements ExternalFile {\n\n    public GenericNativeExternalFile( File file ) {\n        super( file );\n    }\n\n    @Override\n    public Number size() {\n        return this.mNativeFile.getTotalSpace();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/GenericNativeExternalFolder.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport com.pinecone.framework.util.io.FileUtils;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.file.FileVisitResult;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.SimpleFileVisitor;\nimport java.nio.file.attribute.BasicFileAttributes;\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class GenericNativeExternalFolder extends ArchNativeExternalFileObject implements ExternalFolder {\n\n    public GenericNativeExternalFolder( File file ){\n        super( file );\n    }\n\n    @Override\n    public String[] list() {\n        return this.mNativeFile.list();\n    }\n\n    @Override\n    public File[] listFiles() {\n        return this.mNativeFile.listFiles();\n    }\n\n    @Override\n    public List<FileTreeNode> listItem() {\n        ArrayList<FileTreeNode> fileTreeNodes = new ArrayList<>();\n        File[] files = this.listFiles();\n        if( files.length > 0 ){\n            for( int i = 0;i < files.length; ++i ){\n                File file = files[i];\n                if( file.isDirectory() ){\n                    fileTreeNodes.add( new GenericNativeExternalFolder(file) );\n                }\n                else {\n                    fileTreeNodes.add( new GenericNativeExternalFile( file ) );\n                }\n            }\n        }\n        return fileTreeNodes;\n    }\n\n    @Override\n    public boolean delete() {\n        try {\n            FileUtils.purgeDirectory( this.mNativeFile );\n        }\n        catch ( IOException e ) {\n            return false;\n        }\n        return true;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/KenExternalFileSystemInstrument.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.storage.StorageConstants;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.ExternalSymbolic;\nimport com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator;\nimport com.pinecone.hydra.storage.file.source.FileManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.storage.file.source.FolderManipulator;\nimport com.pinecone.hydra.storage.natives.NativeExternalFileSystems;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.kom.PathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.List;\n\npublic class KenExternalFileSystemInstrument implements ExternalFileSystemInstrument {\n    protected KOMFileSystem                 fileSystem;\n\n    protected PathResolver                  pathResolver;\n\n    protected PathSelector                  pathSelector;\n\n    protected FileMasterManipulator         fileMasterManipulator;\n\n    protected FolderManipulator             folderManipulator;\n\n    protected FileManipulator               fileManipulator;\n\n    protected ExternalSymbolicManipulator   externalSymbolicManipulator;\n\n    protected ImperialTree                  imperialTree;\n\n\n    public KenExternalFileSystemInstrument( KOMFileSystem fileSystem ){\n        this.fileSystem                     = fileSystem;\n        this.pathResolver                   = new KOPathResolver( fileSystem.getConfig() );\n        this.fileMasterManipulator          = this.fileSystem.getFileMasterManipulator();\n        
this.fileManipulator                = this.fileMasterManipulator.getFileManipulator();\n        this.folderManipulator              = this.fileMasterManipulator.getFolderManipulator();\n        this.externalSymbolicManipulator    = this.fileMasterManipulator.getExternalSymbolicManipulator();\n        this.imperialTree                   = fileSystem.getMasterTrieTree();\n\n        this.pathSelector                   = new KenExternalSymbolicSelector(\n                this.pathResolver, this.fileSystem.getMasterTrieTree(),this.folderManipulator, new GUIDNameManipulator[] { this.fileManipulator },\n                this.externalSymbolicManipulator\n        );\n    }\n\n    @Override\n    public ElementNode queryElement( String path ) {\n        GUID guid = this.queryGUIDByPath(path);\n        if( guid == null ) {\n            return null;\n        }\n\n        ExternalSymbolic externalSymbolic = this.externalSymbolicManipulator.getSymbolicByGuid(guid);\n        String externalPath = this.fileSystem.getPath(externalSymbolic.getGuid());\n        String remainingPath = path.substring(externalPath.length()).replaceFirst( StorageConstants.PathSeparator, \"\" );\n\n        String realFilePath = externalSymbolic.getReparsedPoint() + StorageConstants.PathSeparator + remainingPath;\n        File file = new File(realFilePath);\n        if( file.isDirectory() ){\n            return new GenericNativeExternalFolder( file );\n        }\n        else {\n            return new GenericNativeExternalFile( file );\n        }\n    }\n\n    @Override\n    public void insertExternalSymbolic( ExternalSymbolic externalSymbolic ) {\n        this.externalSymbolicManipulator.insert( externalSymbolic );\n    }\n\n\n    @Override\n    public void createExternalSymbolic( String folderPath, String externalSymbolicName, String reparsedPoint ) {\n        ElementNode elementNode = this.fileSystem.queryElement(folderPath);\n        elementNode.evinceFolder().createExternalSymbolic( 
externalSymbolicName,reparsedPoint );\n    }\n\n    @Override\n    public void copy( String sourcePath, String destinationPath ) throws IOException {\n        NativeExternalFileSystems.copy( sourcePath, destinationPath );\n    }\n\n    private GUID queryGUIDByPath( String path ) {\n        return this.queryGUIDByNS( path, null, null );\n    }\n\n    private GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) {\n        if( szTargetSep != null ) {\n            path = path.replace( szBadSep, szTargetSep );\n        }\n\n        String[] parts = this.pathResolver.segmentPathParts( path );\n        List<String > resolvedParts = this.pathResolver.resolvePath( parts );\n        path = this.pathResolver.assemblePath( resolvedParts );\n\n        GUID guid = this.imperialTree.queryGUIDByPath( path );\n        if ( guid != null ){\n            return guid;\n        }\n\n\n        guid = this.pathSelector.searchGUID( resolvedParts );\n        return guid;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/external/KenExternalSymbolicSelector.java",
    "content": "package com.pinecone.hydra.storage.file.external;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.name.path.PathResolver;\nimport com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.kom.MultiFolderPathSelector;\nimport com.pinecone.hydra.system.ko.kom.ReparseLinkSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\n\nimport java.util.List;\n\npublic class KenExternalSymbolicSelector extends ReparseLinkSelector implements ExternalSymbolicSelector {\n    protected ExternalSymbolicManipulator mExternalSymbolicManipulator;\n\n    public KenExternalSymbolicSelector( MultiFolderPathSelector pathSelector ) {\n        super( pathSelector );\n    }\n\n    public KenExternalSymbolicSelector(\n            PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator dirMan, GUIDNameManipulator[] fileMans,\n            ExternalSymbolicManipulator externalSymbolicManipulator\n    ) {\n        super( pathResolver, trieTree, new GUIDNameManipulator[]{ dirMan }, fileMans );\n        this.mExternalSymbolicManipulator = externalSymbolicManipulator;\n    }\n\n    public KenExternalSymbolicSelector(\n            PathResolver pathResolver, ImperialTree trieTree, GUIDNameManipulator[] dirMans, GUIDNameManipulator[] fileMans,\n            ExternalSymbolicManipulator externalSymbolicManipulator\n    ) {\n        super( pathResolver, trieTree, dirMans, fileMans );\n        this.mExternalSymbolicManipulator = externalSymbolicManipulator;\n    }\n\n    @Override\n    public Object search( String[] parts ) {\n        List<String> resolvedParts = this.pathResolver.resolvePath(parts);\n        return this.dfsSearch( resolvedParts );\n    }\n\n    @Override\n    public ReparseLinkNode searchLinkNode(String[] parts ) {\n        Object result = 
this.search( parts );\n        if( result instanceof ReparseLinkNode ) {\n            return (ReparseLinkNode) result;\n        }\n        return null;\n    }\n\n    @Override\n    protected Object beforeDFSTermination( String currentPart, GUID guid ) {\n        Object obj = super.beforeDFSTermination( currentPart, guid );\n        if ( obj == null ) {\n            boolean b = this.mExternalSymbolicManipulator.isSymbolicMatchedByNameGuid( currentPart, guid );\n            if ( b ) {\n                return guid;\n            }\n        }\n\n        return guid;\n    }\n\n    @Override\n    protected Object tryTerminationBlock( String currentPart, GUID guid ) {\n        Object obj = super.tryTerminationBlock( currentPart, guid );\n        if ( obj == null ) {\n            boolean b = this.mExternalSymbolicManipulator.isSymbolicMatchedByNameGuid( currentPart, guid );\n            if ( b ) {\n                return guid;\n            }\n        }\n\n        return obj;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/marshaling/ClusterGroup.java",
    "content": "package com.pinecone.hydra.storage.file.marshaling;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ClusterGroup extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/marshaling/StripedClusterGroup.java",
    "content": "package com.pinecone.hydra.storage.file.marshaling;\n\npublic interface StripedClusterGroup extends ClusterGroup {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/ArchFileSystemOperator.java",
    "content": "package com.pinecone.hydra.storage.file.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ArchElementNode;\nimport com.pinecone.hydra.storage.file.source.FileSystemAttributeManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\n\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.time.LocalDateTime;\n\npublic abstract class ArchFileSystemOperator implements FileSystemOperator{\n    protected KOMFileSystem                     fileSystem;\n    protected FileSystemOperatorFactory         factory;\n    protected ImperialTree                      imperialTree;\n    protected FileSystemAttributeManipulator    fileSystemAttributeManipulator;\n    protected FileMasterManipulator             fileMasterManipulator;\n\n    public ArchFileSystemOperator( FileSystemOperatorFactory factory ) {\n        this( factory.getMasterManipulator(), (KOMFileSystem) factory.getFileSystem() );\n        this.factory = factory;\n    }\n\n    public ArchFileSystemOperator( FileMasterManipulator masterManipulator, KOMFileSystem fileSystem ) {\n        this.imperialTree =  fileSystem.getMasterTrieTree();\n        this.fileSystemAttributeManipulator =  masterManipulator.getAttributeManipulator();\n        this.fileSystem                     =  fileSystem;\n        this.fileMasterManipulator          =  masterManipulator;\n    }\n\n    protected ImperialTreeNode affirmPreinsertionInitialize(TreeNode treeNode ) {\n        ArchElementNode entityNode   = (ArchElementNode) treeNode;\n\n        GUID guid72 = entityNode.getGuid();\n        // Case 1: Dummy 
config node.\n        GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator();\n        if( guid72 == null ) {\n            guid72 = guidAllocator.nextGUID();\n            entityNode.setGuid( guid72 );\n            entityNode.setCreateTime( LocalDateTime.now() );\n        }\n        entityNode.setUpdateTime( LocalDateTime.now() );\n\n        ImperialTreeNode imperialTreeNode = new GUIDImperialTrieNode();\n        imperialTreeNode.setGuid( guid72 );\n        imperialTreeNode.setType( UOIUtils.createLocalJavaClass( entityNode.getClass().getName() ) );\n\n        return imperialTreeNode;\n    }\n\n    public FileSystemOperatorFactory getOperatorFactory() {\n        return this.factory;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/FileSystemOperator.java",
    "content": "package com.pinecone.hydra.storage.file.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface FileSystemOperator extends TreeNodeOperator {\n    @Override\n    FileTreeNode get(GUID guid );\n\n    FileTreeNode get( GUID guid, int depth );\n\n    void rename( GUID fileGuid, String newName );\n\n    @Override\n    FileTreeNode getAsRootDepth( GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/FileSystemOperatorFactory.java",
    "content": "package com.pinecone.hydra.storage.file.operator;\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ExternalSymbolic;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface FileSystemOperatorFactory extends OperatorFactory {\n    String DefaultFile               =   FileNode.class.getSimpleName();\n    String DefaultFolder             =   Folder.class.getSimpleName();\n    String DefaultExternalSymbolic   = ExternalSymbolic.class.getSimpleName();\n\n    void register( String typeName, TreeNodeOperator functionalNodeOperation );\n\n    void registerMetaType( Class<?> clazz, String metaType );\n\n    void registerMetaType( String classFullName, String metaType );\n\n    String getMetaType( String classFullName );\n\n    FileSystemOperator getOperator(String typeName );\n\n    KOMFileSystem getFileSystem();\n\n    FileMasterManipulator getMasterManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/GenericExternalSymbolicOperator.java",
    "content": "package com.pinecone.hydra.storage.file.operator;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ExternalSymbolic;\nimport com.pinecone.hydra.storage.file.entity.FileSystemAttributes;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.GenericExternalSymbolic;\nimport com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.lang.reflect.Field;\nimport java.util.List;\nimport java.util.Objects;\n\npublic class GenericExternalSymbolicOperator extends ArchFileSystemOperator{\n    private ExternalSymbolicManipulator externalSymbolicManipulator;\n\n    public GenericExternalSymbolicOperator( FileSystemOperatorFactory factory ) {\n        this( factory.getMasterManipulator(), (KOMFileSystem) factory.getFileSystem() );\n        this.factory = factory;\n    }\n\n    public GenericExternalSymbolicOperator(FileMasterManipulator masterManipulator, KOMFileSystem fileSystem ) {\n        super( masterManipulator, fileSystem );\n        this.externalSymbolicManipulator = this.fileMasterManipulator.getExternalSymbolicManipulator();\n    }\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        ExternalSymbolic externalSymbolic = (ExternalSymbolic) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode );\n        GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator();\n        GUID guid = externalSymbolic.getGuid();\n\n        FileSystemAttributes 
attributes = externalSymbolic.getAttributes();\n        GUID attrbutesGuid = guidAllocator.nextGUID();\n        if ( attributes != null ){\n            attributes.setGuid(attrbutesGuid);\n            this.fileSystemAttributeManipulator.insert(attributes);\n        }\n        else {\n            attrbutesGuid = null;\n        }\n\n        GUID fileMetaGuid = guidAllocator.nextGUID();\n\n        imperialTreeNode.setBaseDataGUID(attrbutesGuid);\n        imperialTreeNode.setNodeMetadataGUID(fileMetaGuid);\n        this.imperialTree.insert(imperialTreeNode);\n        this.externalSymbolicManipulator.insert( externalSymbolic );\n\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.externalSymbolicManipulator.remove( guid );\n        this.imperialTree.removeCachePath(guid);\n    }\n\n    @Override\n    public FileTreeNode get( GUID guid ) {\n        return (ExternalSymbolic) this.getFileTreeNodeWideData( guid );\n    }\n\n    @Override\n    public FileTreeNode get( GUID guid, int depth ) {\n        return this.get( guid );\n    }\n\n    @Override\n    public void rename(GUID fileGuid, String newName) {\n\n    }\n\n    @Override\n    public FileTreeNode getAsRootDepth(GUID guid) {\n        return this.getFileTreeNodeWideData(guid);\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    protected FileTreeNode getFileTreeNodeWideData(GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        ExternalSymbolic cn = this.externalSymbolicManipulator.getSymbolicByGuid( guid );\n        if( cn instanceof GenericExternalSymbolic) {\n            ((GenericExternalSymbolic) cn).apply( this.externalSymbolicManipulator );\n        }\n\n        //Notice: Registry attributes is difference from 
other tree, -- that is, same as DOM;\n        //        So in this case, this field is deprecated.\n        //Attributes         attributes = this.attributesManipulator.getAttributes( node.getAttributesGUID(), cn );\n\n        FileSystemAttributes attributes = this.fileSystemAttributeManipulator.getAttributes( guid, cn );\n        cn.setAttributes    ( attributes );\n        return cn;\n    }\n\n    protected void inherit( FileTreeNode self, FileTreeNode prototype ){\n        Class<? extends FileTreeNode> clazz = self.getClass();\n        Field[] fields = clazz.getDeclaredFields();\n\n        for ( Field field : fields ){\n            field.setAccessible(true);\n            try {\n                Object value1 = field.get( self );\n                Object value2 = field.get( prototype );\n                if ( Objects.isNull(value1) || (value1 instanceof List && ((List<?>) value1).isEmpty()) ){\n                    field.set(self,value2);\n                }\n            }\n            catch ( IllegalAccessException e ) {\n                throw new ProxyProvokeHandleException(e);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/GenericFileOperator.java",
    "content": "package com.pinecone.hydra.storage.file.operator;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileMeta;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.FileSystemAttributes;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.GenericFileNode;\nimport com.pinecone.hydra.storage.file.source.FileManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMetaManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport java.lang.reflect.Field;\nimport java.util.List;\nimport java.util.Objects;\n\npublic class GenericFileOperator extends ArchFileSystemOperator {\n    protected FileManipulator               fileManipulator;\n    protected FileMetaManipulator           fileMetaManipulator;\n\n    public GenericFileOperator( FileSystemOperatorFactory factory ) {\n        this( factory.getMasterManipulator(), (KOMFileSystem) factory.getFileSystem() );\n        this.factory = factory;\n    }\n\n    public GenericFileOperator( FileMasterManipulator masterManipulator, KOMFileSystem fileSystem ) {\n        super( masterManipulator, fileSystem );\n        this.fileManipulator               =  masterManipulator.getFileManipulator();\n        this.fileMetaManipulator           =  masterManipulator.getFileMetaManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        FileNode file = (FileNode) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize( treeNode 
);\n        GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator();\n        GUID guid = file.getGuid();\n\n        FileSystemAttributes attributes = file.getAttributes();\n        GUID attrbutesGuid = guidAllocator.nextGUID();\n        if ( attributes != null ){\n            attributes.setGuid(attrbutesGuid);\n            this.fileSystemAttributeManipulator.insert(attributes);\n        }\n        else {\n            attrbutesGuid = null;\n        }\n\n        FileMeta fileMeta = file.getFileMeta();\n        GUID fileMetaGuid = guidAllocator.nextGUID();\n        if ( fileMeta != null ){\n            fileMeta.setGuid(fileMetaGuid);\n            this.fileMetaManipulator.insert(fileMeta);\n        }\n        else {\n            fileMetaGuid = null;\n        }\n\n        imperialTreeNode.setBaseDataGUID(attrbutesGuid);\n        imperialTreeNode.setNodeMetadataGUID(fileMetaGuid);\n        this.imperialTree.insert(imperialTreeNode);\n        this.fileManipulator.insert(file);\n\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.fileManipulator.remove(guid);\n        this.fileMetaManipulator.remove(node.getNodeMetadataGUID());\n        //this.fileSystemAttributeManipulator.remove(node.getAttributesGUID());\n        this.imperialTree.removeCachePath(guid);\n    }\n\n    @Override\n    public FileTreeNode get(GUID guid) {\n        FileNode fileTreeNode = this.getFileTreeNodeWideData( guid ).evinceFileNode();\n        FileNode thisNode = fileTreeNode;\n        while ( true ) {\n            GUID affinityGuid = thisNode.getDataAffinityGuid();\n            if ( affinityGuid != null ){\n                FileNode parent = this.getFileTreeNodeWideData( affinityGuid ).evinceFileNode();\n                this.inherit( thisNode, parent );\n                thisNode = parent;\n            }\n            else {\n             
   break;\n            }\n        }\n        return fileTreeNode;\n    }\n\n    @Override\n    public FileTreeNode get(GUID guid, int depth) {\n        return this.get( guid );\n    }\n\n    @Override\n    public FileTreeNode getAsRootDepth(GUID guid) {\n        return this.getFileTreeNodeWideData(guid);\n    }\n\n    @Override\n    public void rename(GUID fileGuid, String newName) {\n        this.fileManipulator.rename( fileGuid, newName );\n        this.imperialTree.removeCachePath(fileGuid);\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n        this.imperialTree.removeCachePath(treeNode.getGuid());\n        this.fileManipulator.update( (FileNode) treeNode );\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n    }\n\n    protected FileTreeNode getFileTreeNodeWideData(GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        FileNode cn = this.fileManipulator.getFileNodeByGuid( guid );\n        if( cn instanceof GenericFileNode) {\n            ((GenericFileNode) cn).apply( this.fileSystem );\n        }\n\n        FileMeta fileMeta = this.fileMetaManipulator.getFileMetaByGuid( node.getNodeMetadataGUID() );\n\n        //Notice: Registry attributes is difference from other tree, -- that is, same as DOM;\n        //        So in this case, this field is deprecated.\n        //Attributes         attributes = this.attributesManipulator.getAttributes( node.getAttributesGUID(), cn );\n\n        FileSystemAttributes attributes = this.fileSystemAttributeManipulator.getAttributes( guid, cn );\n        cn.setAttributes    ( attributes );\n        cn.startDistribution( fileMeta );\n        return cn;\n    }\n\n    protected void inherit( FileTreeNode self, FileTreeNode prototype ){\n        Class<? 
extends FileTreeNode> clazz = self.getClass();\n        Field[] fields = clazz.getDeclaredFields();\n\n        for ( Field field : fields ){\n            field.setAccessible(true);\n            try {\n                Object value1 = field.get( self );\n                Object value2 = field.get( prototype );\n                if ( Objects.isNull(value1) || (value1 instanceof List && ((List<?>) value1).isEmpty()) ){\n                    field.set(self,value2);\n                }\n            }\n            catch ( IllegalAccessException e ) {\n                throw new ProxyProvokeHandleException(e);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/GenericFileSystemOperatorFactory.java",
    "content": "package com.pinecone.hydra.storage.file.operator;\n\nimport com.fasterxml.jackson.annotation.JsonIgnore;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.GenericFileNode;\nimport com.pinecone.hydra.storage.file.entity.GenericFolder;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.TreeMap;\n\npublic class GenericFileSystemOperatorFactory implements FileSystemOperatorFactory{\n    protected FileMasterManipulator            fileMasterManipulator;\n    @JsonIgnore\n    protected KOMFileSystem                    fileSystem;\n\n    protected Map<String, TreeNodeOperator>    registerer = new HashMap<>();\n\n    protected Map<String, String >             metaTypeMap = new TreeMap<>();\n\n    protected void registerDefaultMetaType( Class<?> genericType ) {\n        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace(\n                this.fileSystem.getConfig().getVersionSignature(),\"\"\n        ));\n    }\n\n    protected void registerDefaultMetaTypes() {\n        this.registerDefaultMetaType( GenericFolder.class );\n        this.registerDefaultMetaType( GenericFileNode.class );\n    }\n\n    public GenericFileSystemOperatorFactory( KOMFileSystem fileSystem, FileMasterManipulator fileMasterManipulator ){\n        this.fileSystem = fileSystem;\n        this.fileMasterManipulator = fileMasterManipulator;\n\n        this.registerer.put(\n                DefaultFile,\n                new GenericFileOperator( this )\n        );\n\n        this.registerer.put(\n                DefaultFolder,\n                new GenericFolderOperator(this)\n        );\n\n        this.registerer.put(\n                DefaultExternalSymbolic,\n                new GenericExternalSymbolicOperator(this)\n        );\n\n        
this.registerDefaultMetaTypes();\n    }\n\n\n\n\n\n\n    @Override\n    public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {\n        this.registerer.put( typeName, functionalNodeOperation );\n    }\n\n    @Override\n    public void registerMetaType( Class<?> clazz, String metaType ){\n        this.registerMetaType( clazz.getName(), metaType );\n    }\n\n    @Override\n    public void registerMetaType( String classFullName, String metaType ){\n        this.metaTypeMap.put( classFullName, metaType );\n    }\n\n    @Override\n    public String getMetaType( String classFullName ) {\n        return this.metaTypeMap.get( classFullName );\n    }\n\n    @Override\n    public FileSystemOperator getOperator(String typeName ) {\n        //Debug.trace( this.registerer.toString() );\n        return (FileSystemOperator) this.registerer.get( typeName );\n    }\n\n    @Override\n    public KOMFileSystem getFileSystem() {\n        return this.fileSystem;\n    }\n\n    @Override\n    public FileMasterManipulator getMasterManipulator() {\n        return this.fileMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/operator/GenericFolderOperator.java",
    "content": "package com.pinecone.hydra.storage.file.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileSystemAttributes;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.entity.FolderMeta;\nimport com.pinecone.hydra.storage.file.entity.GenericFolder;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.storage.file.source.FolderManipulator;\nimport com.pinecone.hydra.storage.file.source.FolderMetaManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class GenericFolderOperator extends ArchFileSystemOperator{\n    private FolderManipulator       folderManipulator;\n    private FolderMetaManipulator   folderMetaManipulator;\n\n    public GenericFolderOperator(FileSystemOperatorFactory factory ) {\n        this( factory.getMasterManipulator(), factory.getFileSystem() );\n        this.factory = factory;\n    }\n\n    public GenericFolderOperator(FileMasterManipulator masterManipulator, KOMFileSystem fileSystem ) {\n        super( masterManipulator, fileSystem );\n        this.folderManipulator      =   masterManipulator.getFolderManipulator();\n        this.folderMetaManipulator  =   masterManipulator.getFolderMetaManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        Folder folder  = (Folder) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(treeNode);\n        GuidAllocator guidAllocator = this.fileSystem.getGuidAllocator();\n       
 GUID guid = folder.getGuid();\n\n        FileSystemAttributes attributes = folder.getAttributes();\n        GUID attributesGuid = guidAllocator.nextGUID();\n        if ( attributes != null ){\n            attributes.setGuid(attributesGuid);\n            this.fileSystemAttributeManipulator.insert(attributes);\n        }\n        else {\n            attributesGuid = null;\n        }\n\n        FolderMeta folderMeta = folder.getFolderMeta();\n        GUID folderMetaGuid = guidAllocator.nextGUID();\n        if ( folderMeta != null ){\n            folderMeta.setGuid(folderMetaGuid);\n            this.folderMetaManipulator.insert(folderMeta);\n        }\n        else {\n            folderMetaGuid = null;\n        }\n\n        imperialTreeNode.setNodeMetadataGUID(folderMetaGuid);\n        imperialTreeNode.setBaseDataGUID(attributesGuid);\n        this.imperialTree.insert(imperialTreeNode);\n        this.folderManipulator.insert(folder);\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        //namespace节点需要递归删除其拥有节点若其引用节点，没有其他引用则进行清理\n        List<GUIDImperialTrieNode> childNodes = this.imperialTree.getChildren(guid);\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        if ( !childNodes.isEmpty() ){\n            List<GUID > subordinates = this.imperialTree.getSubordinates(guid);\n            if ( !subordinates.isEmpty() ){\n                for ( GUID subordinateGuid : subordinates ){\n                    this.purge( subordinateGuid );\n                }\n            }\n            childNodes = this.imperialTree.getChildren( guid );\n            for( GUIDImperialTrieNode childNode : childNodes ){\n                List<GUID > parentNodes = this.imperialTree.fetchParentGuids(childNode.getGuid());\n                if ( parentNodes.size() > 1 ){\n                    this.imperialTree.removeInheritance(childNode.getGuid(),guid);\n                }\n                else {\n                    this.purge( 
childNode.getGuid() );\n                }\n            }\n        }\n\n        if ( node.getType().getObjectName().equals(GenericFolder.class.getName()) ){\n            this.removeNode(guid);\n        }\n        else {\n            UOI uoi = node.getType();\n            String metaType = this.getOperatorFactory().getMetaType( uoi.getObjectName() );\n            if( metaType == null ) {\n                TreeNode newInstance = (TreeNode)uoi.newInstance( new Class<? >[]{ KOMFileSystem.class }, this.fileSystem );\n                metaType = newInstance.getMetaType();\n            }\n\n            FileSystemOperator operator = this.getOperatorFactory().getOperator( metaType );\n            operator.purge( guid );\n        }\n    }\n\n    @Override\n    public FileTreeNode get(GUID guid) {\n        return this.getFolderWideData(guid, 0);\n    }\n\n    @Override\n    public FileTreeNode get(GUID guid, int depth) {\n        return this.getFolderWideData(guid,depth);\n    }\n\n    @Override\n    public FileTreeNode getAsRootDepth(GUID guid) {\n        return this.getFolderWideData(guid,0);\n    }\n\n    @Override\n    public void rename(GUID fileGuid, String newName) {\n        this.folderManipulator.rename( fileGuid, newName );\n        this.imperialTree.removeCachePath(fileGuid);\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n        this.imperialTree.removeCachePath(treeNode.getGuid());\n        FileTreeNode fileTreeNode = this.get(treeNode.getGuid());\n        this.folderManipulator.update( (Folder) fileTreeNode );\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    private Folder getFolderWideData(GUID guid, int depth ){\n        Folder fd = this.folderManipulator.getFolderByGuid( guid );\n        if ( fd instanceof GenericFolder){\n            ((GenericFolder) fd).apply( this.fileSystem );\n        }\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n\n        if( depth <= 0 ) {\n     
       List<GUIDImperialTrieNode> childNode = this.imperialTree.getChildren(guid);\n            ArrayList<GUID> guids = new ArrayList<>();\n            for ( GUIDImperialTrieNode n : childNode ){\n                guids.add( n.getGuid() );\n            }\n            ++depth;\n            fd.setChildrenGuids( guids, depth );\n        }\n\n        FileSystemAttributes attributes = this.fileSystemAttributeManipulator.getAttributes( guid, fd );\n        FolderMeta folderMeta = this.folderMetaManipulator.getFolderMetaByGuid( node.getNodeMetadataGUID() );\n        fd.setAttributes    ( attributes );\n        fd.setFolderMeta ( folderMeta );\n        return fd;\n    }\n\n\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath(guid);\n        this.folderManipulator.remove(guid);\n        this.folderMetaManipulator.remove(node.getNodeMetadataGUID());\n        //this.fileSystemAttributeManipulator.remove(node.getAttributesGUID());\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/ExternalSymbolicManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ExternalSymbolic;\n\npublic interface ExternalSymbolicManipulator extends Pinenut {\n    void insert( ExternalSymbolic externalSymbolic );\n    void remove( GUID guid );\n    ExternalSymbolic getSymbolicByGuid( GUID guid );\n\n    ExternalSymbolic getSymbolicByNameGuid( String nodeName, GUID nodeGUID );\n\n    boolean isSymbolicMatchedByNameGuid( String nodeName, GUID nodeGUID );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FileManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface FileManipulator extends GUIDNameManipulator {\n    FileNode getFileNode(GUID guid, ElementNode element);\n    void insert( FileNode fileNode );\n    void remove( GUID guid );\n    FileNode getFileNodeByGuid(GUID guid);\n\n    List<GUID > getGuidsByName(String name );\n\n    List<GUID > getGuidsByNameID( String name, GUID guid );\n\n    List<GUID > dumpGuid();\n\n    void update( FileNode fileNode );\n\n    void rename( GUID guid, String newName );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FileMasterManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface FileMasterManipulator extends KOIMasterManipulator {\n    FileSystemAttributeManipulator      getAttributeManipulator();\n    FileManipulator                     getFileManipulator();\n    FileMetaManipulator                 getFileMetaManipulator();\n    FolderManipulator                   getFolderManipulator();\n    FolderMetaManipulator               getFolderMetaManipulator();\n    LocalClusterManipulator               getLocalClusterManipulator();\n    RemoteClusterManipulator              getRemoteClusterManipulator();\n    SymbolicManipulator                 getSymbolicManipulator();\n    SymbolicMetaManipulator             getSymbolicMetaManipulator();\n    FolderVolumeMappingManipulator      getFolderVolumeRelationManipulator();\n    ExternalSymbolicManipulator         getExternalSymbolicManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FileMetaManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FileMeta;\n\npublic interface FileMetaManipulator extends Pinenut {\n    FileMeta getFileMeta(GUID guid, ElementNode element);\n    void insert( FileMeta fileMeta );\n    void remove( GUID guid );\n    FileMeta getFileMetaByGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FileSystemAttributeManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.FileSystemAttributes;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\n\nimport java.util.List;\nimport java.util.Map;\n\npublic interface FileSystemAttributeManipulator extends Pinenut {\n    void insertAttribute(GUID guid, String key, String value );\n\n    List<Map<String, Object >> getAttributesByGuid(GUID guid );\n\n    void updateAttribute( GUID guid, String key, String value );\n\n    void remove( GUID guid );\n\n    FileSystemAttributes getAttributes(GUID guid, ElementNode element );\n\n    default void insert( FileSystemAttributes attributes) {\n        for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) {\n            this.insertAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() );\n        }\n    }\n\n    default void update( FileSystemAttributes attributes) {\n        for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) {\n            this.updateAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() );\n        }\n    }\n\n\n    boolean containsKey ( GUID guid, String key );\n\n    void clearAttributes( GUID guid );\n\n    void removeAttributeWithValue( GUID guid, String key, String value );\n\n    void removeAttribute( GUID guid, String key );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FolderManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface FolderManipulator extends GUIDNameManipulator {\n    Folder getFolder(GUID guid, ElementNode element);\n\n    void insert( Folder folder );\n\n    void remove( GUID guid );\n\n    void update( Folder folder );\n\n    Folder getFolderByGuid(GUID guid);\n\n    List<GUID > getGuidsByName(String name );\n\n    List<GUID > getGuidsByNameID( String name, GUID guid );\n\n    List<GUID > dumpGuid();\n\n    boolean isFolder(GUID guid);\n\n    void rename( GUID fileGuid, String newName );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FolderMetaManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FolderMeta;\n\npublic interface FolderMetaManipulator extends Pinenut {\n    FolderMeta getFolderMeta(GUID guid, ElementNode element);\n    void insert( FolderMeta folderMeta );\n    void remove( GUID guid );\n    FolderMeta getFolderMetaByGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/FolderVolumeMappingManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FolderVolumeMappingManipulator extends Pinenut {\n    void insert( GUID folderGuid, GUID volumeGuid );\n\n    void remove( GUID folderGuid, GUID volumeGuid );\n\n    GUID getVolumeGuid( GUID folderGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/LocalClusterManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\n\nimport java.util.List;\n\npublic interface LocalClusterManipulator extends Pinenut {\n    LocalCluster getLocalCluster(GUID guid, ElementNode element);\n    void insert( LocalCluster localCluster );\n    void remove( GUID guid );\n    void removeClustersByFile( GUID fileGuid );\n    LocalCluster getLocalClusterByGuid(GUID guid);\n    List<LocalCluster> getLocalClusterByFileGuid(GUID guid );\n    LocalCluster getClusterByFileWithId(GUID fileGuid, long segId );\n    void update( LocalCluster localCluster );\n    void removeClusterByFileWithId( GUID fileGuid, long segId );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/RemoteClusterManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.RemoteCluster;\n\nimport java.util.List;\n\npublic interface RemoteClusterManipulator extends Pinenut {\n    RemoteCluster getRemoteCluster(GUID guid, ElementNode element);\n    void insert( RemoteCluster remoteCluster );\n    void remove( GUID guid );\n    void removeClustersByFile( GUID fileGuid );\n    RemoteCluster fetchRemoteClustersByFileGuid(GUID guid);\n    List<RemoteCluster> fetchRemoteClusterByFileGuid( GUID guid );\n\n    List<RemoteCluster > fetchRemoteClusterByFileGuid( GUID guid, long offset, int pageSize );\n\n    long countRemoteClustersByFileGuid( GUID guid );\n\n    RemoteCluster getLastCluster(GUID guid );\n    void removeClusterByFileWithId(GUID fileGuid, long segId );\n\n    long countFileClusters( GUID fileGuid );\n\n    RemoteCluster getClusterByFileWithId( GUID fileGuid, long segId );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/SymbolicManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.Symbolic;\n\npublic interface SymbolicManipulator extends Pinenut {\n    Symbolic getSymbolic(GUID guid, ElementNode element);\n    void insert( Symbolic symbolic );\n    void remove( GUID guid );\n    Symbolic getSymbolicByGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/source/SymbolicMetaManipulator.java",
    "content": "package com.pinecone.hydra.storage.file.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.SymbolicMeta;\n\npublic interface SymbolicMetaManipulator extends Pinenut {\n    SymbolicMeta getSymbolicMeta(GUID guid, ElementNode element);\n    void insert( SymbolicMeta symbolicMeta );\n    void remove( GUID guid );\n    SymbolicMeta getSymbolicMetaByGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/UniformSourceLocator.java",
    "content": "package com.pinecone.hydra.storage.file.transmit;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic class UniformSourceLocator implements Pinenut {\n    private String volumeGuid;\n\n    public UniformSourceLocator() {\n    }\n\n    public UniformSourceLocator(String volumeGuid, String sourceName) {\n        this.volumeGuid = volumeGuid;\n    }\n\n    public String getVolumeGuid() {\n        return volumeGuid;\n    }\n\n\n    public void setVolumeGuid(String volumeGuid) {\n        this.volumeGuid = volumeGuid;\n    }\n\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/ArchFileExporterEntity.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.exporter;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\n\npublic abstract class ArchFileExporterEntity implements FileExportEntity {\n    protected Chanface channel;\n\n    protected VolumeManager volumeManager;\n    private KOMFileSystem fileSystem;\n    private FileNode file;\n\n\n    public ArchFileExporterEntity(KOMFileSystem fileSystem, FileNode file, Chanface channel, VolumeManager volumeManager) {\n        this.fileSystem = fileSystem;\n        this.file = file;\n        this.channel = channel;\n        this.volumeManager = volumeManager;\n    }\n\n    @Override\n    public KOMFileSystem getFileSystem() {\n        return fileSystem;\n    }\n\n    @Override\n    public void setFileSystem(KOMFileSystem fileSystem) {\n        this.fileSystem = fileSystem;\n    }\n\n    @Override\n    public FileNode getFile() {\n        return file;\n    }\n\n    @Override\n    public void setFile(FileNode file) {\n        this.file = file;\n    }\n    @Override\n    public VolumeManager getVolumeManager() {\n        return this.volumeManager;\n    }\n\n    @Override\n    public void setVolumeManager(VolumeManager volumeManager) {\n        this.volumeManager = volumeManager;\n    }\n\n    @Override\n    public Chanface getKChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public void setKChannel(Chanface channel) {\n        this.channel = channel;\n    }\n\n    public String toString() {\n        return \"ArchExporterEntity{fileSystem = \" + fileSystem + \", file = \" + file + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/FileExport.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.exporter;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\n\nimport java.io.IOException;\n\npublic interface FileExport extends Pinenut {\n    void export() throws IOException;\n\n    void export(Cluster cluster) throws IOException;\n\n    void export( Number offset, Number endSize ) throws  IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/FileExport64.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.exporter;\n\npublic interface FileExport64 extends FileExport {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/FileExportEntity.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.exporter;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\n\nimport java.io.IOException;\n\npublic interface FileExportEntity extends Pinenut {\n    KOMFileSystem getFileSystem();\n\n    void setFileSystem( KOMFileSystem fileSystem );\n\n    VolumeManager getVolumeManager();\n    void setVolumeManager( VolumeManager volumeManager );\n\n    FileNode getFile();\n\n    void setFile( FileNode file );\n\n    Chanface getKChannel();\n    void setKChannel( Chanface channel );\n\n    void export() throws IOException;\n\n    void export(Cluster cluster) throws IOException;\n\n    void export( Number offset, Number endSize );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/FileExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.exporter;\n\npublic interface FileExportEntity64 extends FileExportEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/TitanFileExport64.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.exporter;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.TitanStorageExportIORequest;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.transmit.UniformSourceLocator;\nimport com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.io.IOException;\nimport java.util.TreeMap;\n\npublic class TitanFileExport64 implements FileExport64{\n    protected Chanface channel;\n\n    protected FileNode                   fileNode;\n\n    protected VolumeManager              volumeManager;\n\n    protected UnifiedTransmitConstructor constructor;\n\n    public TitanFileExport64( FileExportEntity64 entity ){\n        this.channel = entity.getKChannel();\n        this.fileNode = entity.getFile();\n        this.volumeManager = entity.getVolumeManager();\n        this.constructor = new UnifiedTransmitConstructor();\n    }\n    @Override\n    public void export() throws IOException {\n        // 获取文件所有的簇\n        TreeMap<Long, Cluster> framesMap = fileNode.getClusters();\n        for (long i = 0; i < framesMap.size(); i++) {\n            LocalCluster frame = (LocalCluster) framesMap.get(i);\n            TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest();\n            titanExportStorageObject.setSize( frame.getSize() );\n            titanExportStorageObject.setStorageObjectGuid( frame.getSegGuid() );\n            String sourceName = frame.getSourceName();\n            UniformSourceLocator 
uniformSourceLocator = JSON.unmarshal(sourceName, UniformSourceLocator.class);\n            LogicVolume volume = this.volumeManager.get(GUIDs.GUID128(uniformSourceLocator.getVolumeGuid()));\n            //volume.channelExport( titanExportStorageObject, this.channel );\n            ExporterEntity exportEntity = null;\n            exportEntity = this.constructor.getExportEntity(volume.getClass(), volumeManager, titanExportStorageObject, this.channel, volume);\n            volume.export( exportEntity );\n        }\n\n        this.channel.close();\n    }\n\n    @Override\n    public void export(Cluster cluster) throws IOException {\n        LocalCluster localCluster = (LocalCluster) cluster;\n        TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest();\n        titanExportStorageObject.setSize( localCluster.getSize() );\n        titanExportStorageObject.setStorageObjectGuid( localCluster.getSegGuid() );\n        String sourceName = localCluster.getSourceName();\n        UniformSourceLocator uniformSourceLocator = JSON.unmarshal(sourceName, UniformSourceLocator.class);\n        LogicVolume volume = this.volumeManager.get(GUIDs.GUID128(uniformSourceLocator.getVolumeGuid()));\n        ExporterEntity exportEntity = null;\n        exportEntity = this.constructor.getExportEntity(volume.getClass(), volumeManager, titanExportStorageObject, this.channel, volume);\n        volume.export( exportEntity );\n    }\n\n    @Override\n    public void export(Number offset, Number endSize) throws  IOException {\n        TreeMap<Long, Cluster> framesMap = fileNode.getClusters();\n        long startPosition = offset.longValue();\n        long endPosition = offset.longValue() + endSize.longValue();\n        long currentPosition = 0;\n\n        for( long i = 0;i < framesMap.size(); i++ ){\n            LocalCluster frame = (LocalCluster) framesMap.get(i);\n            if (startPosition < currentPosition + frame.getDefinitionSize() && endPosition > 
currentPosition) {\n                TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest();\n                titanExportStorageObject.setSize(frame.getSize());\n                titanExportStorageObject.setStorageObjectGuid(frame.getSegGuid());\n\n                String sourceName = frame.getSourceName();\n                UniformSourceLocator uniformSourceLocator = JSON.unmarshal(sourceName, UniformSourceLocator.class);\n                LogicVolume volume = this.volumeManager.get(GUIDs.GUID128(uniformSourceLocator.getVolumeGuid()));\n\n                ExporterEntity exportEntity = null;\n                exportEntity = this.constructor.getExportEntity(volume.getClass(), volumeManager, titanExportStorageObject, this.channel, volume);\n\n                long startOffsetInCluster = Math.max(startPosition - currentPosition, 0);\n                long sizeToExport = Math.min(endPosition - currentPosition, frame.getDefinitionSize()) - startOffsetInCluster;\n\n                volume.export(exportEntity, startOffsetInCluster, sizeToExport);\n            }\n\n            currentPosition += frame.getDefinitionSize();\n            if (currentPosition >= endPosition){\n                break;\n            }\n        }\n\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/exporter/TitanFileExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.exporter;\n\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\n\nimport java.io.IOException;\n\npublic class TitanFileExportEntity64 extends ArchFileExporterEntity  implements FileExportEntity64{\n    protected FileExport64      fileExport;\n    public TitanFileExportEntity64(KOMFileSystem fileSystem, VolumeManager volumeManager, FileNode file, Chanface channel) {\n        super(fileSystem, file, channel, volumeManager);\n        this.fileExport  = new TitanFileExport64( this );\n    }\n\n\n    @Override\n    public void export() throws IOException {\n        this.fileExport.export();\n    }\n\n    @Override\n    public void export(Cluster cluster) throws IOException {\n        this.fileExport.export(cluster);\n    }\n\n\n    @Override\n    public void export(Number offset, Number endSize) {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/ArchFileReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.receiver;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\n\npublic abstract class ArchFileReceiveEntity implements FileReceiveEntity {\n    protected KOMFileSystem     fileSystem;\n\n    protected String            destDirPath;\n\n    protected FileNode          file;\n\n    protected Chanface channel;\n\n    protected VolumeManager     volumeManager;\n\n    public ArchFileReceiveEntity(KOMFileSystem fileSystem, String destDirPath, FileNode file, Chanface channel, VolumeManager volumeManager ){\n        this.fileSystem = fileSystem;\n        this.file = file;\n        this.destDirPath = destDirPath;\n        this.channel  = channel;\n        this.volumeManager = volumeManager;\n    }\n\n    @Override\n    public KOMFileSystem getFileSystem() {\n        return this.fileSystem;\n    }\n\n    @Override\n    public void setFileSystem(KOMFileSystem fileSystem) {\n        this.fileSystem = fileSystem;\n    }\n\n    @Override\n    public String getDestDirPath() {\n        return this.destDirPath;\n    }\n\n    @Override\n    public void setDestDirPath(String destDirPath) {\n        this.destDirPath = destDirPath;\n    }\n\n    @Override\n    public FileNode getFile() {\n        return this.file;\n    }\n\n    @Override\n    public void setFile(FileNode file) {\n        this.file = file;\n    }\n\n    @Override\n    public Chanface getChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public void setChannel(Chanface channel) {\n        this.channel = channel;\n    }\n\n    @Override\n    public VolumeManager getVolumeManager() {\n        return this.volumeManager;\n    }\n\n    @Override\n    public void setVolumeManager(VolumeManager volumeManager) {\n        this.volumeManager = volumeManager;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/FileReceive.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.receiver;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\n\nimport java.io.IOException;\n\npublic interface FileReceive extends Pinenut {\n    void receive(LogicVolume volume) throws IOException;\n    void receive( LogicVolume volume, long segId ) throws  IOException;\n    void receive(LogicVolume volume, Number offset, Number endSize ) throws IOException;\n    void randomReceive( LogicVolume volume, Number offset, Number endSize ) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/FileReceive64.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.receiver;\n\npublic interface FileReceive64 extends FileReceive {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/FileReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.receiver;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\n\nimport java.io.IOException;\n\npublic interface FileReceiveEntity extends Pinenut {\n    KOMFileSystem getFileSystem();\n    void setFileSystem( KOMFileSystem fileSystem );\n\n    String getDestDirPath();\n    void setDestDirPath( String destDirPath );\n\n    FileNode getFile();\n    void setFile( FileNode file );\n\n    Chanface getChannel();\n    void setChannel( Chanface channel );\n\n    VolumeManager getVolumeManager();\n    void setVolumeManager( VolumeManager volumeManager );\n\n    void receive() throws IOException;\n    void receive( Number offset, Number endSize )throws IOException;\n    void receive( long segId ) throws IOException;\n\n    void randomReceive( Number offset, Number endSize ) throws  IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/FileReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.receiver;\n\npublic interface FileReceiveEntity64 extends FileReceiveEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/TitanFileReceive64.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.receiver;\n\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.file.entity.RemoteCluster;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.TitanStorageReceiveIORequest;\nimport com.pinecone.hydra.storage.file.ClusterSegmentNaming;\nimport com.pinecone.hydra.storage.file.KOFSClusterSegmentNaming;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.Verification;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.transmit.UniformSourceLocator;\nimport com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;\nimport com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\n\nimport java.io.BufferedInputStream;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\nimport java.util.zip.CRC32;\n\npublic class TitanFileReceive64 implements FileReceive64{\n    protected KOMFileSystem                 mKOMFileSystem;\n\n    protected ClusterSegmentNaming mClusterSegmentNaming;\n\n    protected UnifiedTransmitConstructor    constructor;\n\n    protected Chanface                      chanface;\n\n    protected FileNode                      fileNode;\n\n    protected String                        destDirPath;\n\n    protected VolumeManager                 
volumeManager;\n\n\n    public TitanFileReceive64( FileReceiveEntity64 entity ){\n        this.mKOMFileSystem      = entity.getFileSystem();\n        this.mClusterSegmentNaming = new KOFSClusterSegmentNaming();\n        this.constructor         = new UnifiedTransmitConstructor();\n        this.chanface            = entity.getChannel();\n        this.destDirPath         = entity.getDestDirPath();\n        this.fileNode            = entity.getFile();\n        this.volumeManager       = entity.getVolumeManager();\n    }\n\n    @Override\n    public void receive( LogicVolume volume ) throws IOException {\n        long frameSize = this.mKOMFileSystem.getConfig().getClusterSize().longValue();\n        this.fileNode.setGuid( mKOMFileSystem.queryGUIDByPath( this.destDirPath ) );\n\n        FSNodeAllotment allotment = mKOMFileSystem.getFSNodeAllotment();\n        long segId = 0;\n        long currentPosition = 0;\n        long endSize = frameSize;\n\n        long parityCheck = 0;\n        long checkSum    = 0;\n        long crc32       = 0;\n\n        StorageIOResponse storageIOResponse = null;\n        while ( true ) {\n            if( currentPosition >= this.fileNode.getDefinitionSize() ){\n                break;\n            }\n\n            if( currentPosition + endSize > this.fileNode.getDefinitionSize() ){\n                endSize = this.fileNode.getDefinitionSize() - currentPosition;\n            }\n\n            LocalCluster localCluster = allotment.newLocalCluster();\n            RemoteCluster remoteCluster = allotment.newRemoteCluster( this.fileNode.getGuid(),(int)segId );\n            remoteCluster.setDeviceGuid(this.mKOMFileSystem.getConfig().getLocalhostGUID());\n            remoteCluster.setSegGuid( localCluster.getSegGuid() );\n\n            StorageReceiveIORequest storageReceiveIORequest = new TitanStorageReceiveIORequest();\n            storageReceiveIORequest.setSize( this.fileNode.getDefinitionSize() );\n            storageReceiveIORequest.setName( 
this.fileNode.getName() );\n            storageReceiveIORequest.setStorageObjectGuid( localCluster.getSegGuid() );\n\n            //storageIOResponse = volume.channelReceive(storageReceiveIORequest, kChannel, currentPosition, endSize);\n            ReceiveEntity receiveEntity = null;\n            receiveEntity = this.constructor.getReceiveEntity(volume.getClass(), this.volumeManager, storageReceiveIORequest, this.chanface, volume);\n            storageIOResponse = volume.receive( receiveEntity, currentPosition, endSize );\n\n            UniformSourceLocator uniformSourceLocator = new UniformSourceLocator();\n            if( storageIOResponse != null ){\n                localCluster.setCrc32( storageIOResponse.getCre32().getValue() );\n                parityCheck += storageIOResponse.getParityCheck();\n                checkSum += storageIOResponse.getChecksum();\n                if( segId == 0 ){\n                    crc32 = storageIOResponse.getCre32().getValue();\n                }\n                else {\n                    crc32 = crc32 ^ storageIOResponse.getCre32().getValue();\n                }\n            }\n            uniformSourceLocator.setVolumeGuid( volume.getGuid().toString() );\n            localCluster.setSize( endSize );\n            localCluster.setSourceName( uniformSourceLocator.toJSONString() );\n            localCluster.setFileGuid( this.fileNode.getGuid() );\n            localCluster.setSegId( segId );\n\n\n            ++segId;\n            localCluster.save();\n            remoteCluster.save();\n            currentPosition += endSize;\n        }\n\n\n        this.fileNode.setPhysicalSize( currentPosition );\n        this.fileNode.setLogicSize   ( currentPosition );\n        this.fileNode.setChecksum    ( checkSum );\n        this.fileNode.setCrc32Xor    ( crc32 );\n        this.fileNode.setParityCheck ( (int) parityCheck );\n        this.mKOMFileSystem.update   ( this.fileNode );\n\n//       Verification verification = 
this.getVerification();\n//        fileNode.setChecksum( verification.getChecksum() );\n//        fileNode.setParityCheck( verification.getParityCheck() );\n//        fileNode.setCrc32Xor( Long.toHexString(verification.getCrc32().getValue()) );\n//        mKOMFileSystem.update( fileNode );\n    }\n\n    @Override\n    public void receive(LogicVolume volume, long segId) throws  IOException {\n        long frameSize = this.mKOMFileSystem.getConfig().getClusterSize().longValue();\n        FSNodeAllotment allotment = mKOMFileSystem.getFSNodeAllotment();\n        //this.mKOMFileSystem.deleteCluster( this.fileNode, segId );\n        LocalCluster localCluster = (LocalCluster)this.mKOMFileSystem.getClusterByFileWithId(this.fileNode.getGuid(), segId);\n        long endSize = frameSize;\n\n        long currentPosition = 0;\n        if( currentPosition + endSize > localCluster.getSize() ){\n            endSize = localCluster.getSize() - currentPosition;\n        }\n\n        //Debug.trace( \"更新簇的大小:\"+endSize );\n        RemoteCluster remoteCluster = allotment.newRemoteCluster( this.fileNode.getGuid(),(int)segId );\n        remoteCluster.setDeviceGuid(this.mKOMFileSystem.getConfig().getLocalhostGUID());\n        remoteCluster.setSegGuid( localCluster.getSegGuid() );\n\n        StorageReceiveIORequest storageReceiveIORequest = new TitanStorageReceiveIORequest();\n        storageReceiveIORequest.setSize( this.fileNode.getDefinitionSize() );\n        storageReceiveIORequest.setName( this.fileNode.getName() );\n        storageReceiveIORequest.setStorageObjectGuid( localCluster.getSegGuid() );\n\n        StorageIOResponse storageIOResponse = null;\n\n        ReceiveEntity receiveEntity = null;\n        receiveEntity = this.constructor.getReceiveEntity(volume.getClass(), this.volumeManager, storageReceiveIORequest, this.chanface, volume);\n        storageIOResponse = volume.receive( receiveEntity, currentPosition, endSize );\n\n        UniformSourceLocator uniformSourceLocator = 
new UniformSourceLocator();\n        if( storageIOResponse != null ){\n            localCluster.setCrc32( storageIOResponse.getCre32().getValue() );\n        }\n        uniformSourceLocator.setVolumeGuid( volume.getGuid().toString() );\n        localCluster.setSize( endSize );\n        localCluster.setSourceName( uniformSourceLocator.toJSONString() );\n        localCluster.setFileGuid( this.fileNode.getGuid() );\n        localCluster.setSegId( segId );\n\n        localCluster.save();\n        remoteCluster.save();\n    }\n\n    @Override\n    public void receive(LogicVolume volume, Number offset, Number endSize) throws IOException {\n\n    }\n\n    @Override\n    public void randomReceive(LogicVolume volume, Number offset, Number endSize) throws  IOException {\n        long frameSize = this.mKOMFileSystem.getConfig().getClusterSize().longValue();\n        this.fileNode.setGuid( mKOMFileSystem.queryGUIDByPath( this.destDirPath ) );\n\n        FSNodeAllotment allotment = mKOMFileSystem.getFSNodeAllotment();\n        long segId = offset.longValue() / frameSize + 1;\n        long startPosition = offset.longValue();\n        long endPosition = startPosition + endSize.longValue();\n        long frameTerminatePosition = segId * frameSize;\n        LocalCluster frame = (LocalCluster) this.mKOMFileSystem.getClusterByFileWithId(this.fileNode.getGuid(), segId);\n\n        if( frame == null ){\n            frame = allotment.newLocalCluster();\n            RemoteCluster remoteCluster = allotment.newRemoteCluster( this.fileNode.getGuid(),(int)segId );\n            remoteCluster.setDeviceGuid(this.mKOMFileSystem.getConfig().getLocalhostGUID());\n            remoteCluster.setSegGuid( frame.getSegGuid() );\n            remoteCluster.save();\n        }\n\n        if( endPosition <= frameTerminatePosition + frameSize ){\n            StorageReceiveIORequest storageReceiveIORequest = new TitanStorageReceiveIORequest();\n            storageReceiveIORequest.setSize( 
this.fileNode.getDefinitionSize() );\n            storageReceiveIORequest.setName( this.fileNode.getName() );\n            storageReceiveIORequest.setStorageObjectGuid( frame.getSegGuid() );\n\n            ReceiveEntity receiveEntity = null;\n            receiveEntity = this.constructor.getReceiveEntity(volume.getClass(), this.volumeManager, storageReceiveIORequest, this.chanface, volume);\n            volume.randomReceive( receiveEntity, startPosition, endSize );\n\n            UniformSourceLocator uniformSourceLocator = new UniformSourceLocator();\n            uniformSourceLocator.setVolumeGuid( volume.getGuid().toString() );\n            frame.setSize( frame.getSize() + endSize.longValue() );\n            frame.setSourceName( uniformSourceLocator.toJSONString() );\n            frame.setFileGuid( this.fileNode.getGuid() );\n            frame.setSegId( segId );\n\n            frame.save();\n        }\n        else {\n            long midPosition = Math.min(frameTerminatePosition + frameSize, endPosition);\n            this.randomReceive(volume, startPosition, midPosition - startPosition);\n            if (midPosition < endPosition) {\n                this.randomReceive(volume, midPosition, endPosition - midPosition);\n            }\n        }\n\n    }\n\n//    Verification getVerification() throws IOException {\n//        File tempFile = File.createTempFile(\"temp\",\".temp\");\n//        FileNode fileNode = (FileNode)this.mKOMFileSystem.get(this.mKOMFileSystem.queryGUIDByPath(this.destDirPath));\n//        FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);\n//        TitanFileChannelChanface kChannel = new TitanFileChannelChanface(channel);\n//        TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64(this.mKOMFileSystem, this.volumeManager, fileNode, kChannel);\n//        this.mKOMFileSystem.export( exportEntity );\n//\n//        return 
getVerification(tempFile);\n//    }\n\n    private Verification getVerification(File tempFile) throws IOException {\n        Verification verification = new Verification();\n\n        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(tempFile))) {\n            CRC32 crc = new CRC32();\n            long checksum = 0;\n            int parityCheck = 0;\n\n            // 使用一个缓冲区一次读取多个字节\n            byte[] buffer = new byte[8192]; // 8KB 缓冲区\n            int bytesRead;\n\n            while ((bytesRead = bis.read(buffer)) != -1) {\n                for (int i = 0; i < bytesRead; i++) {\n                    byte b = buffer[i];\n\n                    // 批量处理每个字节\n                    parityCheck += Bytes.calculateParity(b);\n                    checksum += b & 0xFF;\n                    crc.update(b);\n                }\n            }\n\n            verification.setChecksum(checksum);\n            verification.setCrc32(crc);\n            verification.setParityCheck(parityCheck);\n        }\n        return verification;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/file/transmit/receiver/TitanFileReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.file.transmit.receiver;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\n\nimport java.io.IOException;\n\npublic class TitanFileReceiveEntity64 extends ArchFileReceiveEntity  implements FileReceiveEntity64{\n    protected FileReceive fileReceive;\n\n\n    public TitanFileReceiveEntity64(KOMFileSystem fileSystem, String destDirPath, FileNode file, Chanface channel, VolumeManager volumeManager) {\n        super(fileSystem, destDirPath, file, channel, volumeManager);\n        this.fileReceive = new TitanFileReceive64( this );\n    }\n\n\n    @Override\n    public void receive() throws IOException {\n        this.fileSystem.affirmFileNode( this.destDirPath );\n        GUID volumeGuid = this.fileSystem.getMappingVolume(this.destDirPath);\n        LogicVolume volume = this.volumeManager.get(volumeGuid);\n        if ( !volume.checkCapacity( this.file.getDefinitionSize() ) ){\n            this.fileSystem.remove( this.fileSystem.queryGUIDByPath( destDirPath ) );\n            Debug.trace(\"容量不足\");\n            return;\n        }\n        volume.deductCapacity( this.file.getDefinitionSize() );\n\n        this.fileReceive.receive( volume );\n    }\n\n    @Override\n    public void receive( Number offset, Number endSize) throws IOException {\n        this.fileSystem.affirmFileNode( this.destDirPath );\n        GUID volumeGuid = this.fileSystem.getMappingVolume(this.destDirPath);\n        LogicVolume volume = this.volumeManager.get(volumeGuid);\n        if ( !volume.checkCapacity( this.file.getDefinitionSize() ) ){\n            this.fileSystem.remove( this.fileSystem.queryGUIDByPath( destDirPath ) );\n            
Debug.trace(\"容量不足\");\n            return;\n        }\n        volume.deductCapacity( this.file.getDefinitionSize() );\n        this.fileReceive.receive( volume, offset, endSize );\n    }\n\n    @Override\n    public void receive(long segId) throws IOException {\n        this.fileSystem.affirmFileNode( this.destDirPath );\n        GUID volumeGuid = this.fileSystem.getMappingVolume(this.destDirPath);\n        LogicVolume volume = this.volumeManager.get(volumeGuid);\n        if ( !volume.checkCapacity( this.file.getDefinitionSize() ) ){\n            this.fileSystem.remove( this.fileSystem.queryGUIDByPath( destDirPath ) );\n            Debug.trace(\"容量不足\");\n            return;\n        }\n        volume.deductCapacity( this.file.getDefinitionSize() );\n        this.fileReceive.receive( volume,segId );\n    }\n\n    @Override\n    public void randomReceive(Number offset, Number endSize) throws  IOException {\n        this.fileSystem.affirmFileNode( this.destDirPath );\n        GUID volumeGuid = this.fileSystem.getMappingVolume(this.destDirPath);\n        LogicVolume volume = this.volumeManager.get(volumeGuid);\n        if ( !volume.checkCapacity( this.file.getDefinitionSize() ) ){\n            this.fileSystem.remove( this.fileSystem.queryGUIDByPath( destDirPath ) );\n            Debug.trace(\"容量不足\");\n            return;\n        }\n        volume.deductCapacity( this.file.getDefinitionSize() );\n        this.fileReceive.randomReceive( volume, offset, endSize );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/Chanface.java",
    "content": "package com.pinecone.hydra.storage.io;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.List;\n\npublic interface Chanface extends Pinenut {\n    void position( long position ) throws IOException;\n\n    int read( ByteBuffer buffer ) throws IOException;\n\n    int read( ChanfaceReader reader, int size, long offset ) throws IOException;\n\n    int read( byte[] buffer, int size, long offset ) throws IOException;\n\n//    int read( ChanfaceReader reader, int size, long offset ) throws IOException;\n\n    int write( ByteBuffer buffer ) throws IOException;\n\n    int write( byte[] buffer, int startPosition, int endSize ) throws IOException;\n\n    int write( byte[] buffer, List<CacheBlock> writableCacheBlocks ) throws IOException;\n\n    long position() throws IOException;\n\n    void close() throws IOException;\n\n    Object getNativeFace();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/ChanfaceReader.java",
    "content": "package com.pinecone.hydra.storage.io;\n\nimport java.nio.ByteBuffer;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ChanfaceReader extends Pinenut {\n    void afterRead( ByteBuffer out );\n\n//    void afterRead( byte[] out );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/ChannelRecalled.java",
    "content": "package com.pinecone.hydra.storage.io;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ChannelRecalled extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/TitanFileChannelChanface.java",
    "content": "package com.pinecone.hydra.storage.io;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlockStatus;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.nio.channels.FileChannel;\nimport java.util.List;\nimport java.util.concurrent.locks.ReentrantLock;\n\npublic class TitanFileChannelChanface implements RandomAccessChanface {\n    private final FileChannel       channel;\n    private final ReentrantLock     reentrantLock;\n\n    public TitanFileChannelChanface(FileChannel channel ){\n        this.channel = channel;\n        this.reentrantLock = new ReentrantLock();\n    }\n\n    @Override\n    public void position(long position) throws IOException {\n        this.channel.position( position );\n    }\n\n    @Override\n    public int read( ByteBuffer buffer ) throws IOException {\n        return this.channel.read( buffer );\n    }\n\n    @Override\n    public int read(ChanfaceReader reader, int size, long offset ) throws IOException {\n        this.reentrantLock.lock();\n        int read = 0;\n        try {\n            this.channel.position( offset );\n            ByteBuffer buffer = ByteBuffer.allocateDirect(size);\n            read = this.channel.read(buffer);\n            reader.afterRead( buffer );\n        }\n        finally {\n            this.reentrantLock.unlock();\n        }\n        return read;\n    }\n\n    @Override\n    public int read(byte[] buffer, int size, long offset) throws IOException {\n        return 0;\n    }\n\n    @Override\n    public int write( ByteBuffer buffer ) throws IOException {\n        return this.channel.write( buffer );\n    }\n\n    @Override\n    public int write(byte[] buffer, int startPosition, int endSize) throws IOException {\n        ByteBuffer byteBuffer = 
this.copyToTemporaryBuffer(buffer, startPosition, endSize);\n        return this.channel.write( byteBuffer );\n    }\n\n    @Override\n    public int write(byte[] buffer, List<CacheBlock> writableCacheBlocks) throws IOException {\n        ByteBuffer byteBuffer = this.mergeArrays(buffer, writableCacheBlocks);\n        return this.channel.write(byteBuffer);\n    }\n\n    @Override\n    public long position() throws IOException {\n        return this.channel.position();\n    }\n\n    @Override\n    public void close() throws IOException {\n        this.channel.close();\n    }\n\n    @Override\n    public void mark(int readlimit) {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public void reset() throws IOException {\n        throw new NotImplementedException();\n    }\n\n    private ByteBuffer copyToTemporaryBuffer(byte[] buffer, int startPosition, int endSize ){\n        ByteBuffer temporaryBuffer = ByteBuffer.allocate( endSize );\n        temporaryBuffer.put( buffer, startPosition, endSize );\n        return temporaryBuffer;\n    }\n\n    private ByteBuffer mergeArrays( byte[] buffer, List<CacheBlock> writableCacheBlocks ){\n        // 计算所有缓存块的总长度\n        int totalLength = 0;\n        for (CacheBlock cacheBlock : writableCacheBlocks) {\n            totalLength += cacheBlock.getValidByteEnd().intValue() - cacheBlock.getValidByteStart().intValue();\n        }\n\n        // 创建一个 ByteBuffer 来存储合并的数据\n        ByteBuffer mergedBuffer = ByteBuffer.allocate(totalLength);\n\n        // 将数据从 mBuffer 复制到 mergedBuffer\n        for (CacheBlock cacheBlock : writableCacheBlocks) {\n            int start = cacheBlock.getValidByteStart().intValue();\n            int end = cacheBlock.getValidByteEnd().intValue();\n            int bufferSize = end - start;\n\n            // 将 mBuffer 中的数据复制到 mergedBuffer\n            mergedBuffer.put(buffer, start, bufferSize);\n\n            // 将缓存块状态设置为 Free\n            cacheBlock.setStatus(CacheBlockStatus.Free);\n    
    }\n        // 准备将 mergedBuffer 用于读取\n        mergedBuffer.flip();\n        return mergedBuffer;\n    }\n\n    @Override\n    public Object getNativeFace() {\n        return this.channel;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/TitanInputStreamChanface.java",
    "content": "package com.pinecone.hydra.storage.io;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.nio.ByteBuffer;\nimport java.util.List;\n\npublic class TitanInputStreamChanface implements Chanface {\n    protected InputStream          stream;\n\n    public TitanInputStreamChanface(InputStream stream ){\n        this.stream        = stream;\n    }\n\n    @Override\n    public void position(long position) throws IOException {\n        this.stream.skip( position );\n    }\n\n    @Override\n    public int read( ByteBuffer buffer ) throws IOException {\n        byte[] tempBuffer = new byte[buffer.remaining()];\n        int bytesRead = stream.read(tempBuffer);\n        buffer.put(tempBuffer, 0, bytesRead);\n        return bytesRead;\n    }\n\n    @Override\n    public int read(ChanfaceReader reader, int size, long offset ) throws IOException {\n        int bytesRead = 0;\n        byte[] tempBuffer = new byte[ size ];\n        bytesRead = stream.read( tempBuffer );\n        ByteBuffer buffer = ByteBuffer.wrap(tempBuffer);\n        reader.afterRead( buffer );\n        //buffer.put(tempBuffer, 0, bytesRead);\n        return bytesRead;\n    }\n\n    @Override\n    public int read(byte[] buffer, int size, long offset) throws IOException {\n        return this.stream.read(buffer,(int)offset,size);\n    }\n\n    @Override\n    public int write(ByteBuffer buffer) throws IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public int write(byte[] buffer, int startPosition, int endSize) throws IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public int write(byte[] buffer, List<CacheBlock> writableCacheBlocks) throws IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public long position() throws 
IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public void close() throws IOException {\n        this.stream.close();\n    }\n\n    @Override\n    public Object getNativeFace() {\n        return this.stream;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/TitanOutputStreamChanface.java",
    "content": "package com.pinecone.hydra.storage.io;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlockStatus;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport java.util.concurrent.locks.ReentrantLock;\n\npublic class TitanOutputStreamChanface implements Chanface {\n    protected OutputStream  stream;\n\n    protected final ReentrantLock reentrantLock;\n\n    public TitanOutputStreamChanface(OutputStream stream ) {\n        this.stream = stream;\n        this.reentrantLock = new ReentrantLock();\n    }\n\n    @Override\n    public void position(long position) throws IOException {\n\n    }\n\n    @Override\n    public int read( ByteBuffer buffer ) throws IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public int read(ChanfaceReader reader, int size, long offset ) throws IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public int read(byte[] buffer, int size, long offset) throws IOException {\n        return 0;\n    }\n\n    @Override\n    public int write(ByteBuffer buffer) throws IOException {\n        return this.writeToByte( buffer );\n    }\n\n    @Override\n    public int write(byte[] buffer, int startPosition, int endSize) throws IOException {\n        this.stream.write( buffer, startPosition, endSize );\n        return endSize;\n    }\n\n    @Override\n    public int write(byte[] buffer, List<CacheBlock> writableCacheBlocks) throws IOException {\n        int length = 0;\n\n        for( CacheBlock cacheBlock : writableCacheBlocks ){\n            length += ( cacheBlock.getValidByteEnd().intValue() - cacheBlock.getValidByteStart().intValue() );\n            this.stream.write( buffer, cacheBlock.getValidByteStart().intValue(), 
cacheBlock.getValidByteEnd().intValue() - cacheBlock.getValidByteStart().intValue() );\n            cacheBlock.setStatus( CacheBlockStatus.Free );\n        }\n        return length;\n    }\n\n    @Override\n    public long position() throws IOException {\n        return 0;\n    }\n\n    @Override\n    public void close() throws IOException {\n        this.stream.close();\n    }\n\n\n    private int writeToByte(ByteBuffer buffer) throws IOException {\n        if (buffer == null) {\n            throw new NullPointerException(\"Buffer is null\");\n        }\n\n        int bytesWritten = 0;\n        byte[] tempArray = new byte[buffer.remaining()];\n        buffer.get(tempArray);\n        this.stream.write(tempArray);\n        bytesWritten = tempArray.length;\n\n        return bytesWritten;\n    }\n\n\n    @Override\n    public Object getNativeFace() {\n        return this.stream;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/TitanRandomInputStreamAccessChanface.java",
    "content": "package com.pinecone.hydra.storage.io;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.FilterInputStream;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.List;\nimport java.util.concurrent.locks.ReentrantLock;\n\npublic class TitanRandomInputStreamAccessChanface implements RandomAccessChanface {\n    protected FilterInputStream stream;\n\n    protected final ReentrantLock reentrantLock;\n\n    public TitanRandomInputStreamAccessChanface(FilterInputStream stream ){\n        this.stream        = stream;\n        this.reentrantLock = new ReentrantLock();\n    }\n\n    @Override\n    public void position(long position) throws IOException {\n        this.stream.skip( position );\n    }\n\n    @Override\n    public int read( ByteBuffer buffer ) throws IOException {\n        byte[] tempBuffer = new byte[buffer.remaining()];\n        int bytesRead = stream.read(tempBuffer);\n        buffer.put(tempBuffer, 0, bytesRead);\n        return bytesRead;\n    }\n\n    @Override\n    public int read(ChanfaceReader reader, int size, long offset ) throws IOException {\n        int bytesRead = 0;\n        byte[] tempBuffer = new byte[ size ];\n        bytesRead = stream.read( tempBuffer );\n        ByteBuffer buffer = ByteBuffer.wrap(tempBuffer);\n        reader.afterRead( buffer );\n        //buffer.put(tempBuffer, 0, bytesRead);\n        return bytesRead;\n    }\n\n    @Override\n    public int read(byte[] buffer, int size, long offset) throws IOException {\n        return 0;\n    }\n\n    @Override\n    public int write(ByteBuffer buffer) throws IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public int write(byte[] buffer, int startPosition, int endSize) throws IOException {\n        throw new NotImplementedException();\n    }\n\n    
@Override\n    public int write(byte[] buffer, List<CacheBlock> writableCacheBlocks) throws IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public long position() throws IOException {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public void close() throws IOException {\n        this.stream.close();\n    }\n\n    @Override\n    public void mark(int readlimit) {\n        this.stream.mark( readlimit );\n    }\n\n    @Override\n    public void reset() throws IOException {\n        this.stream.reset();\n    }\n\n    @Override\n    public Object getNativeFace() {\n        return this.stream;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/io/UIOException.java",
    "content": "package com.pinecone.hydra.storage.io;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UIOException extends IOException implements Pinenut {\n    public UIOException() {\n        super();\n    }\n\n    public UIOException( String message ) {\n        super(message);\n    }\n\n    public UIOException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public UIOException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/GenericNativeMFile.java",
    "content": "package com.pinecone.hydra.storage.mfs;\n\nimport java.io.File;\nimport java.net.URI;\n\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.system.ko.handle.ArchKHandle;\n\npublic class GenericNativeMFile extends ArchKHandle implements NativeMFile {\n    protected File mNativeFile;\n\n    public GenericNativeMFile( File file ) {\n        this.mNativeFile  = file;\n    }\n\n    public GenericNativeMFile( URI uri ) {\n        this( new File( uri ) );\n    }\n\n\n    @Override\n    public URI toURI() {\n        return this.mNativeFile.toURI();\n    }\n\n    @Override\n    public String getName() {\n        return this.mNativeFile.getName();\n    }\n\n    @Override\n    public Number size() {\n        return this.mNativeFile.length();\n    }\n\n    @Override\n    public String getURI() {\n        return this.toURI().toString();\n    }\n\n    @Override\n    public String getPath() {\n        return this.mNativeFile.getPath();\n    }\n\n    @Override\n    public boolean delete() {\n        return this.mNativeFile.delete();\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public File getNativeHandle() {\n        return this.mNativeFile;\n    }\n\n    @Override\n    public boolean exists() {\n        return this.mNativeFile.exists();\n    }\n\n    @Override\n    public boolean isAbsolute() {\n        return this.mNativeFile.isAbsolute();\n    }\n\n    @Override\n    public boolean isDirectory() {\n        return this.mNativeFile.isDirectory();\n    }\n\n    @Override\n    public MFile[] listFiles() {\n        File[] files = this.mNativeFile.listFiles();\n        if( files == null ){\n            return new MFile[0];\n        }\n        MFile[] mFiles = new MFile[ files.length ];\n        for( int i = 0; i < files.length; ++i 
){\n            mFiles[i] = new GenericNativeMFile( files[i] );\n        }\n        return mFiles;\n    }\n\n    @Override\n    public String getMetaType() {\n        return MetaType;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/MFile.java",
    "content": "package com.pinecone.hydra.storage.mfs;\n\nimport java.net.URI;\n\nimport com.pinecone.hydra.storage.UFile;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\n\npublic interface MFile extends UFile, EntityNode {\n\n    URI toURI();\n\n    @Override\n    String getName();\n\n    String getPath();\n\n    String getURI();\n\n    boolean delete();\n\n    Object getNativeHandle();\n\n    boolean exists();\n\n    boolean isAbsolute();\n\n    boolean isDirectory();\n\n    MFile[] listFiles();\n\n    default String getMetaType() {\n        return this.className().replace( MFile.class.getName(), \"\" );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/MappingFileSystem.java",
    "content": "package com.pinecone.hydra.storage.mfs;\n\nimport com.pinecone.hydra.system.ko.handle.AppliableKHandle;\nimport com.pinecone.hydra.system.ko.handle.ObjectTreeAddressingSectionHandle;\n\nimport java.io.IOException;\n\npublic interface MappingFileSystem extends ObjectTreeAddressingSectionHandle, AppliableKHandle {\n\n    void copy( String sourcePath, String destinationPath ) throws IOException;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/NativeMFile.java",
    "content": "package com.pinecone.hydra.storage.mfs;\n\nimport java.io.File;\n\npublic interface NativeMFile extends MFile {\n\n    String MetaType = NativeMFile.class.getSimpleName();\n\n    @Override\n    File getNativeHandle();\n\n    @Override\n    default String getMetaType() {\n        return MetaType;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/mfs/NativeMappingFileSystem.java",
    "content": "package com.pinecone.hydra.storage.mfs;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.external.GenericNativeExternalFile;\nimport com.pinecone.hydra.storage.file.external.GenericNativeExternalFolder;\nimport com.pinecone.hydra.storage.natives.NativeExternalFileSystems;\nimport com.pinecone.hydra.system.ko.handle.ArchKHandle;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URI;\n\npublic class NativeMappingFileSystem extends ArchKHandle implements MappingFileSystem {\n\n    protected URI    mMountPointURI;\n\n    public NativeMappingFileSystem( URI mountPointURI, String treeNodeName, GUID treeNodeGuid ) {\n        super( treeNodeName, treeNodeGuid );\n        this.mMountPointURI  = mountPointURI;\n    }\n\n    public NativeMappingFileSystem( String localFileMountScope, String treeNodeName, GUID treeNodeGuid ) {\n        this( URI.create( \"file:///\" + localFileMountScope ), treeNodeName, treeNodeGuid );\n    }\n\n    public NativeMappingFileSystem( URI mountPointURI ) {\n        this( mountPointURI, null, null );\n    }\n\n    public NativeMappingFileSystem( String localFileMountScope ) {\n        this( localFileMountScope, null, null );\n    }\n\n    @Override\n    public EntityNode queryNode( String path ) {\n        URI fullURI = this.mMountPointURI.resolve( path );\n        return new GenericNativeMFile( fullURI );\n    }\n\n    @Override\n    public void copy( String sourcePath, String destinationPath ) throws IOException {\n        NativeExternalFileSystems.copy( sourcePath, destinationPath );\n    }\n\n    @Override\n    public String getName() {\n        return this.mszTreeNodeName;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mTreeNodeGuid;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/natives/NativeExternalFileSystems.java",
    "content": "package com.pinecone.hydra.storage.natives;\n\n\nimport java.io.IOException;\nimport java.nio.file.FileVisitResult;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.nio.file.SimpleFileVisitor;\nimport java.nio.file.StandardCopyOption;\nimport java.nio.file.attribute.BasicFileAttributes;\n\nimport com.pinecone.framework.util.io.FileUtils;\n\npublic final class NativeExternalFileSystems {\n\n    public static void copy( String sourcePath, String destinationPath ) throws IOException {\n        // 注意参数语义交换：destinationPath是待复制的内容，sourcePath是目标容器目录\n        Path source = Paths.get(destinationPath); // 实际要复制的源内容\n        Path destinationDir = Paths.get(sourcePath); // 目标容器目录\n\n        // 校验源是否存在\n        if ( !Files.exists(source) ) {\n            throw new IOException(\"Source to copy does not exist: \" + source);\n        }\n\n        // 确保目标目录存在\n        if ( !Files.exists(destinationDir) ) {\n            Files.createDirectories(destinationDir);\n        }\n\n        // 如果源是单个文件，直接复制到目标目录\n        if ( Files.isRegularFile(source) ) {\n            Path target = destinationDir.resolve(source.getFileName());\n            Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);\n            return;\n        }\n\n        // 处理目录复制（保留目录结构）\n        Files.walkFileTree(source, new SimpleFileVisitor<Path>() {\n            @Override\n            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {\n                // 计算相对路径：从源根目录到当前目录\n                Path relative = source.relativize(dir);\n\n                // 构建目标目录路径\n                Path targetDir = destinationDir.resolve(relative);\n\n                // 创建目标目录（如果不存在）\n                if (!Files.exists(targetDir)) {\n                    Files.createDirectories(targetDir);\n                }\n                return FileVisitResult.CONTINUE;\n            }\n\n            @Override\n            public 
FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {\n                // 计算相对路径：从源根目录到当前文件\n                Path relative = source.relativize(file);\n\n                // 构建目标文件路径\n                Path targetFile = destinationDir.resolve(relative);\n\n                // 复制文件并覆盖已存在文件\n                Files.copy(file, targetFile, StandardCopyOption.REPLACE_EXISTING);\n                return FileVisitResult.CONTINUE;\n            }\n\n            @Override\n            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {\n                if (exc != null) {\n                    throw exc; // 传播异常\n                }\n                return FileVisitResult.CONTINUE;\n            }\n        });\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/PolicyManage.java",
    "content": "package com.pinecone.hydra.storage.policy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.policy.entity.Policy;\nimport com.pinecone.hydra.storage.policy.source.PolicyMasterManipulator;\n\nimport java.util.List;\n\npublic interface PolicyManage extends Pinenut {\n    void insertPolicy( Policy policy );\n\n    void removePolicy(GUID policyGuid );\n\n    Policy queryPolicy( GUID policyGuid );\n\n    void insertFilePolicyMapping( GUID policyGuid, String filePath );\n\n    void removeFilePolicyMapping( GUID policyGuid, String filePath );\n\n    List<GUID> queryPolicyGuid(String pathPath );\n\n    PolicyMasterManipulator getMasterManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/TitanPolicyManage.java",
    "content": "package com.pinecone.hydra.storage.policy;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.policy.entity.Policy;\nimport com.pinecone.hydra.storage.policy.source.PolicyFileMappingManipulator;\nimport com.pinecone.hydra.storage.policy.source.PolicyManipulator;\nimport com.pinecone.hydra.storage.policy.source.PolicyMasterManipulator;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.util.List;\n\npublic class TitanPolicyManage implements PolicyManage{\n    protected PolicyManipulator                 policyManipulator;\n\n    protected Hydrogen                          hydrogen;\n\n    protected PolicyMasterManipulator           masterManipulator;\n\n    protected GuidAllocator                     guidAllocator;\n\n    protected PolicyFileMappingManipulator      policyFileMappingManipulator;\n\n\n\n    public TitanPolicyManage(Hydrogen hydrogen, KOIMasterManipulator masterManipulator, String name ){\n       this.hydrogen = hydrogen;\n       this.masterManipulator             = (PolicyMasterManipulator) masterManipulator;\n       this.guidAllocator                 = GUIDs.newGuidAllocator();\n       this.policyManipulator             = this.masterManipulator.getPolicyManipulator();\n       this.policyFileMappingManipulator  = this.masterManipulator.getPolicyFileMappingManipulator();\n    }\n\n    @Override\n    public void insertPolicy(Policy policy) {\n        this.policyManipulator.insert( policy );\n    }\n\n    @Override\n    public void removePolicy(GUID policyGuid) {\n        this.policyManipulator.remove( policyGuid );\n    }\n\n    @Override\n    public Policy queryPolicy(GUID policyGuid) {\n        return this.policyManipulator.queryPolicy( policyGuid );\n    }\n\n    @Override\n    public void insertFilePolicyMapping(GUID policyGuid, String 
filePath) {\n        this.policyFileMappingManipulator.insert( policyGuid, filePath );\n    }\n\n    @Override\n    public void removeFilePolicyMapping(GUID policyGuid, String filePath) {\n        this.policyFileMappingManipulator.remove( policyGuid, filePath );\n    }\n\n    @Override\n    public List<GUID> queryPolicyGuid(String pathPath) {\n        return this.policyFileMappingManipulator.queryPolicy(pathPath);\n    }\n\n    @Override\n    public PolicyMasterManipulator getMasterManipulator() {\n        return this.masterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/chain/PolicyChain.java",
    "content": "package com.pinecone.hydra.storage.policy.chain;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\n\npublic interface PolicyChain extends Pinenut {\n    GUID execution( String filePath, String version );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/chain/VersionPolicyChain.java",
    "content": "package com.pinecone.hydra.storage.policy.chain;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.policy.PolicyManage;\nimport com.pinecone.hydra.storage.version.source.VersionManipulator;\n\npublic class VersionPolicyChain implements PolicyChain {\n    protected PolicyManage          policyManage;\n\n    protected VersionManipulator    versionManipulator;\n\n    public VersionPolicyChain( PolicyManage policyManage ){\n        this.policyManage = policyManage;\n    }\n    @Override\n    public GUID execution( String filePath, String version ) {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/chain/factory/PolicyChainFactory.java",
    "content": "package com.pinecone.hydra.storage.policy.chain.factory;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface PolicyChainFactory extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/entity/GenericPolicy.java",
    "content": "package com.pinecone.hydra.storage.policy.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class GenericPolicy implements Policy {\n    protected long          enumId;\n    protected String        policyName;\n    protected GUID          policyGuid;\n    protected String        policyDesc;\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public String getPolicyName() {\n        return this.policyName;\n    }\n\n    @Override\n    public void setPolicyName(String policyName) {\n        this.policyName = policyName;\n    }\n\n    @Override\n    public GUID getPolicyGuid() {\n        return this.policyGuid;\n    }\n\n    @Override\n    public void setPolicyGuid(GUID policyGuid) {\n        this.policyGuid = policyGuid;\n    }\n\n    @Override\n    public String getPolicyDesc() {\n        return this.policyDesc;\n    }\n\n    @Override\n    public void setPolicyDesc(String policyDesc) {\n        this.policyDesc = policyDesc;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/entity/Policy.java",
    "content": "package com.pinecone.hydra.storage.policy.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface Policy extends Pinenut {\n    long getEnumId();\n    void setEnumId(long enumId);\n\n    String getPolicyName();\n    void setPolicyName(String policyName);\n\n    GUID getPolicyGuid();\n    void setPolicyGuid(GUID policyGuid);\n\n    String getPolicyDesc();\n    void setPolicyDesc(String policyDesc);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/source/PolicyFileMappingManipulator.java",
    "content": "package com.pinecone.hydra.storage.policy.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.List;\n\npublic interface PolicyFileMappingManipulator extends Pinenut {\n    void insert(GUID policyGuid, String filePath);\n\n    void remove(GUID policyGuid, String filePath);\n\n    List<GUID> queryPolicy(String filePath );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/source/PolicyManipulator.java",
    "content": "package com.pinecone.hydra.storage.policy.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.policy.entity.Policy;\n\npublic interface PolicyManipulator extends Pinenut {\n    void insert(Policy policy);\n\n    void remove(GUID policyGuid);\n\n    Policy queryPolicy( GUID policyGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/policy/source/PolicyMasterManipulator.java",
    "content": "package com.pinecone.hydra.storage.policy.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface PolicyMasterManipulator extends KOIMasterManipulator {\n    PolicyManipulator                   getPolicyManipulator();\n    PolicyFileMappingManipulator        getPolicyFileMappingManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/remote/RemoteFSInstrument.java",
    "content": "package com.pinecone.hydra.storage.remote;\n\npublic interface RemoteFSInstrument {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/remote/RemoteUOFSInstrument.java",
    "content": "package com.pinecone.hydra.storage.remote;\n\npublic class RemoteUOFSInstrument implements RemoteFSInstrument {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/TitanVersionManage.java",
    "content": "package com.pinecone.hydra.storage.version;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.version.entity.TitanVersion;\nimport com.pinecone.hydra.storage.version.entity.TitanVersionMapping;\nimport com.pinecone.hydra.storage.version.entity.VersionMapping;\nimport com.pinecone.hydra.storage.version.source.VersionManipulator;\nimport com.pinecone.hydra.storage.version.source.VersionMappingManipulator;\nimport com.pinecone.hydra.storage.version.source.VersionMasterManipulator;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.util.List;\n\npublic class TitanVersionManage implements VersionManage {\n    protected Hydrogen                      hydrogen;\n\n    protected GuidAllocator                 guidAllocator;\n\n    protected VersionMasterManipulator      masterManipulator;\n\n    protected VersionManipulator            versionManipulator;\n    protected VersionMappingManipulator     versionMappingManipulator;\n\n    public TitanVersionManage(Hydrogen hydrogen, KOIMasterManipulator masterManipulator, String name ){\n        this.hydrogen = hydrogen;\n        this.masterManipulator          = (VersionMasterManipulator) masterManipulator;\n        this.guidAllocator              = GUIDs.newGuidAllocator();\n        this.versionManipulator         = this.masterManipulator.getVersionManipulator();\n        this.versionMappingManipulator  = this.masterManipulator.getVersionMappingManipulator();\n    }\n\n    public TitanVersionManage( Hydrogen hydrogen, KOIMasterManipulator masterManipulator ){\n        this(hydrogen, masterManipulator, VersionManage.class.getSimpleName() );\n    }\n\n    public TitanVersionManage( KOIMappingDriver driver ) {\n        this(\n                
driver.getSystem(),\n                driver.getMasterManipulator()\n        );\n    }\n\n\n    @Override\n    public void insert(TitanVersion version) {\n        this.versionManipulator.insertObjectVersion( version);\n    }\n\n    @Override\n    public void remove(String version, GUID fileGuid) {\n        this.versionManipulator.removeObjectVersion( version, fileGuid );\n    }\n\n    @Override\n    public boolean queryIsManage(GUID targetStorageObjectGuid) {\n        return this.versionManipulator.queryIsManage( targetStorageObjectGuid );\n    }\n\n    @Override\n    public GUID queryObjectGuid(String version, GUID fileGuid) {\n        return this.versionManipulator.queryObjectGuid( version, fileGuid );\n    }\n\n    @Override\n    public List<GUID> fetchVersions(GUID guid) {\n        return  this.versionManipulator.fetchVersions( guid );\n    }\n\n    @Override\n    public GUID getVersionFileByGuid(GUID fileGuid) {\n        return this.versionManipulator.getVersionFileByGuid( fileGuid );\n    }\n\n    @Override\n    public TitanVersion queryByTargetStorageObjectGuid(GUID targetStorageObjectGuid) {\n        return this.versionManipulator.queryByTargetStorageObjectGuid( targetStorageObjectGuid );\n    }\n\n    @Override\n    public boolean isExistEnableVersionMapping(GUID enableVersionGuid) {\n        for (TitanVersionMapping versionMapping : this.versionMappingManipulator.queryAllVersionMapper())\n            if (versionMapping.getEnableVersionGuid().equals(enableVersionGuid))\n                return true;\n        return false;\n    }\n\n    @Override\n    public VersionMapping queryVersionMapping(GUID fileGuid) {\n        for (TitanVersionMapping versionMapping : this.versionMappingManipulator.queryAllVersionMapper())\n            if (versionMapping.getFileGuid().equals(fileGuid))\n                return versionMapping;\n        return null;\n    }\n\n    @Override\n    public void UpdateVesionMapping(VersionMapping versionMapping) {\n        
this.versionMappingManipulator.update(versionMapping);\n    }\n\n    @Override\n    public void insertVesionMapping(TitanVersionMapping versionMapping) {\n        this.versionMappingManipulator.insert(versionMapping);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/VersionManage.java",
    "content": "package com.pinecone.hydra.storage.version;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.version.entity.TitanVersion;\nimport com.pinecone.hydra.storage.version.entity.TitanVersionMapping;\nimport com.pinecone.hydra.storage.version.entity.VersionMapping;\n\nimport java.util.List;\n\npublic interface VersionManage extends Pinenut {\n    void insert(TitanVersion version);\n\n    void remove(String version, GUID fileGuid);\n\n    GUID queryObjectGuid(String version, GUID fileGuid );\n\n    boolean queryIsManage(GUID targetStorageObjectGuid);\n\n    List<GUID> fetchVersions(GUID guid);\n\n    GUID getVersionFileByGuid( GUID fileGuid );\n\n    TitanVersion queryByTargetStorageObjectGuid(GUID targetStorageObjectGuid);\n\n    boolean isExistEnableVersionMapping(GUID enableVersionGuid);\n\n    VersionMapping queryVersionMapping(GUID fileGuid);\n\n    void UpdateVesionMapping(VersionMapping versionMapping);\n\n    void insertVesionMapping(TitanVersionMapping versionMapping);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/entity/TitanVersion.java",
    "content": "package com.pinecone.hydra.storage.version.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic class TitanVersion implements Version{\n    private long enumId;\n\n    private String version;\n\n    private GUID targetStorageObjectGuid;\n\n    private GUID fileGuid;\n\n    private GUID versionGuid;\n\n    private boolean enableCrc32;\n\n    private long crc32;\n\n\n    public TitanVersion() {\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    @Override\n    public String getVersion() {\n        return this.version;\n    }\n\n    @Override\n    public void setVersion(String version) {\n        this.version = version;\n    }\n\n    @Override\n    public GUID getTargetStorageObjectGuid() {\n        return this.targetStorageObjectGuid;\n    }\n\n    @Override\n    public void setTargetStorageObjectGuid(GUID targetStorageObjectGuid) {\n        this.targetStorageObjectGuid = targetStorageObjectGuid;\n    }\n\n    @Override\n    public GUID getFileGuid() {\n        return this.fileGuid;\n    }\n\n    @Override\n    public void setFileGuid(GUID fileGuid) {\n        this.fileGuid = fileGuid;\n    }\n\n    @Override\n    public boolean getEnableCrc32() {\n        return this.enableCrc32;\n    }\n\n    @Override\n    public void setEnableCrc32(boolean enableCrc32) {\n        this.enableCrc32 = enableCrc32;\n    }\n\n    @Override\n    public long getCrc32() {\n        return this.crc32;\n    }\n\n    @Override\n    public void setCrc32(long crc32) {\n        this.crc32 = crc32;\n    }\n\n    @Override\n    public GUID getVersionGuid() {\n        return versionGuid;\n    }\n    @Override\n    public void setVersionGuid(GUID versionGuid) {\n        this.versionGuid = versionGuid;\n    }\n\n    @Override\n    public String toJSONString() {\n  
      return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/entity/TitanVersionMapping.java",
    "content": "package com.pinecone.hydra.storage.version.entity;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class TitanVersionMapping implements VersionMapping{\n    GUID versionGuid;\n    GUID fileGuid;\n    GUID enableVersionGuid;\n\n    public TitanVersionMapping() {\n    }\n\n    public TitanVersionMapping(GUID versionGuid, GUID fileGuid, GUID enableVersionGuid) {\n        this.versionGuid = versionGuid;\n        this.fileGuid = fileGuid;\n        this.enableVersionGuid = enableVersionGuid;\n    }\n\n    public GUID getVersionGuid() {\n        return versionGuid;\n    }\n\n    public void setVersionGuid(GUID versionGuid) {\n        this.versionGuid = versionGuid;\n    }\n\n    public GUID getFileGuid() {\n        return fileGuid;\n    }\n\n    public void setFileGuid(GUID fileGuid) {\n        this.fileGuid = fileGuid;\n    }\n\n    public GUID getEnableVersionGuid() {\n        return enableVersionGuid;\n    }\n\n    public void setEnableVersionGuid(GUID enableVersionGuid) {\n        this.enableVersionGuid = enableVersionGuid;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/entity/Version.java",
    "content": "package com.pinecone.hydra.storage.version.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface Version extends Pinenut {\n    long getEnumId();\n    void setEnumId( long enumId );\n    String getVersion();\n    void setVersion( String version );\n\n    GUID getTargetStorageObjectGuid();\n    void setTargetStorageObjectGuid( GUID targetStorageObjectGuid );\n\n    GUID getFileGuid();\n    void setFileGuid( GUID fileGuid );\n\n    boolean getEnableCrc32();\n    void setEnableCrc32( boolean enableCrc32 );\n\n    long getCrc32();\n    void setCrc32( long crc32 );\n\n    void setVersionGuid(GUID versionGuid);\n    GUID getVersionGuid();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/entity/VersionMapping.java",
    "content": "package com.pinecone.hydra.storage.version.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface VersionMapping extends Pinenut {\n    GUID getVersionGuid();\n    void setVersionGuid(GUID versionGuid);\n    GUID getFileGuid();\n    void setFileGuid(GUID fileGuid);\n    GUID getEnableVersionGuid();\n    void setEnableVersionGuid(GUID enableVersionGuid);\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/source/VersionManipulator.java",
    "content": "package com.pinecone.hydra.storage.version.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.version.entity.TitanVersion;\nimport com.pinecone.hydra.storage.version.entity.Version;\n\nimport java.util.List;\n\npublic interface VersionManipulator extends Pinenut {\n    void insertObjectVersion(Version version);\n\n    void removeObjectVersion( String version, GUID fileGuid );\n\n    GUID queryObjectGuid( String version, GUID fileGuid );\n\n    boolean queryIsManage(GUID fileGuid);\n\n    List<GUID> fetchVersions(GUID guid);\n\n    GUID getVersionFileByGuid(GUID fileGuid);\n\n    TitanVersion queryByTargetStorageObjectGuid(GUID targetStorageObjectGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/source/VersionMappingManipulator.java",
    "content": "package com.pinecone.hydra.storage.version.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.version.entity.TitanVersionMapping;\nimport com.pinecone.hydra.storage.version.entity.VersionMapping;\n\nimport java.util.List;\n\npublic interface VersionMappingManipulator extends Pinenut {\n      void insert(VersionMapping versionMapping);\n       void remove(VersionMapping versionMapping);\n\n       TitanVersionMapping queryVersionMapping(GUID fileGuid);\n\n       void update(VersionMapping versionMapping);\n\n       List<TitanVersionMapping> queryAllVersionMapper();\n\n       boolean isExistEnableVersionMapping(GUID enableVersionGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/version/source/VersionMasterManipulator.java",
    "content": "package com.pinecone.hydra.storage.version.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface VersionMasterManipulator extends KOIMasterManipulator {\n    VersionManipulator getVersionManipulator();\n\n    VersionMappingManipulator getVersionMappingManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/IUnifiedTransmitConstructor.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\n\npublic interface IUnifiedTransmitConstructor extends Pinenut {\n    ReceiveEntity getReceiveEntity(Class< ? extends LogicVolume> volumeClass, Object... params);\n\n    ExporterEntity getExportEntity( Class< ? extends LogicVolume > volumeClass, Object... params );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/KernelVolumeConfig.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.hydra.storage.ArchStorageConfig;\nimport com.pinecone.hydra.storage.StorageConstants;\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\nimport java.util.Map;\n\npublic class KernelVolumeConfig extends ArchStorageConfig implements VolumeConfig {\n    protected String mszVersionSignature             = StorageConstants.StorageVersionSignature;\n\n    protected Number mnTinyFileStripSizing           = VolumeConstants.TinyFileStripSizing  ;\n    protected Number mnSmallFileStripSizing          = VolumeConstants.SmallFileStripSizing ;\n    protected Number mnMegaFileStripSizing           = VolumeConstants.MegaFileStripSizing  ;\n    protected Number mnDefaultStripSize              = VolumeConstants.DefaultStripSize     ;\n    protected int    mStripResidentCacheAllotRatio   = VolumeConstants.StripResidentCacheAllotRatio;\n    protected String mStorageObjectExtension         = VolumeConstants.StorageObjectExtension;\n    protected String mSqliteFileExtension            = VolumeConstants.SqliteFileExtension;\n    protected String mPathSeparator                  = VolumeConstants.PathSeparator;\n\n    public KernelVolumeConfig(){\n        super();\n    }\n\n    public KernelVolumeConfig(Map<String, Object> config){\n        super(config);\n        this.mszVersionSignature            = (String) config.getOrDefault(\"VersionSignature\", StorageConstants.StorageVersionSignature);\n        this.mnTinyFileStripSizing          = (Number) config.getOrDefault(\"TinyFileStripSizing\", VolumeConstants.TinyFileStripSizing);\n        this.mnSmallFileStripSizing         = (Number) config.getOrDefault(\"SmallFileStripSizing\", VolumeConstants.SmallFileStripSizing);\n        this.mnMegaFileStripSizing          = (Number) config.getOrDefault(\"MegaFileStripSizing\", VolumeConstants.MegaFileStripSizing);\n        this.mnDefaultStripSize             = (Number) 
config.getOrDefault(\"DefaultStripSize\", VolumeConstants.DefaultStripSize);\n        this.mStripResidentCacheAllotRatio  = ((Number) config.getOrDefault(\"StripResidentCacheAllotRatio\", VolumeConstants.StripResidentCacheAllotRatio)).intValue();\n        this.mStorageObjectExtension        = (String) config.getOrDefault(\"StorageObjectExtension\", VolumeConstants.StorageObjectExtension);\n        this.mSqliteFileExtension           = (String) config.getOrDefault(\"SqliteFileExtension\", VolumeConstants.SqliteFileExtension);\n        this.mPathSeparator                 = (String) config.getOrDefault(\"PathSeparator\", VolumeConstants.PathSeparator);\n    }\n\n    @Override\n    public String getVersionSignature() {\n        return this.mszVersionSignature;\n    }\n\n    @Override\n    public Number getTinyFileStripSizing() {\n        return this.mnTinyFileStripSizing;\n    }\n\n    @Override\n    public Number getSmallFileStripSizing() {\n        return this.mnSmallFileStripSizing;\n    }\n\n    @Override\n    public Number getMegaFileStripSizing() {\n        return this.mnMegaFileStripSizing;\n    }\n\n    @Override\n    public Number getDefaultStripSize() {\n        return this.mnDefaultStripSize;\n    }\n\n    @Override\n    public int getStripResidentCacheAllotRatio() {\n        return this.mStripResidentCacheAllotRatio;\n    }\n\n    @Override\n    public String getStorageObjectExtension() {\n        return this.mStorageObjectExtension;\n    }\n\n    @Override\n    public String getSqliteFileExtension() {\n        return this.mSqliteFileExtension;\n    }\n\n    @Override\n    public String getPathSeparator() {\n        return this.mPathSeparator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/TitanVolumeFile.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.hydra.storage.CheckedFile;\n\npublic class TitanVolumeFile implements VolumeFile{\n    private String name;\n    private Number size;\n    private long   checksum;\n    private int   parityCheck;\n\n    public TitanVolumeFile( String name, Number size ){\n        this.name = name;\n        this.size = size;\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public Number size() {\n        return this.size;\n    }\n\n    @Override\n    public long getChecksum() {\n        return this.checksum;\n    }\n\n    @Override\n    public void setChecksum(long checksum) {\n        this.checksum = checksum;\n    }\n\n    @Override\n    public int getParityCheck() {\n        return this.parityCheck;\n    }\n\n    @Override\n    public void setParityCheck(int parityCheck) {\n        this.parityCheck = parityCheck;\n    }\n\n    @Override\n    public VolumeFile fromUniformFile(CheckedFile file) {\n        this.name = file.getName();\n        this.size = file.size();\n        this.parityCheck = file.getParityCheck();\n        this.checksum = file.getChecksum();\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/UnifiedTransmitConstructor.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.TitanLocalSimpleVolume;\n\nimport com.pinecone.hydra.storage.volume.entity.local.simple.export.TitanSimpleExportEntity64;\n\nimport com.pinecone.hydra.storage.volume.entity.local.simple.recevice.TitanSimpleReceiveEntity64;\nimport com.pinecone.hydra.storage.volume.entity.local.spanned.TitanLocalSpannedVolume;\n\nimport com.pinecone.hydra.storage.volume.entity.local.spanned.export.TitanSpannedExportEntity64;\n\nimport com.pinecone.hydra.storage.volume.entity.local.spanned.receive.TitanSpannedReceiveEntity64;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanLocalStripedVolume;\n\nimport com.pinecone.hydra.storage.volume.entity.local.striped.export.TitanStripedExportEntity64;\n\nimport com.pinecone.hydra.storage.volume.entity.local.striped.receive.TitanStripedReceiveEntity64;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class UnifiedTransmitConstructor implements IUnifiedTransmitConstructor{\n\n    private Map< Class< ? extends LogicVolume >, Class< ? extends ReceiveEntity > > receiveMap = new HashMap<>();\n\n    private Map< Class< ? extends LogicVolume >, Class< ? 
extends ExporterEntity > > exportMap = new HashMap<>();\n\n    public UnifiedTransmitConstructor(){\n        this.receiveMap.put( TitanLocalSimpleVolume.class, TitanSimpleReceiveEntity64.class );\n        this.receiveMap.put( TitanLocalSpannedVolume.class, TitanSpannedReceiveEntity64.class );\n        this.receiveMap.put( TitanLocalStripedVolume.class, TitanStripedReceiveEntity64.class );\n\n        this.exportMap.put( TitanLocalSimpleVolume.class, TitanSimpleExportEntity64.class );\n        this.exportMap.put( TitanLocalSpannedVolume.class, TitanSpannedExportEntity64.class );\n        this.exportMap.put( TitanLocalStripedVolume.class, TitanStripedExportEntity64.class );\n    }\n\n    @Override\n    public ReceiveEntity getReceiveEntity(Class<? extends LogicVolume> volumeClass, Object... params) {\n        Class<? extends ReceiveEntity> receiveEntityClass = receiveMap.get(volumeClass);\n        if( receiveEntityClass == null ){\n            throw new IllegalArgumentException( \"Class not found.\" );\n        }\n\n        Constructor<? extends ReceiveEntity> receiveConstructor = this.findReceiveConstructor(receiveEntityClass, params);\n\n        try {\n            return receiveConstructor.newInstance( params );\n        } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {\n            throw new RuntimeException(e);\n        }\n    }\n\n    @Override\n    public ExporterEntity getExportEntity(Class<? extends LogicVolume> volumeClass, Object... params) {\n        Class<? extends ExporterEntity> exportEntityClass = exportMap.get(volumeClass);\n        if( exportEntityClass == null ){\n            throw new IllegalArgumentException( \"Class not found.\" );\n        }\n\n        Constructor<? 
extends ExporterEntity> exportConstructor = this.findExportConstructor(exportEntityClass, params);\n        try {\n            return exportConstructor.newInstance( params );\n        } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {\n            throw new RuntimeException(e);\n        }\n    }\n\n\n    private Constructor<? > searchConstructor( Class<? > clazz, Object... params ) {\n        for ( Constructor<?> constructor : clazz.getConstructors() ) {\n            if ( constructor.getParameterCount() == params.length ) {\n                boolean matches = true;\n                Class<?>[] parameterTypes = constructor.getParameterTypes();\n                for ( int i = 0; i < params.length; ++i ) {\n                    if ( !parameterTypes[ i ].isInstance(params[ i ]) ) {\n                        matches = false;\n                        break;\n                    }\n                }\n                if ( matches ) {\n                    return constructor;\n                }\n            }\n        }\n\n        return null;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    private Constructor<? extends ReceiveEntity> findReceiveConstructor( Class<? extends ReceiveEntity> clazz, Object... params ) {\n        return (Constructor<? extends ReceiveEntity>) this.searchConstructor( clazz, params );\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    private Constructor<? extends ExporterEntity> findExportConstructor(Class<? extends ExporterEntity> clazz, Object... params) {\n        return (Constructor<? extends ExporterEntity>) this.searchConstructor( clazz, params );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/UniformVolumeManager.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.file.transmit.UniformSourceLocator;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.MountPoint;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.TitanVolumeAllotment;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\nimport com.pinecone.hydra.storage.volume.entity.VolumeAllotment;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.entity.local.VolumeCapacity;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.KenusPool;\nimport com.pinecone.hydra.storage.volume.kvfs.ExecutorPool;\nimport com.pinecone.hydra.storage.volume.operator.TitanVolumeOperatorFactory;\nimport com.pinecone.hydra.storage.volume.operator.VolumeOperator;\nimport com.pinecone.hydra.storage.volume.source.LogicVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.MirroredVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.MountPointManipulator;\nimport com.pinecone.hydra.storage.volume.source.PhysicalVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.SQLiteVolumeManipulator;\nimport 
com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeAllocateManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.identifier.KOPathResolver;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.ArchKOMTree;\nimport com.pinecone.hydra.system.ko.kom.SimplePathSelector;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.sql.SQLException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Objects;\n\npublic class UniformVolumeManager extends ArchKOMTree implements VolumeManager {\n    protected VolumeAllotment                   volumeAllotment;\n    protected MirroredVolumeManipulator         mirroredVolumeManipulator;\n    protected MountPointManipulator             mountPointManipulator;\n    protected PhysicalVolumeManipulator         physicalVolumeManipulator;\n    protected SimpleVolumeManipulator           simpleVolumeManipulator;\n    protected SpannedVolumeManipulator          spannedVolumeManipulator;\n    protected StripedVolumeManipulator          stripedVolumeManipulator;\n    protected VolumeCapacityManipulator         
volumeCapacityManipulator;\n    protected VolumeMasterManipulator           volumeMasterManipulator;\n\n    protected VolumeAllocateManipulator         volumeAllocateManipulator;\n\n    protected SQLiteVolumeManipulator           sqliteVolumeManipulator;\n\n    protected LogicVolumeManipulator            primeLogicVolumeManipulator;\n\n    protected ExecutorPool                      kenusPool;\n\n    protected KenVolumeFileSystem               kenVolumeFileSystem;\n\n\n    public UniformVolumeManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, VolumeManager parent, String name, VolumeConfig config, String superiorPathScope, @Nullable GuidAllocator guidAllocator ) {\n        super( superiorProcess, masterManipulator, config, parent, name, superiorPathScope, guidAllocator );\n        this.volumeMasterManipulator       =   ( VolumeMasterManipulator ) masterManipulator;\n        this.pathResolver                  =   new KOPathResolver( this.kernelObjectConfig );\n\n        this.volumeAllotment               =   new TitanVolumeAllotment( this,this.volumeMasterManipulator );\n        this.mirroredVolumeManipulator     =   this.volumeMasterManipulator.getMirroredVolumeManipulator();\n        this.mountPointManipulator         =   this.volumeMasterManipulator.getMountPointManipulator();\n        this.physicalVolumeManipulator     =   this.volumeMasterManipulator.getPhysicalVolumeManipulator();\n        this.simpleVolumeManipulator       =   this.volumeMasterManipulator.getSimpleVolumeManipulator();\n        this.spannedVolumeManipulator      =   this.volumeMasterManipulator.getSpannedVolumeManipulator();\n        this.stripedVolumeManipulator      =   this.volumeMasterManipulator.getStripedVolumeManipulator();\n        this.volumeCapacityManipulator     =   this.volumeMasterManipulator.getVolumeCapacityManipulator();\n        this.volumeAllocateManipulator     =   this.volumeMasterManipulator.getVolumeAllocateManipulator();\n        
this.sqliteVolumeManipulator       =   this.volumeMasterManipulator.getSQLiteVolumeManipulator();\n        this.primeLogicVolumeManipulator   =   this.volumeMasterManipulator.getPrimeLogicVolumeManipulator();\n\n        this.kenusPool                     =   new KenusPool();\n        this.pathSelector                  =   new SimplePathSelector(\n                this.pathResolver, this.imperialTree, this.primeLogicVolumeManipulator, new GUIDNameManipulator[] {}\n        );\n        this.kenVolumeFileSystem           =   new KenVolumeFileSystem(this);\n        this.operatorFactory               =   new TitanVolumeOperatorFactory( this, this.volumeMasterManipulator );\n    }\n\n    public UniformVolumeManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, VolumeManager parent, String name, VolumeConfig config ) {\n        this( superiorProcess, masterManipulator, parent, name, config, CascadeInstrument.EmptySuperiorPathScope, null );\n    }\n\n    public UniformVolumeManager( Processum superiorProcess, KOIMasterManipulator masterManipulator, VolumeConfig config ) {\n        this( superiorProcess, masterManipulator, null, VolumeManager.class.getSimpleName(), config );\n    }\n\n    public UniformVolumeManager( KOIMappingDriver driver, VolumeManager parent, String name, VolumeConfig config ){\n        this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name, config );\n    }\n\n    public UniformVolumeManager( KOIMappingDriver driver, VolumeConfig config ) {\n        this( driver.getSuperiorProcess(), driver.getMasterManipulator(), config );\n    }\n\n    @Override\n    public VolumeConfig getConfig() {\n        return (VolumeConfig) super.getConfig();\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.guidAllocator;\n    }\n\n    @Override\n    public ImperialTree getMasterTrieTree() {\n        return this.imperialTree;\n    }\n\n    @Override\n    public KenVolumeFileSystem 
getKVFSystem() {\n        return this.kenVolumeFileSystem;\n    }\n\n    public VolumeAllotment getVolumeAllotment(){\n        return this.volumeAllotment;\n    }\n\n    protected String getNS( GUID guid, String szSeparator ){\n        String path = this.imperialTree.getCachePath(guid);\n        if ( path != null ) {\n            return path;\n        }\n\n        ImperialTreeNode node = this.imperialTree.getNode(guid);\n        String assemblePath = this.getNodeName(node);\n        while ( !node.getParentGUIDs().isEmpty() && this.allNonNull( node.getParentGUIDs() ) ){\n            List<GUID> parentGuids = node.getParentGUIDs();\n            for( int i = 0; i < parentGuids.size(); ++i ){\n                if ( parentGuids.get(i) != null ){\n                    node = this.imperialTree.getNode( parentGuids.get(i) );\n                    break;\n                }\n            }\n            String nodeName = this.getNodeName(node);\n            assemblePath = nodeName + szSeparator + assemblePath;\n        }\n        this.imperialTree.insertCachePath( guid, assemblePath );\n        return assemblePath;\n    }\n\n    @Override\n    public String getPath( GUID guid ) {\n        return this.getNS( guid, this.kernelObjectConfig.getPathNameSeparator() );\n    }\n\n    @Override\n    public String getFullName( GUID guid ) {\n        return this.getNS( guid, this.kernelObjectConfig.getFullNameSeparator() );\n    }\n\n    @Override\n    public GUID queryGUIDByFN( String fullName ) {\n        return null;\n    }\n\n    @Override\n    public GUID put( TreeNode treeNode ) {\n        TreeNodeOperator operator = this.operatorFactory.getOperator( this.getVolumeMetaType( treeNode ) );\n        return operator.insert( treeNode );\n    }\n\n    protected TreeNodeOperator getOperatorByGuid( GUID guid ) {\n        ImperialTreeNode node = this.imperialTree.getNode( guid );\n        TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? 
>[]{this.getClass()}, this );\n        return this.operatorFactory.getOperator( this.getVolumeMetaType( newInstance ) );\n    }\n\n    @Override\n    public LogicVolume get( GUID guid ) {\n        return (LogicVolume) this.getOperatorByGuid( guid ).get( guid );\n    }\n\n    @Override\n    public void update(LogicVolume logicVolume) {\n        TreeNodeOperator operator = this.getOperatorByGuid(logicVolume.getGuid());\n        operator.update( logicVolume );\n    }\n\n    @Override\n    public void updateVolumeUsedSize(GUID guid, VolumeCapacity volumeCapacity) {\n        this.volumeCapacityManipulator.update( guid, volumeCapacity.getUsedSize().longValue() );\n    }\n\n    @Override\n    public void updatePhysical(PhysicalVolume physicalVolume) {\n        this.physicalVolumeManipulator.update( physicalVolume );\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void remove(GUID guid) {\n        GUIDImperialTrieNode node = this.imperialTree.getNode( guid );\n        TreeNode newInstance = (TreeNode)node.getType().newInstance();\n        TreeNodeOperator operator = this.operatorFactory.getOperator( this.getVolumeMetaType( newInstance ) );\n        operator.purge( guid );\n    }\n\n    @Override\n    public void remove(String path) {\n\n    }\n\n    //protected ReentrantLock reentrantLock = new ReentrantLock();\n\n    @Override\n    public PhysicalVolume getPhysicalVolume(GUID guid) {\n//        this.reentrantLock.lock();\n//        try{\n            //Debug.trace( Thread.currentThread().getName(), Thread.currentThread().getId() );\n            PhysicalVolume physicalVolume   = this.physicalVolumeManipulator.getPhysicalVolume(guid);\n            if( physicalVolume == null ){\n                return null;\n            }\n            MountPoint mountPoint           = 
this.mountPointManipulator.getMountPointByVolumeGuid(guid);\n            VolumeCapacity64 volumeCapacity = this.volumeCapacityManipulator.getVolumeCapacity(guid);\n            physicalVolume.setMountPoint( mountPoint );\n            physicalVolume.setVolumeCapacity( volumeCapacity );\n            physicalVolume.applyVolumeManage( this );\n            return physicalVolume;\n//        }\n//        finally {\n//            this.reentrantLock.unlock();\n//        }\n    }\n\n    @Override\n    public SimpleVolume getPhysicalVolumeParent(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public List<TreeNode> getChildren(GUID guid) {\n        return super.getChildren( guid );\n    }\n\n    @Override\n    public Object queryEntityHandleByNS(String path, String szBadSep, String szTargetSep) {\n        return null;\n    }\n\n    @Override\n    public EntityNode queryNode(String path) {\n        return null;\n    }\n\n    @Override\n    public List<? extends TreeNode> fetchRoot() {\n        return null;\n    }\n\n    @Override\n    public void rename(GUID guid, String name) {\n\n    }\n\n    private boolean allNonNull( List<?> list ) {\n        return list.stream().noneMatch( Objects::isNull );\n    }\n\n    @Override\n    public GUID insertPhysicalVolume(PhysicalVolume physicalVolume) {\n        GUID guid = physicalVolume.getGuid();\n        VolumeCapacity64 volumeCapacity = physicalVolume.getVolumeCapacity();\n        if( volumeCapacity.getVolumeGuid() == null ){\n            volumeCapacity.setVolumeGuid( guid );\n        }\n\n        MountPoint mountPoint = physicalVolume.getMountPoint();\n        if( mountPoint.getVolumeGuid() == null ){\n            mountPoint.setVolumeGuid( guid );\n        }\n\n\n        this.physicalVolumeManipulator.insert( physicalVolume );\n        this.volumeCapacityManipulator.insert( volumeCapacity );\n        this.mountPointManipulator.insert( mountPoint );\n        return guid;\n    }\n\n    @Override\n    public void 
purgePhysicalVolume(GUID guid) {\n        this.physicalVolumeManipulator.remove( guid );\n        this.volumeCapacityManipulator.remove( guid );\n        this.mountPointManipulator.removeByVolumeGuid( guid );\n    }\n\n    @Override\n    public void insertAllocate(GUID objectGuid, GUID childVolumeGuid, GUID parentVolumeGuid) {\n        this.volumeAllocateManipulator.insert( objectGuid, childVolumeGuid, parentVolumeGuid);\n    }\n\n    @Override\n    public PhysicalVolume getSmallestCapacityPhysicalVolume() {\n        PhysicalVolume smallestCapacityPhysicalVolume = this.physicalVolumeManipulator.getSmallestCapacityPhysicalVolume();\n        return this.getPhysicalVolume( smallestCapacityPhysicalVolume.getGuid() );\n    }\n\n\n    @Override\n    public VolumeMasterManipulator getMasterManipulator() {\n        return this.volumeMasterManipulator;\n    }\n\n    @Override\n    public void storageExpansion(GUID parentGuid, GUID childGuid) {\n        this.treeMasterManipulator.getTrieTreeManipulator().addChild( childGuid, parentGuid );\n    }\n\n    @Override\n    public Hydrogen getHydrogen() {\n        return this.hydrogen;\n    }\n\n    @Override\n    public ExecutorPool getKenusPool() {\n        return this.kenusPool;\n    }\n\n    @Override\n    public List<Volume> queryAllVolumes() {\n        List<Volume> physicalVolumes = this.physicalVolumeManipulator.queryAllPhysicalVolumes();\n        List<Volume> simpleVolumes = this.simpleVolumeManipulator.queryAllSimpleVolumes();\n        List<Volume> spannedVolumes = this.spannedVolumeManipulator.queryAllSpannedVolume();\n        List<Volume> stripedVolumes = this.stripedVolumeManipulator.queryAllStripedVolume();\n\n        List<Volume> fullPhysicalVolumes = new ArrayList<>();\n        List<Volume> fullSimpleVolumes = new ArrayList<>();\n        List<Volume> fullSpannedVolumes = new ArrayList<>();\n        List<Volume> fullStripedVolumes = new ArrayList<>();\n        for( Volume volume : physicalVolumes ){\n            
PhysicalVolume physicalVolume = this.getPhysicalVolume(volume.getGuid());\n            fullPhysicalVolumes.add(physicalVolume);\n        }\n        for( Volume volume : simpleVolumes ){\n            LogicVolume logicVolume = this.get(volume.getGuid());\n            fullSimpleVolumes.add(logicVolume);\n        }\n        for( Volume volume : spannedVolumes ){\n            LogicVolume logicVolume = this.get(volume.getGuid());\n            fullSpannedVolumes.add(logicVolume);\n        }\n        for( Volume volume : stripedVolumes ){\n            LogicVolume logicVolume = this.get(volume.getGuid());\n            fullStripedVolumes.add(logicVolume);\n        }\n\n        ArrayList<Volume> volumes = new ArrayList<>();\n        volumes.addAll( fullPhysicalVolumes );\n        volumes.addAll(fullSimpleVolumes);\n        volumes.addAll(fullSpannedVolumes);\n        volumes.addAll(fullStripedVolumes);\n        return volumes;\n    }\n\n    @Override\n    public List<Volume> listLogicVolumes() {\n        List<Volume> simpleVolumes = this.simpleVolumeManipulator.queryAllSimpleVolumes();\n        List<Volume> spannedVolumes = this.spannedVolumeManipulator.queryAllSpannedVolume();\n        List<Volume> stripedVolumes = this.stripedVolumeManipulator.queryAllStripedVolume();\n        ArrayList<Volume> volumes = new ArrayList<>();\n        volumes.addAll(simpleVolumes);\n        volumes.addAll(spannedVolumes);\n        volumes.addAll(stripedVolumes);\n        return volumes;\n    }\n\n    @Override\n    public List<Volume> listPhysicsVolumes() {\n        List<Volume> physicalVolumes = this.physicalVolumeManipulator.queryAllPhysicalVolumes();\n        ArrayList<Volume> volumes = new ArrayList<>();\n        for( Volume volume : physicalVolumes ){\n            PhysicalVolume physicalVolume = this.getPhysicalVolume(volume.getGuid());\n            volumes.add(physicalVolume);\n        }\n        return new ArrayList<>(volumes);\n    }\n\n    @Override\n    public void 
removeStorageObject( Cluster cluster ) throws SQLException {\n        LocalCluster localCluster = (LocalCluster) cluster;\n\n        String sourceName = localCluster.getSourceName();\n        UniformSourceLocator uniformSourceLocator = JSON.unmarshal(sourceName, UniformSourceLocator.class);\n        LogicVolume volume = this.get(GUIDs.GUID128(uniformSourceLocator.getVolumeGuid()));\n\n        VolumeOperator operator = (VolumeOperator) this.getOperatorByGuid(volume.getGuid());\n        operator.removeStorageObject( volume.getGuid(), cluster.getSegGuid(), cluster.getSize() );\n    }\n\n    @Override\n    public void removeStorageObject(GUID volumeGuid, GUID storageGuid, long size) {\n        VolumeOperator operator = (VolumeOperator) this.getOperatorByGuid(volumeGuid);\n        operator.removeStorageObject( volumeGuid, storageGuid, size );\n    }\n\n    private String getNodeName(ImperialTreeNode node ){\n        UOI type = node.getType();\n        TreeNode newInstance = (TreeNode)type.newInstance();\n        TreeNodeOperator operator = this.operatorFactory.getOperator(this.getVolumeMetaType( newInstance ));\n        TreeNode treeNode = operator.get(node.getGuid());\n        return treeNode.getName();\n    }\n\n    private String getVolumeMetaType( TreeNode treeNode ){\n        return treeNode.className().replace(\"Titan\",\"\");\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumeConfig.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.hydra.storage.StorageConfig;\n\npublic interface VolumeConfig extends StorageConfig {\n    String getVersionSignature();\n\n    Number getTinyFileStripSizing() ;\n\n    Number getSmallFileStripSizing() ;\n\n    Number getMegaFileStripSizing() ;\n\n    Number getDefaultStripSize() ;\n\n    int getStripResidentCacheAllotRatio();\n\n    String getStorageObjectExtension();\n\n    String getSqliteFileExtension();\n\n    String getPathSeparator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumeConstants.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\npublic final class VolumeConstants {\n    public static final Number TinyFileStripSizing              =       512 * 1024L;  // 512 KB\n    public static final Number SmallFileStripSizing             =  4 * 1024 * 1024L;  //   4 MB\n    public static final Number MegaFileStripSizing              = 10 * 1024 * 1024L;  //  10 MB\n    public static final Number DefaultStripSize                 = VolumeConstants.MegaFileStripSizing;\n    public static final int    StripResidentCacheAllotRatio     = 2;\n    public static final String StorageObjectExtension           = \".storage\";\n    public static final String SqliteFileExtension              = \".db\";\n    public static final String PathSeparator                    = \"/\";\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumeFile.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.hydra.storage.CheckedFile;\n\npublic interface VolumeFile extends CheckedFile {\n    VolumeFile fromUniformFile( CheckedFile file );\n}\n\n\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumeManager.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\nimport com.pinecone.hydra.storage.volume.entity.local.VolumeCapacity;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.ExecutorPool;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\n\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic interface VolumeManager extends KOMInstrument {\n    @Override\n    LogicVolume get( GUID guid );\n\n    void update( LogicVolume logicVolume );\n\n    void updateVolumeUsedSize(GUID guid, VolumeCapacity volumeCapacity);\n\n    void updatePhysical( PhysicalVolume physicalVolume );\n\n    @Override\n    VolumeConfig getConfig();\n\n    PhysicalVolume getPhysicalVolume( GUID guid );\n    SimpleVolume   getPhysicalVolumeParent( GUID guid );\n\n\n    GUID insertPhysicalVolume( PhysicalVolume physicalVolume );\n\n    void purgePhysicalVolume( GUID guid );\n\n    void insertAllocate( GUID objectGuid, GUID childVolumeGuid, GUID parentVolumeGuid );\n\n    PhysicalVolume getSmallestCapacityPhysicalVolume();\n\n    VolumeMasterManipulator  getMasterManipulator();\n\n    void storageExpansion( GUID parentGuid, GUID childGuid );\n\n    Hydrogen getHydrogen();\n\n    ExecutorPool getKenusPool();\n\n    List<Volume> queryAllVolumes();\n\n    List<Volume> listLogicVolumes();\n\n    List<Volume> listPhysicsVolumes();\n\n    void removeStorageObject(Cluster cluster) throws SQLException;\n\n    void removeStorageObject( GUID volumeGuid, GUID 
storageGuid, long size );\n\n    KenVolumeFileSystem getKVFSystem();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/VolumePoliceDog.java",
    "content": "package com.pinecone.hydra.storage.volume;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface VolumePoliceDog extends Pinenut {\n    GUID simpleDfsSearch(String path,String szSeparator);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ArchExportEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\n\npublic abstract  class ArchExportEntity implements ExporterEntity{\n    protected VolumeManager volumeManager;\n\n    protected StorageExportIORequest storageExportIORequest;\n\n    protected Chanface channel;\n\n    public ArchExportEntity(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel){\n        this.volumeManager = volumeManager;\n        this.storageExportIORequest = storageExportIORequest;\n        this.channel = channel;\n    }\n    @Override\n    public VolumeManager getVolumeManager() {\n        return this.volumeManager;\n    }\n\n    @Override\n    public void setVolumeManager(VolumeManager volumeManager) {\n        this.volumeManager = volumeManager;\n    }\n\n    @Override\n    public StorageExportIORequest getStorageIORequest() {\n        return this.storageExportIORequest;\n    }\n\n    @Override\n    public void setStorageIORequest(StorageExportIORequest storageExportIORequest) {\n        this.storageExportIORequest = storageExportIORequest;\n    }\n\n    @Override\n    public Chanface getChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public void setChannel(Chanface channel) {\n        this.channel = channel;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ArchLogicVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.sql.SQLException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\n\npublic abstract class ArchLogicVolume extends ArchVolume implements LogicVolume{\n\n    protected List<LogicVolume>            children;\n\n    protected VolumeCapacity64             volumeCapacity;\n\n\n    public ArchLogicVolume(VolumeManager volumeManager) {\n        super(volumeManager);\n        this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager );\n    }\n\n    public ArchLogicVolume(){}\n\n\n\n    @Override\n    public List<LogicVolume> queryChildren() {\n        if ( this.children == null || this.children.isEmpty() ){\n            ArrayList<LogicVolume> logicVolumes = new ArrayList<>();\n            Collection<TreeNode> nodes = this.volumeManager.getChildren( this.guid );\n            for( TreeNode node : nodes ){\n                LogicVolume volume = this.volumeManager.get(node.getGuid());\n                logicVolumes.add( volume );\n            }\n            this.children = logicVolumes;\n        }\n        return this.children;\n    }\n\n    @Override\n    public void setChildren(List<LogicVolume> children) {\n        this.children = children;\n    }\n\n    @Override\n    public VolumeCapacity64 getVolumeCapacity() {\n        return this.volumeCapacity;\n    }\n\n    @Override\n    public void setVolumeCapacity(VolumeCapacity64 volumeCapacity) {\n        this.volumeCapacity = volumeCapacity;\n    }\n\n    @Override\n    public SQLiteExecutor getSQLiteExecutor() throws SQLException {\n        VolumeConfig config = 
this.volumeManager.getConfig();\n        GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.getGuid());\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid);\n        String url = physicalVolume.getMountPoint().getMountPoint()+ config.getPathSeparator() +this.getGuid()+config.getSqliteFileExtension();\n        return (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ArchReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\n\npublic abstract class ArchReceiveEntity implements ReceiveEntity{\n    protected VolumeManager volumeManager;\n\n    protected StorageReceiveIORequest storageReceiveIORequest;\n\n    protected Chanface channel;\n\n\n    public ArchReceiveEntity(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel){\n        this.volumeManager = volumeManager;\n        this.storageReceiveIORequest = storageReceiveIORequest;\n        this.channel = channel;\n    }\n\n\n    @Override\n    public VolumeManager getVolumeManager() {\n        return this.volumeManager;\n    }\n\n    @Override\n    public void setVolumeManager(VolumeManager volumeManager) {\n        this.volumeManager = volumeManager;\n    }\n\n    @Override\n    public StorageReceiveIORequest getReceiveStorageObject() {\n        return this.storageReceiveIORequest;\n    }\n\n    @Override\n    public void setReceiveStorageObject(StorageReceiveIORequest storageReceiveIORequest) {\n        this.storageReceiveIORequest = storageReceiveIORequest;\n    }\n\n    @Override\n    public Chanface getKChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public void setKChannel(Chanface channel) {\n        this.channel = channel;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ArchVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\n\nimport java.time.LocalDateTime;\n\npublic abstract class ArchVolume implements Volume{\n    protected long                    enumId;\n    protected GUID                    guid;\n    protected LocalDateTime           createTime;\n    protected LocalDateTime           updateTime;\n    protected String                  name;\n    protected String                  type;\n    protected String                  extConfig;\n    protected VolumeManager           volumeManager;\n    protected VolumeCapacity64        volumeCapacity;\n    protected OnVolumeFileSystem      kenVolumeFileSystem;\n\n    public ArchVolume( VolumeManager volumeManager ){\n        this.volumeManager = volumeManager;\n        this.guid = volumeManager.getGuidAllocator().nextGUID();\n        this.createTime = LocalDateTime.now();\n        this.updateTime = LocalDateTime.now();\n        this. 
kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager );\n    }\n\n    public ArchVolume(){}\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        this.updateTime = updateTime;\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public String getType() {\n        return this.type;\n    }\n\n    @Override\n    public void setType(String type) {\n        this.type = type;\n    }\n\n    @Override\n    public VolumeCapacity64 getVolumeCapacity() {\n        return this.volumeCapacity;\n    }\n\n    @Override\n    public void setVolumeCapacity(VolumeCapacity64 volumeCapacity) {\n        this.volumeCapacity = volumeCapacity;\n    }\n\n    @Override\n    public String getExtConfig() {\n        return this.extConfig;\n    }\n\n    @Override\n    public void setExtConfig(String extConfig) {\n        this.extConfig = extConfig;\n    }\n\n    @Override\n    public void setKenVolumeFileSystem() {\n        this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager );\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/DirectReceiver.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\npublic interface DirectReceiver extends Receiver {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/Exporter.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Exporter extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ExporterEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic interface ExporterEntity extends Pinenut {\n    VolumeManager getVolumeManager();\n    void setVolumeManager(VolumeManager volumeManager);\n\n   StorageExportIORequest getStorageIORequest();\n   void setStorageIORequest(StorageExportIORequest storageExportIORequest);\n\n    StorageIOResponse export() throws IOException;\n    StorageIOResponse export( Number offset, Number endSize ) throws IOException;\n    StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException;\n\n    Chanface getChannel();\n    void setChannel( Chanface channel );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/LogicVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic interface LogicVolume extends Volume, TreeNode {\n    String getName();\n\n    void setName( String name );\n\n    List<LogicVolume> queryChildren();\n\n    void setChildren( List<LogicVolume> children );\n\n    VolumeCapacity64 getVolumeCapacity();\n\n    void setVolumeCapacity( VolumeCapacity64 volumeCapacity );\n\n    void extendLogicalVolume( GUID physicalGuid );\n    List< GUID > listPhysicalVolume();\n\n    default MirroredVolume evinceMirroredVolume(){\n        return null;\n    }\n    default SimpleVolume   evinceSimpleVolume(){\n        return null;\n    }\n    default SpannedVolume  evinceSpannedVolume(){\n        return null;\n    }\n    default StripedVolume  evinceStripeVolume(){\n        return null;\n    }\n    void setVolumeTree( VolumeManager volumeManager);\n\n\n    StorageIOResponse receive( ReceiveEntity entity ) throws IOException;\n    StorageIOResponse receive( ReceiveEntity entity, Number offset, Number endSize ) throws IOException;\n    StorageIOResponse randomReceive( ReceiveEntity entity, Number offset, Number endSize ) throws IOException;\n    StorageIOResponse receive( ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer ) throws IOException;\n\n    StorageIOResponse export( ExporterEntity entity ) throws IOException;\n    //敬请期待\n    StorageIOResponse export( ExporterEntity entity, Number offset, Number endSize ) throws IOException;\n    StorageIOResponse export( ExporterEntity 
entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer ) throws UIOException;\n\n    StorageIOResponse export( ExporterEntity entity, boolean accessRandom ) throws UIOException;\n    //敬请期待\n    StorageIOResponse export( ExporterEntity entity, Number offset, Number endSize, boolean accessRandom );\n    StorageIOResponse export( ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom ) throws UIOException;\n\n\n    boolean existStorageObject( GUID storageObject ) throws SQLException;\n\n    void build() throws SQLException;\n\n    void storageExpansion( GUID volumeGuid );\n\n    SQLiteExecutor getSQLiteExecutor() throws SQLException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/MirroredVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\npublic interface MirroredVolume extends LogicVolume{\n    @Override\n    default MirroredVolume evinceMirroredVolume() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/MountPoint.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface MountPoint extends Pinenut {\n    long getEnumId();\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime( LocalDateTime updateTime );\n\n    String getName();\n    void setName(String name);\n\n    GUID getVolumeGuid();\n    void setVolumeGuid( GUID volumeGuid );\n\n    String getMountPoint();\n    void setMountPoint( String mountPoint );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/PhysicalVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic interface PhysicalVolume extends Volume{\n    MountPoint getMountPoint();\n    void setMountPoint( MountPoint mountPoint );\n\n    GUID getParent();\n\n    void applyVolumeManage( VolumeManager volumeManager );\n\n    StorageIOResponse channelReceive(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel ) throws UIOException;\n    StorageIOResponse channelReceive(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, Number offset, Number endSize ) throws IOException;\n\n    StorageIOResponse channelExport(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel ) throws IOException;\n    StorageIOResponse channelRaid0Export(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws IOException;\n\n    StorageIOResponse receive( ReceiveEntity entity ) throws IOException;\n    StorageIOResponse receive( ReceiveEntity entity, Number offset, Number endSize ) throws IOException;\n    StorageIOResponse randomReceive( ReceiveEntity entity,Number offset, Number endSize) throws IOException;\n\n    StorageIOResponse receive( ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer ) throws IOException;\n\n    StorageIOResponse export( ExporterEntity entity ) throws IOException;\n    //敬请期待\n    
StorageIOResponse export( ExporterEntity entity, Number offset, Number endSize );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/ReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic interface ReceiveEntity extends Pinenut {\n    VolumeManager getVolumeManager();\n    void setVolumeManager(VolumeManager volumeManager);\n\n    StorageReceiveIORequest getReceiveStorageObject();\n    void setReceiveStorageObject( StorageReceiveIORequest storageReceiveIORequest);\n\n    Chanface getKChannel();\n    void setKChannel( Chanface channel);\n\n    StorageIOResponse receive() throws IOException;\n\n    StorageIOResponse receive(Number offset, Number endSize ) throws IOException;\n\n    StorageIOResponse randomReceive( Number offset, Number endSize ) throws IOException;\n\n    StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer ) throws IOException;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/Receiver.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\n\nimport java.io.IOException;\n\npublic interface Receiver extends Pinenut {\n    StorageIOResponse receive(Chanface chanface) throws IOException;\n    StorageIOResponse receive(Chanface chanface,Number offset, Number endSize) throws IOException;\n    StorageIOResponse randomReceive(Chanface chanface,Number offset, Number endSize) throws IOException;\n    StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException;\n    StorageIOResponse receive(RandomAccessChanface randomAccessChanface,Number offset, Number endSize) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/SimpleVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport java.sql.SQLException;\n\npublic interface SimpleVolume extends LogicVolume{\n    @Override\n    default SimpleVolume evinceSimpleVolume() {\n        return this;\n    }\n\n     void assembleSQLiteExecutor() throws SQLException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/SpannedVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\n\npublic interface SpannedVolume extends LogicVolume{\n    @Override\n    default SpannedVolume evinceSpannedVolume() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/StripedReceiver.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\npublic interface StripedReceiver extends Receiver {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/StripedVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\npublic interface StripedVolume extends LogicVolume{\n    @Override\n    default StripedVolume evinceStripeVolume() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/TitanMountPoint.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.source.MountPointManipulator;\n\nimport java.time.LocalDateTime;\n\npublic class TitanMountPoint implements MountPoint{\n    protected long                    enumId;\n    protected GUID                    guid;\n    protected LocalDateTime           createTime;\n    protected LocalDateTime           updateTime;\n    protected String                  name;\n    protected GUID                    volumeGuid;\n    protected String                  mountPoint;\n    protected VolumeManager volumeManager;\n    protected MountPointManipulator   mountPointManipulator;\n\n    public TitanMountPoint(VolumeManager volumeManager, MountPointManipulator mountPointManipulator ){\n        this.volumeManager = volumeManager;\n        this.mountPointManipulator      =   mountPointManipulator;\n        this.guid                       =   volumeManager.getGuidAllocator().nextGUID();\n        this.createTime                 =   LocalDateTime.now();\n        this.updateTime                 =   LocalDateTime.now();\n    }\n    public TitanMountPoint(){}\n\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.createTime;\n    }\n\n    @Override\n    public void setCreateTime(LocalDateTime createTime) {\n        this.createTime = createTime;\n    }\n\n    @Override\n    public LocalDateTime getUpdateTime() {\n        return this.updateTime;\n    }\n\n    @Override\n    public void setUpdateTime(LocalDateTime updateTime) {\n        
this.updateTime = updateTime;\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public GUID getVolumeGuid() {\n        return this.volumeGuid;\n    }\n\n    @Override\n    public void setVolumeGuid(GUID volumeGuid) {\n        this.volumeGuid = volumeGuid;\n    }\n\n    @Override\n    public String getMountPoint() {\n        return this.mountPoint;\n    }\n\n    @Override\n    public void setMountPoint(String mountPoint) {\n        this.mountPoint = mountPoint;\n    }\n    public void setMountPointManipulator( MountPointManipulator mountPointManipulator ){\n        this.mountPointManipulator = mountPointManipulator;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/TitanVolumeAllotment.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.physical.TitanLocalPhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.TitanLocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.spanned.TitanLocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanLocalStripedVolume;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\n\npublic class TitanVolumeAllotment implements VolumeAllotment{\n    private VolumeManager volumeManager;\n    private VolumeMasterManipulator masterManipulator;\n\n    public TitanVolumeAllotment(VolumeManager volumeManager, VolumeMasterManipulator volumeMasterManipulator ){\n        this.volumeManager = volumeManager;\n        this.masterManipulator= volumeMasterManipulator;\n    }\n    @Override\n    public VolumeCapacity64 newVolumeCapacity() {\n        return new TitanVolumeCapacity64( this.volumeManager,this.masterManipulator.getVolumeCapacityManipulator() );\n    }\n\n    @Override\n    public LocalStripedVolume newLocalStripedVolume() {\n        return new TitanLocalStripedVolume( this.volumeManager, this.masterManipulator.getStripedVolumeManipulator() );\n    }\n\n    @Override\n    public LocalSpannedVolume newLocalSpannedVolume() {\n        return new TitanLocalSpannedVolume( this.volumeManager, this.masterManipulator.getSpannedVolumeManipulator() );\n    }\n\n    @Override\n    public LocalSimpleVolume newLocalSimpleVolume() {\n        return new TitanLocalSimpleVolume( this.volumeManager, 
this.masterManipulator.getSimpleVolumeManipulator() );\n    }\n\n    @Override\n    public LocalPhysicalVolume newLocalPhysicalVolume() {\n        return new TitanLocalPhysicalVolume( this.volumeManager, this.masterManipulator.getPhysicalVolumeManipulator() );\n    }\n\n    @Override\n    public MountPoint newMountPoint() {\n        return new TitanMountPoint( this.volumeManager, this.masterManipulator.getMountPointManipulator() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/TitanVolumeCapacity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator;\n\npublic class TitanVolumeCapacity64 implements VolumeCapacity64 {\n    private GUID                        volumeGuid;\n    private long                        definitionCapacity;\n    private long                        usedSize;\n    private long                        quotaCapacity;\n    private VolumeManager               volumeManager;\n    private VolumeCapacityManipulator   volumeCapacityManipulator;\n\n    public TitanVolumeCapacity64(VolumeManager volumeManager, VolumeCapacityManipulator volumeCapacityManipulator ){\n        this.volumeManager = volumeManager;\n        this.volumeCapacityManipulator = volumeCapacityManipulator;\n    }\n\n    public TitanVolumeCapacity64( GUID volumeGuid, long definitionCapacity, long usedSize, long quotaCapacity ){\n        this.volumeGuid = volumeGuid;\n        this.definitionCapacity = definitionCapacity;\n        this.usedSize = usedSize;\n        this.quotaCapacity = quotaCapacity;\n    }\n\n\n    @Override\n    public Long getDefinitionCapacity() {\n        return this.definitionCapacity;\n    }\n\n    @Override\n    public void setDefinitionCapacity( Number definitionCapacity ) {\n        this.definitionCapacity = definitionCapacity.longValue();\n    }\n\n    @Override\n    public Long getUsedSize() {\n        return this.usedSize;\n    }\n\n    @Override\n    public GUID getVolumeGuid() {\n        return this.volumeGuid;\n    }\n\n    @Override\n    public void setVolumeGuid( GUID volumeGuid ) {\n        this.volumeGuid = volumeGuid;\n    }\n\n    @Override\n    public void setUsedSize( Number usedSize ) {\n        this.usedSize = usedSize.longValue();\n    }\n\n    @Override\n    public Long 
getQuotaCapacity() {\n        return this.quotaCapacity;\n    }\n\n    @Override\n    public void setQuotaCapacity( Number quotaCapacity ) {\n        this.quotaCapacity = quotaCapacity.longValue();\n    }\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/Volume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface Volume extends Pinenut {\n    long getEnumId();\n\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    LocalDateTime getCreateTime();\n    void setCreateTime( LocalDateTime createTime );\n\n    LocalDateTime getUpdateTime();\n    void setUpdateTime( LocalDateTime updateTime );\n\n    String getName();\n    void setName(String name);\n\n    String getType();\n    void setType( String type );\n\n    String getExtConfig();\n    void setExtConfig( String extConfig );\n\n    VolumeCapacity64 getVolumeCapacity();\n    void setVolumeCapacity( VolumeCapacity64 volumeCapacity );\n    void setKenVolumeFileSystem();\n\n    void deductCapacity( long deductCapacity );\n\n    void increaseCapacity( long increaseCapacity );\n\n    boolean checkCapacity( long size );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/VolumeAllotment.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;\n\npublic interface VolumeAllotment extends Pinenut {\n    VolumeCapacity64 newVolumeCapacity();\n    LocalStripedVolume      newLocalStripedVolume();\n    LocalSpannedVolume      newLocalSpannedVolume();\n    LocalSimpleVolume       newLocalSimpleVolume();\n    LocalPhysicalVolume     newLocalPhysicalVolume();\n    MountPoint              newMountPoint();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/VolumeCapacity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity;\n\nimport com.pinecone.hydra.storage.volume.entity.local.VolumeCapacity;\n\npublic interface VolumeCapacity64 extends VolumeCapacity {\n    @Override\n    Long getDefinitionCapacity();\n\n    @Override\n    void setDefinitionCapacity( Number definitionCapacity );\n\n    @Override\n    Long getUsedSize();\n\n    @Override\n    void setUsedSize( Number usedSize );\n\n    @Override\n    Long getQuotaCapacity();\n\n    @Override\n    void setQuotaCapacity( Number quotaCapacity );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalMirroredVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local;\n\nimport com.pinecone.hydra.storage.volume.entity.MirroredVolume;\n\npublic interface LocalMirroredVolume extends MirroredVolume {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalPhysicalVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local;\n\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\n\npublic interface LocalPhysicalVolume extends PhysicalVolume {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalSimpleVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local;\n\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\n\npublic interface LocalSimpleVolume extends SimpleVolume {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalSpannedVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local;\n\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\n\npublic interface LocalSpannedVolume extends SpannedVolume {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/LocalStripedVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local;\n\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\n\npublic interface LocalStripedVolume extends StripedVolume {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/VolumeCapacity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface VolumeCapacity extends Pinenut {\n    GUID getVolumeGuid();\n\n    void setVolumeGuid( GUID volumeGuid );\n\n    Number getDefinitionCapacity();\n\n    void setDefinitionCapacity( Number definitionCapacity );\n\n    Number getUsedSize();\n\n    void setUsedSize( Number usedSize );\n\n    Number getQuotaCapacity();\n\n    void setQuotaCapacity( Number quotaCapacity );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/mirrored/TitanLocalMirroredVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.mirrored;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchLogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalMirroredVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.source.MirroredVolumeManipulator;\n\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic class TitanLocalMirroredVolume extends ArchLogicVolume implements LocalMirroredVolume {\n    private MirroredVolumeManipulator mirroredVolumeManipulator;\n\n    public void setMirroredVolumeManipulator( MirroredVolumeManipulator mirroredVolumeManipulator ){\n        this.mirroredVolumeManipulator = mirroredVolumeManipulator;\n    }\n\n    public TitanLocalMirroredVolume(VolumeManager volumeManager, MirroredVolumeManipulator mirroredVolumeManipulator) {\n        super(volumeManager);\n        this.mirroredVolumeManipulator = mirroredVolumeManipulator;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n\n    @Override\n    public void extendLogicalVolume(GUID physicalGuid) {\n\n    }\n\n    @Override\n    public List<GUID> listPhysicalVolume() {\n        return null;\n    }\n\n\n    @Override\n    public void setVolumeTree(VolumeManager volumeManager) {\n        this.volumeManager = volumeManager;\n    }\n\n\n    @Override\n    public StorageIOResponse 
receive(ReceiveEntity entity) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, boolean accessRandom) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize, boolean accessRandom) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public boolean existStorageObject(GUID storageObject) throws SQLException {\n        return false;\n    }\n\n    @Override\n    public void build() throws SQLException {\n\n    }\n\n    @Override\n    public void storageExpansion(GUID volumeGuid) {\n\n    }\n\n    @Override\n    public void deductCapacity(long deductCapacity) {\n        this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + 
deductCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public boolean checkCapacity(long size) {\n        long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize();\n        return freeSpace > size;\n    }\n\n    @Override\n    public void increaseCapacity(long increaseCapacity) {\n        this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/TitanLocalPhysicalVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchVolume;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.MountPoint;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.source.PhysicalVolumeManipulator;\n\nimport java.io.IOException;\n\npublic class TitanLocalPhysicalVolume extends ArchVolume implements LocalPhysicalVolume {\n    private MountPoint                  mountPoint;\n    private PhysicalVolumeManipulator   physicalVolumeManipulator;\n\n    public TitanLocalPhysicalVolume(VolumeManager volumeManager, PhysicalVolumeManipulator physicalVolumeManipulator) {\n        super(volumeManager);\n        this.physicalVolumeManipulator = physicalVolumeManipulator;\n    }\n    public TitanLocalPhysicalVolume(){}\n\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n\n    @Override\n    public MountPoint getMountPoint() {\n        return this.mountPoint;\n    }\n\n    @Override\n    public void setMountPoint(MountPoint mountPoint) {\n        this.mountPoint = mountPoint;\n    }\n\n    @Override\n    public GUID getParent() 
{\n        return this.physicalVolumeManipulator.getParent( this.guid );\n    }\n\n    @Override\n    public void applyVolumeManage(VolumeManager volumeManager) {\n        this.volumeManager = volumeManager;\n    }\n\n    public void setPhysicalVolumeManipulator(PhysicalVolumeManipulator physicalVolumeManipulator ){\n        this.physicalVolumeManipulator = physicalVolumeManipulator;\n    }\n\n\n    @Override\n    public StorageIOResponse channelReceive(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel) throws UIOException {\n//        TitanDirectChannelReceiveEntity64 titanDirectChannelReceiveEntity64 = new TitanDirectChannelReceiveEntity64(volumeManager, storageReceiveIORequest, this.mountPoint.getMountPoint(), channel);\n//        StorageIOResponse storageIOResponse = titanDirectChannelReceiveEntity64.receive();\n//        storageIOResponse.setBottomGuid( this.guid );\n//\n//        return storageIOResponse;\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse channelReceive(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, Number offset, Number endSize) throws IOException {\n//        TitanDirectChannelReceiveEntity64 titanDirectChannelReceiveEntity64 = new TitanDirectChannelReceiveEntity64(volumeManager, storageReceiveIORequest, this.mountPoint.getMountPoint(), channel);\n//        StorageIOResponse storageIOResponse = titanDirectChannelReceiveEntity64.receive(offset, endSize);\n//        storageIOResponse.setBottomGuid( this.getGuid() );\n//        return storageIOResponse;\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse channelExport(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel) throws IOException {\n//        TitanDirectChannelExportEntity64 titanDirectChannelExportEntity64 = new TitanDirectChannelExportEntity64(volumeManager, storageExportIORequest,channel );\n//        
StorageIOResponse storageIOResponse = titanDirectChannelExportEntity64.export();\n//        storageIOResponse.setBottomGuid( this.getGuid() );\n//        return storageIOResponse;\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse channelRaid0Export(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws IOException {\n//        TitanDirectChannelExportEntity64 titanDirectChannelExportEntity64 = new TitanDirectChannelExportEntity64(volumeManager, storageExportIORequest,channel );\n//        StorageIOResponse storageIOResponse = titanDirectChannelExportEntity64.export(cacheBlock, offset, endSize, buffer);\n//        storageIOResponse.setBottomGuid( this.getGuid() );\n//        return storageIOResponse;\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity) throws IOException {\n        this.deductCapacity( entity.getReceiveStorageObject().getSize().longValue() );\n        return entity.receive();\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws IOException {\n        this.deductCapacity( entity.getReceiveStorageObject().getSize().longValue() );\n        return entity.receive( offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) throws IOException {\n        this.deductCapacity( entity.getReceiveStorageObject().getSize().longValue() );\n        return entity.randomReceive( offset,endSize );\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws IOException {\n        this.deductCapacity( entity.getReceiveStorageObject().getSize().longValue() );\n        return entity.receive( cacheBlock, buffer );\n    }\n\n    @Override\n    public StorageIOResponse 
export(ExporterEntity entity) throws IOException {\n        return entity.export();\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public void deductCapacity(long deductCapacity) {\n        this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public void increaseCapacity(long increaseCapacity) {\n        this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public boolean checkCapacity(long size) {\n        long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize();\n        return freeSpace > size;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/DirectExport.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.export;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.volume.entity.Exporter;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic interface DirectExport extends Exporter {\n    StorageIOResponse export(Chanface chanface) throws IOException;\n\n    StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws IOException;\n\n    StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws IOException;\n\n    StorageIOResponse export( CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer);\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/DirectExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.export;\n\npublic interface DirectExport64 extends DirectExport {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/DirectExportEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.export;\n\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\n\nimport java.io.IOException;\n\npublic interface DirectExportEntity extends ExporterEntity {\n    StorageIOResponse export(Number offset, Number endSize) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/DirectExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.export;\n\npublic interface DirectExportEntity64 extends DirectExportEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/TitanDirectExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.export;\n\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.TitanStorageIOResponse;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlockStatus;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\nimport java.util.zip.CRC32;\n\npublic class TitanDirectExport64 implements DirectExport64{\n\n    protected VolumeManager volumeManager;\n\n    protected StorageExportIORequest storageExportIORequest;\n\n    public TitanDirectExport64( DirectExportEntity64 entity ){\n        this.volumeManager = entity.getVolumeManager();\n        this.storageExportIORequest = entity.getStorageIORequest();\n    }\n\n    @Override\n    public StorageIOResponse export( Chanface chanface ) throws IOException {\n        String sourceName = this.storageExportIORequest.getSourceName();\n        long size = this.storageExportIORequest.getSize().longValue();\n        TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse();\n\n        long parityCheck = 0;\n        long checksum = 0;\n        File file = new File(sourceName);\n\n        try (FileChannel frameChannel = FileChannel.open(file.toPath(), StandardOpenOption.READ)) {\n            ByteBuffer buffer = ByteBuffer.allocate((int) size);\n            frameChannel.read(buffer);\n            buffer.flip();\n            CRC32 crc = new CRC32();\n            while ( buffer.hasRemaining() ) {\n            
    byte b = buffer.get();\n                parityCheck += Bytes.calculateParity( b );\n                checksum += b & 0xFF;\n                crc.update(b);\n            }\n\n            buffer.rewind();\n            chanface.write(buffer);\n            buffer.clear();\n\n            titanMiddleStorageObject.setChecksum( checksum );\n            titanMiddleStorageObject.setCrc32( crc );\n            titanMiddleStorageObject.setParityCheck( parityCheck );\n        }\n\n        return titanMiddleStorageObject;\n    }\n\n    @Override\n    public StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws IOException {\n        String sourceName = this.storageExportIORequest.getSourceName();\n        long size = endSize.longValue();\n        TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse();\n\n        long parityCheck = 0;\n        long checksum = 0;\n        File file = new File(sourceName);\n\n        try (FileChannel frameChannel = FileChannel.open(file.toPath(), StandardOpenOption.READ)) {\n            long actualSize = Math.min(size, frameChannel.size() - offset.longValue());\n            ByteBuffer buffer = ByteBuffer.allocate((int) actualSize);\n\n            frameChannel.read(buffer, offset.longValue());\n            buffer.flip();\n            CRC32 crc = new CRC32();\n            while (buffer.hasRemaining()) {\n                byte b = buffer.get();\n                parityCheck += Bytes.calculateParity(b);\n                checksum += b & 0xFF;\n                crc.update(b);\n            }\n\n            buffer.rewind();\n            chanface.write(buffer);\n            buffer.clear();\n\n            titanMiddleStorageObject.setChecksum(checksum);\n            titanMiddleStorageObject.setCrc32(crc );\n            titanMiddleStorageObject.setParityCheck(parityCheck);\n        }\n\n        return titanMiddleStorageObject;\n    }\n\n    @Override\n    public StorageIOResponse export(RandomAccessChanface 
randomAccessChanface) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export( CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) {\n        String sourceName = this.storageExportIORequest.getSourceName();\n        TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse();\n\n        long parityCheck = 0;\n        long checksum = 0;\n        File file = new File(sourceName);\n\n        try ( FileChannel frameChannel = FileChannel.open(file.toPath(), StandardOpenOption.READ) ) {\n            long bufferSize = endSize.longValue();\n            // 定位到文件的 offset 位置\n            frameChannel.position(offset.longValue());\n\n            // 读取 endSize 大小的字节\n            ByteBuffer byteBuffer = ByteBuffer.allocate(endSize.intValue());\n            int read = frameChannel.read(byteBuffer);\n            byteBuffer.flip();\n\n            // 将读取的数据从 bufferStartPosition 开始写入到 buffer\n            if( read < bufferSize ){\n                bufferSize = read;\n            }\n            Debug.trace( \"起始位置\" + offset.longValue()+\"终止大小\"+bufferSize+\"缓存大小\"+endSize.intValue() );\n            byteBuffer.get(buffer, cacheBlock.getByteStart().intValue(), (int) bufferSize);\n            cacheBlock.setStatus( CacheBlockStatus.Full );\n            cacheBlock.setValidByteStart( cacheBlock.getByteStart().intValue() );\n            cacheBlock.setValidByteEnd( cacheBlock.getByteStart().intValue()+bufferSize );\n\n            // 计算校验和和奇偶校验\n            CRC32 crc = new CRC32();\n            for (int i = 0; i < endSize.intValue(); i++) {\n                byte b = buffer[cacheBlock.getByteStart().intValue()+i];\n                parityCheck += Bytes.calculateParity(b);\n                checksum += b & 0xFF;\n                crc.update(b);\n            }\n\n            titanMiddleStorageObject.setChecksum(checksum);\n            titanMiddleStorageObject.setCrc32(crc);\n            
titanMiddleStorageObject.setParityCheck(parityCheck);\n        }\n        catch (IOException e) {\n            throw new RuntimeException(e);\n        }\n\n        return titanMiddleStorageObject;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/export/TitanDirectExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.export;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchExportEntity;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic class TitanDirectExportEntity64 extends ArchExportEntity implements DirectExportEntity64{\n    protected DirectExport64 directExport;\n\n    public TitanDirectExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel) {\n        super(volumeManager, storageExportIORequest, channel);\n        this.directExport = new TitanDirectExport64( this );\n    }\n\n    @Override\n    public StorageIOResponse export() throws IOException {\n        return this.directExport.export(this.channel);\n    }\n\n    @Override\n    public StorageIOResponse export(Number offset, Number endSize) throws IOException {\n        return this.directExport.export( this.channel,offset,endSize );\n    }\n\n    @Override\n    public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException {\n        return this.directExport.export(cacheBlock, offset, endSize, buffer);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/DirectReceive.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.receive;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.volume.entity.Receiver;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic interface DirectReceive extends Receiver {\n    StorageIOResponse receive(Chanface chanface,CacheBlock cacheBlock, byte[] buffer ) throws IOException;\n\n    StorageIOResponse randomReceive( Chanface chanface, Number offset, Number endSize) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/DirectReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.receive;\n\npublic interface DirectReceive64 extends DirectReceive {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/DirectReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.receive;\n\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\n\npublic interface DirectReceiveEntity extends ReceiveEntity {\n    String getDestDirPath();\n    void setDestDirPath( String destDirPath );\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/DirectReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.receive;\n\npublic interface DirectReceiveEntity64 extends DirectReceiveEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/TitanDirectReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.receive;\n\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageNaming;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.TitanStorageIOResponse;\nimport com.pinecone.hydra.storage.TitanStorageNaming;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.nio.ByteBuffer;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.nio.file.StandardOpenOption;\nimport java.util.zip.CRC32;\n\npublic class TitanDirectReceive64 implements DirectReceive64{\n    protected StorageNaming           storageNaming;\n\n    protected StorageReceiveIORequest storageReceiveIORequest;\n\n    protected VolumeManager           volumeManager;\n\n    protected String                  destDirPath;\n\n    public TitanDirectReceive64( DirectReceiveEntity entity ){\n        this.storageReceiveIORequest = entity.getReceiveStorageObject();\n        this.volumeManager           = entity.getVolumeManager();\n        this.destDirPath             = entity.getDestDirPath();\n        this.storageNaming           = new TitanStorageNaming();\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface) throws IOException {\n        return this.receiveWithOffsetAndSize( chanface, 0, this.storageReceiveIORequest.getSize().intValue() );\n    }\n\n    @Override\n    public StorageIOResponse receive( Chanface chanface, Number offset, Number endSize) throws IOException {\n        return 
this.receiveWithOffsetAndSize( chanface,offset.intValue(),endSize.intValue() );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Chanface chanface,Number offset, Number endSize) throws IOException {\n        long startPosition = offset.longValue();\n        long endPosition = startPosition + endSize.longValue();\n\n        TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse();\n        titanMiddleStorageObject.setObjectGuid(storageReceiveIORequest.getStorageObjectGuid());\n\n        URI uri;\n        try {\n            uri = new URI(this.destDirPath);\n        } catch (URISyntaxException e) {\n            throw new IOException(e);\n        }\n\n        Path path = Paths.get(uri);\n        String sourceName = this.storageNaming.naming(\n                this.storageReceiveIORequest.getName(), this.storageReceiveIORequest.getStorageObjectGuid().toString()\n        );\n        path = path.resolve(sourceName);\n\n        ByteBuffer buffer = ByteBuffer.allocate(1024);\n        try (FileChannel chunkChannel = FileChannel.open(path, StandardOpenOption.CREATE, StandardOpenOption.WRITE)) {\n            while (startPosition < endPosition && chanface.read(buffer) != -1) {\n                buffer.flip();\n\n                chunkChannel.position(startPosition);\n                int write = chunkChannel.write(buffer);\n                startPosition += write;\n\n                buffer.clear();\n            }\n        }\n\n        titanMiddleStorageObject.setSourceName(path.toString());\n\n        return titanMiddleStorageObject;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException {\n        return this.receiveWithOffsetAndSize( randomAccessChanface, 0, this.storageReceiveIORequest.getSize().intValue() );\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException {\n        
return this.receiveWithOffsetAndSize( randomAccessChanface,offset.intValue(),endSize.intValue() );\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface,CacheBlock cacheBlock, byte[] buffer) throws IOException {\n        int start = cacheBlock.getValidByteStart().intValue();\n        int end = cacheBlock.getValidByteEnd().intValue();\n\n        if (start < 0 || end > buffer.length || start >= end) {\n            throw new IllegalArgumentException(\"Invalid cacheBlock range or buffer size.\");\n        }\n\n        int size = end - start;\n        int parityCheck = 0;\n        long checksum = 0;\n        CRC32 crc = new CRC32();\n\n        TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse();\n        titanMiddleStorageObject.setObjectGuid(storageReceiveIORequest.getStorageObjectGuid());\n\n        String sourceName = this.storageNaming.naming(storageReceiveIORequest.getName(), storageReceiveIORequest.getStorageObjectGuid().toString());\n        Path path = Paths.get(destDirPath, sourceName);\n\n        Files.createDirectories(path.getParent());\n\n        try (OutputStream outputStream = Files.newOutputStream(path, StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {\n            for (int i = start; i < end; i++) {\n                byte b = buffer[i];\n                parityCheck += Bytes.calculateParity(b);\n                checksum += b & 0xFF;\n                crc.update(b);\n            }\n\n            outputStream.write(buffer, start, size);\n        } catch (IOException e) {\n            throw new IOException(\"Failed to write to file: \" + path.toString(), e);\n        }\n\n        titanMiddleStorageObject.setChecksum(checksum);\n        titanMiddleStorageObject.setCrc32(crc);\n        titanMiddleStorageObject.setParityCheck(parityCheck);\n        titanMiddleStorageObject.setSourceName(path.toString());\n\n        return titanMiddleStorageObject;\n    }\n\n    private StorageIOResponse 
receiveWithOffsetAndSize(Chanface chanface,long offset, int size) throws IOException {\n\n        int parityCheck = 0;\n        long checksum = 0;\n        //ByteBuffer buffer = ByteBuffer.allocateDirect(size);\n\n        TitanStorageIOResponse titanMiddleStorageObject = new TitanStorageIOResponse();\n        titanMiddleStorageObject.setObjectGuid(storageReceiveIORequest.getStorageObjectGuid());\n\n        //buffer.clear();\n\n        ByteBuffer[] lpBuf = new ByteBuffer[ 1 ];\n        chanface.read( (out)->{\n            lpBuf[0] = out;\n        }, size, offset );\n        ByteBuffer buffer = lpBuf[ 0 ];\n\n        buffer.flip();\n        CRC32 crc = new CRC32();\n\n        while (buffer.hasRemaining()) {\n            byte b = buffer.get();\n            parityCheck += Bytes.calculateParity(b);\n            checksum += b & 0xFF;\n            crc.update(b);\n        }\n        URI uri = null;\n        try {\n            uri = new URI( this.destDirPath );\n        }\n        catch ( URISyntaxException e ) {\n            throw new IOException(e);\n        }\n\n        Path path = Paths.get(uri);\n        String sourceName = this.storageNaming.naming(\n                this.storageReceiveIORequest.getName(), this.storageReceiveIORequest.getStorageObjectGuid().toString()\n        );\n        path = path.resolve(sourceName);\n\n        try (FileChannel chunkChannel = FileChannel.open(path, StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND)) {\n            buffer.rewind();\n            chunkChannel.position(chunkChannel.size());  // 从文件末尾开始写入\n            chunkChannel.write(buffer);\n        }\n\n        titanMiddleStorageObject.setChecksum(checksum);\n        titanMiddleStorageObject.setCrc32(crc);\n        titanMiddleStorageObject.setParityCheck(parityCheck);\n        titanMiddleStorageObject.setSourceName(path.toString());\n\n        return titanMiddleStorageObject;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/physical/receive/TitanDirectReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.physical.receive;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic class TitanDirectReceiveEntity64 extends ArchReceiveEntity implements DirectReceiveEntity64{\n    protected String            destDirPath;\n\n    protected DirectReceive64   directReceive;\n\n    protected Chanface          chanface;\n\n    public TitanDirectReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, String destDirPath) {\n        super(volumeManager, storageReceiveIORequest, channel);\n        this.destDirPath = destDirPath;\n        this.directReceive = new TitanDirectReceive64( this );\n        this.chanface      = channel;\n    }\n\n    @Override\n    public StorageIOResponse receive() throws IOException {\n        return this.directReceive.receive( this.chanface );\n    }\n\n    @Override\n    public StorageIOResponse receive(Number offset, Number endSize) throws IOException {\n        return this.directReceive.receive( this.chanface, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws IOException {\n        return this.directReceive.receive( this.chanface,cacheBlock, buffer );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException {\n        return this.directReceive.randomReceive( this.chanface, offset,endSize );\n    }\n\n    @Override\n    public String getDestDirPath() {\n        return this.destDirPath;\n    }\n\n    @Override\n    public void setDestDirPath(String 
destDirPath) {\n        this.destDirPath = destDirPath;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/TitanLocalSimpleVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchLogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic class TitanLocalSimpleVolume extends ArchLogicVolume implements LocalSimpleVolume {\n    protected SimpleVolumeManipulator simpleVolumeManipulator;\n\n    protected MappedExecutor          mappedExecutor;\n\n\n    public TitanLocalSimpleVolume(VolumeManager volumeManager, SimpleVolumeManipulator simpleVolumeManipulator) {\n        super(volumeManager);\n        this.simpleVolumeManipulator = simpleVolumeManipulator;\n    }\n\n    public TitanLocalSimpleVolume( VolumeManager volumeManager){\n        super(volumeManager);\n    }\n\n    public TitanLocalSimpleVolume(){\n    }\n\n    public void setSimpleVolumeManipulator( SimpleVolumeManipulator simpleVolumeManipulator ){\n        this.simpleVolumeManipulator = simpleVolumeManipulator;\n    }\n\n    @Override\n    public List<LogicVolume> queryChildren() {\n        return 
super.queryChildren();\n    }\n\n\n    @Override\n    public void extendLogicalVolume(GUID physicalGuid) {\n        this.simpleVolumeManipulator.extendLogicalVolume( this.guid, physicalGuid );\n    }\n\n    @Override\n    public List<GUID> listPhysicalVolume() {\n        return this.simpleVolumeManipulator.listPhysicalVolume( this.guid );\n    }\n\n\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity) throws IOException {\n        StorageIOResponse response = entity.receive();\n        try {\n            this.saveMate( response, entity.getReceiveStorageObject().getName() );\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n        return response ;\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws IOException{\n        StorageIOResponse response = entity.receive( offset, endSize );\n        try {\n            this.saveMate( response, entity.getReceiveStorageObject().getName() );\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n        return response;\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) throws IOException {\n        StorageIOResponse response = entity.randomReceive( offset,endSize );\n        try {\n            this.saveMate( response, entity.getReceiveStorageObject().getName() );\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n        return response;\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws IOException {\n        StorageIOResponse response = entity.receive(cacheBlock, buffer);\n        try {\n            this.saveMate( response, entity.getReceiveStorageObject().getName() );\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n        return 
response;\n    }\n\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity) throws IOException {\n        return entity.export();\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) throws IOException {\n        return entity.export( offset,endSize );\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException {\n        return entity.export( cacheBlock, offset, endSize, buffer );\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, boolean accessRandom) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize, boolean accessRandom) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public void setVolumeTree( VolumeManager volumeManager ) {\n        this.volumeManager = volumeManager;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public boolean existStorageObject(GUID storageObject) throws SQLException {\n        return this.kenVolumeFileSystem.existStorageObject( this.mappedExecutor, storageObject );\n    }\n\n    private synchronized void saveMate(StorageIOResponse storageIOResponse, String storageObjectName) throws SQLException {\n        if( !kenVolumeFileSystem.existStorageObject( this.mappedExecutor, storageIOResponse.getObjectGuid() ) ){\n            this.kenVolumeFileSystem.insertSimpleTargetMappingSoloRecord( 
storageIOResponse.getObjectGuid(), storageObjectName, storageIOResponse.getSourceName(), this.mappedExecutor );\n        }\n    }\n\n    @Override\n    public void build() throws SQLException {\n        VolumeConfig config = this.volumeManager.getConfig();\n        PhysicalVolume smallestCapacityPhysicalVolume = this.volumeManager.getSmallestCapacityPhysicalVolume();\n        String url = smallestCapacityPhysicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.guid + config.getSqliteFileExtension();\n        SQLiteExecutor sqLiteExecutor = (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);\n        this.mappedExecutor = sqLiteExecutor;\n        this.kenVolumeFileSystem.createSimpleTargetMappingTab( sqLiteExecutor );\n        this.volumeManager.put( this );\n        this.kenVolumeFileSystem.insertSimpleTargetMappingTab( smallestCapacityPhysicalVolume.getGuid(), this.getGuid() );\n    }\n\n    @Override\n    public void storageExpansion(GUID volumeGuid) {\n        this.extendLogicalVolume( volumeGuid );\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(volumeGuid);\n        this.simpleVolumeManipulator.updateDefinitionCapacity( this.guid, physicalVolume.getVolumeCapacity().getDefinitionCapacity() );\n    }\n    @Override\n    public SQLiteExecutor getSQLiteExecutor() throws SQLException {\n        VolumeConfig config = this.volumeManager.getConfig();\n        GUID physicsGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.guid);\n        if( physicsGuid == null ){\n            return null;\n        }\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsGuid);\n\n        String url = physicalVolume.getMountPoint().getMountPoint()+ config.getPathSeparator() +this.guid+ config.getSqliteFileExtension();\n        return (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);\n    }\n\n    @Override\n    public void deductCapacity(long deductCapacity) {\n        
this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public void increaseCapacity(long increaseCapacity) {\n        this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public boolean checkCapacity(long size) {\n        long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize();\n        return freeSpace > size;\n    }\n\n    public void assembleSQLiteExecutor() throws SQLException {\n        this.mappedExecutor = this.getSQLiteExecutor();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/SimpleExport.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.export;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.entity.Exporter;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic interface SimpleExport extends Exporter {\n    StorageIOResponse export(Chanface chanface) throws IOException;\n\n    StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws IOException;\n\n    StorageIOResponse export(Chanface chanface,CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer ) throws UIOException;\n\n    StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/SimpleExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.export;\n\npublic interface SimpleExport64 extends SimpleExport {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/SimpleExportEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.export;\n\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\n\npublic interface SimpleExportEntity extends ExporterEntity {\n    SimpleVolume getSimpleVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/SimpleExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.export;\n\npublic interface SimpleExportEntity64 extends SimpleExportEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/TitanSimpleExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.export;\n\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.physical.export.TitanDirectExportEntity64;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\n\npublic class TitanSimpleExport64 implements SimpleExport64{\n    private VolumeManager           volumeManager;\n\n    private StorageExportIORequest  storageExportIORequest;\n\n    private SimpleVolume             simpleVolume;\n\n    private KenVolumeFileSystem      kenVolumeFileSystem;\n\n    public TitanSimpleExport64( SimpleExportEntity entity ){\n        this.volumeManager = entity.getVolumeManager();\n        this.storageExportIORequest = entity.getStorageIORequest();\n        this.simpleVolume = entity.getSimpleVolume();\n        this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager );\n    }\n    @Override\n    public StorageIOResponse export(Chanface chanface) throws IOException {\n        try {\n            SQLiteExecutor sqLiteExecutor = simpleVolume.getSQLiteExecutor();\n            String sourceName = this.kenVolumeFileSystem.getSimpleStorageObjectSourceName(this.storageExportIORequest.getStorageObjectGuid(), sqLiteExecutor);\n            this.storageExportIORequest.setSourceName(sourceName);\n            TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, 
this.storageExportIORequest, chanface );\n            return exportEntity.export();\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n    }\n\n    @Override\n    public StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws IOException {\n        try {\n            SQLiteExecutor sqLiteExecutor = simpleVolume.getSQLiteExecutor();\n            String sourceName = this.kenVolumeFileSystem.getSimpleStorageObjectSourceName(this.storageExportIORequest.getStorageObjectGuid(), sqLiteExecutor);\n            this.storageExportIORequest.setSourceName(sourceName);\n            TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface );\n            return exportEntity.export( offset,endSize );\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n    }\n\n    @Override\n    public StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(Chanface chanface,CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException {\n        TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface );\n        return exportEntity.export( cacheBlock, offset, endSize, buffer );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/export/TitanSimpleExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.export;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchExportEntity;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic class TitanSimpleExportEntity64 extends ArchExportEntity implements SimpleExportEntity64{\n    protected SimpleExport64 simpleExportEntity;\n\n    protected SimpleVolume   simpleVolume;\n\n    public TitanSimpleExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, SimpleVolume simpleVolume) {\n        super(volumeManager, storageExportIORequest, channel);\n        this.simpleVolume       = simpleVolume;\n        this.simpleExportEntity = new TitanSimpleExport64( this );\n    }\n\n    @Override\n    public StorageIOResponse export() throws IOException {\n        return this.simpleExportEntity.export(this.channel);\n    }\n\n    @Override\n    public StorageIOResponse export(Number offset, Number endSize) throws IOException {\n        return this.simpleExportEntity.export( this.channel, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException {\n        return this.simpleExportEntity.export( this.channel, cacheBlock, offset, endSize, buffer );\n    }\n\n    @Override\n    public SimpleVolume getSimpleVolume() {\n        return this.simpleVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceive.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.volume.entity.Receiver;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic interface SimpleReceive extends Receiver {\n    StorageIOResponse receive(Chanface chanface,CacheBlock cacheBlock, byte[] buffer ) throws IOException;\n\n    StorageIOResponse receive(RandomAccessChanface randomAccessChanface, CacheBlock cacheBlock, byte[] buffer ) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice;\n\npublic interface SimpleReceive64 extends SimpleReceive {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice;\n\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\n\npublic interface SimpleReceiveEntity extends ReceiveEntity {\n    SimpleVolume getSimpleVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice;\n\npublic interface SimpleReceiveEntity64 extends SimpleReceiveEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceiver.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice;\n\nimport com.pinecone.hydra.storage.volume.entity.Receiver;\n\npublic interface SimpleReceiver extends Receiver {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/SimpleReceiverEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice;\n\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\n\npublic interface SimpleReceiverEntity extends ReceiveEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/TitanSimpleReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.physical.receive.TitanDirectReceiveEntity64;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\nimport java.util.List;\n\npublic class TitanSimpleReceive64 implements SimpleReceive64{\n    private SimpleVolume            simpleVolume;\n\n    private VolumeManager           volumeManager;\n\n    private StorageReceiveIORequest storageReceiveIORequest;\n\n    public TitanSimpleReceive64( SimpleReceiveEntity entity ){\n        this.simpleVolume = entity.getSimpleVolume();\n        this.volumeManager = entity.getVolumeManager();\n        this.storageReceiveIORequest = entity.getReceiveStorageObject();\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface) throws IOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n        TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, chanface, physicalVolume.getMountPoint().getMountPoint() );\n        return physicalVolume.receive( receiveEntity );\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface,Number offset, Number endSize) throws IOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = 
this.volumeManager.getPhysicalVolume(guids.get(0));\n        TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, chanface, physicalVolume.getMountPoint().getMountPoint() );\n        return physicalVolume.receive( receiveEntity, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) throws IOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n        TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, chanface, physicalVolume.getMountPoint().getMountPoint() );\n        return physicalVolume.randomReceive( receiveEntity, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface,CacheBlock cacheBlock, byte[] buffer) throws IOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n        TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, chanface, physicalVolume.getMountPoint().getMountPoint() );\n        return physicalVolume.receive( receiveEntity, cacheBlock, buffer );\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n        TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, randomAccessChanface, physicalVolume.getMountPoint().getMountPoint() );\n        return physicalVolume.receive( receiveEntity 
);\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n        TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, randomAccessChanface, physicalVolume.getMountPoint().getMountPoint() );\n        return physicalVolume.receive( receiveEntity, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, CacheBlock cacheBlock, byte[] buffer) throws IOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n        TitanDirectReceiveEntity64 receiveEntity = new TitanDirectReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, randomAccessChanface, physicalVolume.getMountPoint().getMountPoint() );\n        return physicalVolume.receive( receiveEntity, cacheBlock, buffer );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/TitanSimpleReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic class TitanSimpleReceiveEntity64 extends ArchReceiveEntity implements SimpleReceiveEntity64{\n    protected SimpleVolume      simpleVolume;\n\n    protected SimpleReceive     simpleReceive;\n\n    protected Chanface          chanface;\n\n    public TitanSimpleReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, SimpleVolume volume) {\n        super(volumeManager, storageReceiveIORequest, channel);\n        this.simpleVolume  = volume;\n        this.simpleReceive = new TitanSimpleReceive64( this );\n        this.chanface      = channel;\n    }\n\n    @Override\n    public StorageIOResponse receive() throws IOException {\n        return this.simpleReceive.receive(this.chanface);\n    }\n\n    @Override\n    public StorageIOResponse receive(Number offset, Number endSize) throws IOException{\n        return this.simpleReceive.receive(this.chanface, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException {\n        return this.simpleReceive.randomReceive( this.chanface, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws IOException {\n        return this.simpleReceive.receive(this.chanface, cacheBlock, buffer );\n    }\n\n    @Override\n    public SimpleVolume getSimpleVolume() {\n        return 
this.simpleVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/SimpleChannelReceiver.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel;\n\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.recevice.SimpleReceiver;\n\n\nimport java.io.IOException;\n\npublic interface SimpleChannelReceiver extends SimpleReceiver {\n    StorageIOResponse channelReceive( ) throws UIOException;\n    StorageIOResponse channelReceive(Number offset, Number endSize) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/SimpleChannelReceiver64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel;\n\npublic interface SimpleChannelReceiver64 extends SimpleChannelReceiver{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/SimpleChannelReceiverEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.recevice.SimpleReceiverEntity;\n\npublic interface SimpleChannelReceiverEntity extends SimpleReceiverEntity {\n    Chanface getChannel();\n    void setChannel( Chanface channel );\n\n    SimpleVolume getSimpleVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/SimpleChannelReceiverEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel;\n\npublic interface SimpleChannelReceiverEntity64 extends SimpleChannelReceiverEntity{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/TitanSimpleChannelReceiver64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\n\nimport java.io.IOException;\nimport java.util.List;\n\npublic class TitanSimpleChannelReceiver64   implements SimpleChannelReceiver64{\n    private SimpleVolume            simpleVolume;\n\n    private Chanface fileChannel;\n\n    private VolumeManager           volumeManager;\n\n    private StorageReceiveIORequest storageReceiveIORequest;\n\n    public TitanSimpleChannelReceiver64( SimpleChannelReceiverEntity entity ){\n        this.volumeManager = entity.getVolumeManager();\n        this.simpleVolume = entity.getSimpleVolume();\n        this.fileChannel = entity.getChannel();\n        this.storageReceiveIORequest = entity.getReceiveStorageObject();\n    }\n    @Override\n    public StorageIOResponse channelReceive() throws UIOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n\n        return physicalVolume.channelReceive( this.volumeManager,this.storageReceiveIORequest,this.fileChannel );\n    }\n\n    @Override\n    public StorageIOResponse channelReceive(Number offset, Number endSize) throws IOException {\n        List<GUID> guids = simpleVolume.listPhysicalVolume();\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n        return physicalVolume.channelReceive( 
this.volumeManager,this.storageReceiveIORequest,this.fileChannel, offset,endSize );\n    }\n\n//    @Override\n//    public StorageIOResponse receive() throws UIOException {\n//        return null;\n//    }\n//\n//    @Override\n//    public StorageIOResponse receive(Number offset, Number endSize) throws UIOException {\n//        return null;\n//    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException{\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/channel/TitanSimpleChannelReceiverEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.channel;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic class TitanSimpleChannelReceiverEntity64 extends ArchReceiveEntity implements SimpleChannelReceiverEntity64{\n\n    private Chanface channel;\n    private SimpleVolume            simpleVolume;\n    private SimpleChannelReceiver64 titanSimpleChannelReceiver64;\n\n    public TitanSimpleChannelReceiverEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, SimpleVolume simpleVolume) {\n        super(volumeManager, storageReceiveIORequest, null);\n        this.channel = channel;\n        this.simpleVolume = simpleVolume;\n        this.titanSimpleChannelReceiver64 = new TitanSimpleChannelReceiver64( this );\n    }\n\n\n    @Override\n    public Chanface getChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public void setChannel(Chanface channel) {\n        this.channel = channel;\n    }\n\n    @Override\n    public SimpleVolume getSimpleVolume() {\n        return this.simpleVolume;\n    }\n\n\n    @Override\n    public StorageIOResponse receive() throws UIOException {\n        return this.titanSimpleChannelReceiver64.channelReceive();\n    }\n\n    @Override\n    public StorageIOResponse receive(Number offset, Number endSize) throws IOException {\n        return this.titanSimpleChannelReceiver64.channelReceive( offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse 
randomReceive(Number offset, Number endSize) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/SimpleStreamReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream;\n\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.recevice.SimpleReceiverEntity;\n\nimport java.io.InputStream;\n\npublic interface SimpleStreamReceiveEntity extends SimpleReceiverEntity {\n    InputStream getStream();\n    void setStream( InputStream stream );\n\n    SimpleVolume getSimpleVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/SimpleStreamReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream;\n\npublic interface SimpleStreamReceiveEntity64 extends SimpleStreamReceiveEntity{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/SimpleStreamReceiver.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream;\n\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.recevice.SimpleReceiver;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\npublic interface SimpleStreamReceiver extends SimpleReceiver {\n    StorageIOResponse streamReceive( ) throws UIOException;\n\n    StorageIOResponse streamReceive(Number offset, Number endSize) throws UIOException;\n\n    StorageIOResponse streamReceive( CacheBlock cacheBlock, byte[] buffer ) throws UIOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/SimpleStreamReceiver64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream;\n\npublic interface SimpleStreamReceiver64 extends SimpleStreamReceiver {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/TitanSimpleStreamReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\n\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.List;\n\npublic class TitanSimpleStreamReceive64 implements SimpleStreamReceiver64{\n    protected SimpleVolume                  simpleVolume;\n\n    protected InputStream                   stream;\n\n    protected VolumeManager                 volumeManager;\n\n    protected StorageReceiveIORequest       storageReceiveIORequest;\n\n    protected PhysicalVolume                physicalVolume;\n\n\n    public TitanSimpleStreamReceive64( SimpleStreamReceiveEntity64 entity ){\n        this.volumeManager           = entity.getVolumeManager();\n        this.simpleVolume            = entity.getSimpleVolume();\n        this.stream                  = entity.getStream();\n        this.storageReceiveIORequest = entity.getReceiveStorageObject();\n\n        List<GUID> guids = this.simpleVolume.listPhysicalVolume();\n        this.physicalVolume          = this.volumeManager.getPhysicalVolume(guids.get(0));\n    }\n\n\n    @Override\n    public StorageIOResponse streamReceive()  {\n//        TitanDirectStreamReceiveEntity64 titanDirectStreamReceiveEntity64 = new TitanDirectStreamReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.stream, this.physicalVolume.getMountPoint().getMountPoint() );\n//        return this.physicalVolume.receive( titanDirectStreamReceiveEntity64 
);\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse streamReceive(Number offset, Number endSize)  {\n//        TitanDirectStreamReceiveEntity64 titanDirectStreamReceiveEntity64 = new TitanDirectStreamReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.stream, this.physicalVolume.getMountPoint().getMountPoint() );\n//        return this.physicalVolume.receive( titanDirectStreamReceiveEntity64, offset, endSize );\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse streamReceive(CacheBlock cacheBlock, byte[] buffer) {\n//        TitanDirectStreamReceiveEntity64 titanDirectStreamReceiveEntity64 = new TitanDirectStreamReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.stream, this.physicalVolume.getMountPoint().getMountPoint() );\n//        return this.physicalVolume.receive( titanDirectStreamReceiveEntity64, cacheBlock, buffer );\n        return null;\n    }\n\n\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException{\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/simple/recevice/stream/TitanSimpleStreamReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.simple.recevice.stream;\n\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\npublic class TitanSimpleStreamReceiveEntity64 extends ArchReceiveEntity implements SimpleStreamReceiveEntity64{\n    protected InputStream stream;\n\n    protected SimpleVolume simpleVolume;\n\n    protected SimpleStreamReceiver64 streamReceiver;\n\n    public TitanSimpleStreamReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, InputStream stream, SimpleVolume simpleVolume ) {\n        super(volumeManager, storageReceiveIORequest,null);\n        this.stream = stream;\n        this.simpleVolume = simpleVolume;\n        this.streamReceiver = new TitanSimpleStreamReceive64( this );\n    }\n\n    @Override\n    public StorageIOResponse receive() throws UIOException {\n        return this.streamReceiver.streamReceive();\n    }\n\n    @Override\n    public StorageIOResponse receive(Number offset, Number endSize) throws UIOException {\n        return this.streamReceiver.streamReceive( offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException {\n        return this.streamReceiver.streamReceive( cacheBlock, buffer );\n    }\n\n    @Override\n    public InputStream getStream() {\n        return this.stream;\n    }\n\n    @Override\n    
public void setStream(InputStream stream) {\n        this.stream = stream;\n    }\n\n    @Override\n    public SimpleVolume getSimpleVolume() {\n        return this.simpleVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/TitanLocalSpannedVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchLogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic class TitanLocalSpannedVolume extends ArchLogicVolume implements LocalSpannedVolume {\n    private SpannedVolumeManipulator spannedVolumeManipulator;\n\n    public TitanLocalSpannedVolume(VolumeManager volumeManager, SpannedVolumeManipulator spannedVolumeManipulator) {\n        super(volumeManager);\n        this.spannedVolumeManipulator = spannedVolumeManipulator;\n    }\n    public TitanLocalSpannedVolume( VolumeManager volumeManager){\n        super(volumeManager);\n    }\n\n    public TitanLocalSpannedVolume(){\n    }\n    public void setSpannedVolumeManipulator( SpannedVolumeManipulator spannedVolumeManipulator ){\n        this.spannedVolumeManipulator = spannedVolumeManipulator;\n    }\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n\n    @Override\n    public void extendLogicalVolume(GUID 
physicalGuid) {\n\n    }\n\n    @Override\n    public List<GUID> listPhysicalVolume() {\n        return null;\n    }\n\n\n    @Override\n    public void setVolumeTree(VolumeManager volumeManager) {\n        this.volumeManager = volumeManager;\n    }\n\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity) throws IOException {\n        return entity.receive();\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws IOException {\n        return entity.receive( offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity) throws IOException {\n        return entity.export();\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException {\n        return entity.export( cacheBlock, offset, endSize, buffer );\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, boolean accessRandom) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize, boolean accessRandom) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public String toString() {\n        
return this.toJSONString();\n    }\n\n    @Override\n    public boolean existStorageObject(GUID storageObject) throws SQLException {\n        List<LogicVolume> volumes = this.queryChildren();\n        for( LogicVolume volume : volumes ){\n            if ( volume.existStorageObject( storageObject ) ){\n                return true;\n            }\n        }\n        return false;\n    }\n\n    // Build 模式，最后去执行\n    @Override\n    public void build() throws SQLException {\n        VolumeConfig config = this.volumeManager.getConfig();\n        PhysicalVolume smallestCapacityPhysicalVolume = this.volumeManager.getSmallestCapacityPhysicalVolume();\n        String url = smallestCapacityPhysicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.guid + config.getSqliteFileExtension();\n        SQLiteExecutor sqLiteExecutor = (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);\n        this.kenVolumeFileSystem.creatSpanLinkedVolumeTable( sqLiteExecutor );\n        this.kenVolumeFileSystem.createSpannedIndexTable( sqLiteExecutor );\n        List<LogicVolume> volumes = this.queryChildren();\n        int index = 0;\n        for( LogicVolume volume : volumes ){\n            this.kenVolumeFileSystem.insertSpannedIndexTable( sqLiteExecutor, index, volume.getGuid() );\n            index++;\n        }\n        this.kenVolumeFileSystem.insertSimpleTargetMappingTab( smallestCapacityPhysicalVolume.getGuid(), this.getGuid() );\n        this.volumeManager.put( this );\n    }\n\n    @Override\n    public void storageExpansion(GUID volumeGuid) {\n        //todo 跨区卷扩容还有点问题\n        this.volumeManager.storageExpansion( this.getGuid(), volumeGuid );\n        LogicVolume logicVolume = this.volumeManager.get(volumeGuid);\n        this.spannedVolumeManipulator.updateDefinitionCapacity( this.guid, logicVolume.getVolumeCapacity().getDefinitionCapacity() );\n    }\n\n    @Override\n    public void deductCapacity(long deductCapacity) {\n        
this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public void increaseCapacity(long increaseCapacity) {\n        this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public boolean checkCapacity(long size) {\n        long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize();\n        return freeSpace > size;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/SpannedExport.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.export;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.entity.Exporter;\n\npublic interface SpannedExport extends Exporter {\n    StorageIOResponse export(Chanface chanface) throws UIOException;\n\n    StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/SpannedExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.export;\n\npublic interface SpannedExport64 extends SpannedExport {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/SpannedExportEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.export;\n\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\n\npublic interface SpannedExportEntity extends ExporterEntity {\n    SpannedVolume getSpannedVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/SpannedExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.export;\n\npublic interface SpannedExportEntity64 extends SpannedExportEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/TitanSpannedExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.export;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.physical.export.TitanDirectExportEntity64;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic class TitanSpannedExport64 implements SpannedExport64{\n    protected VolumeManager volumeManager;\n\n    protected StorageExportIORequest storageExportIORequest;\n\n    protected SpannedVolume spannedVolume;\n\n    protected OnVolumeFileSystem kenVolumeFileSystem;\n\n    public TitanSpannedExport64( SpannedExportEntity64 entity ){\n        this.spannedVolume              = entity.getSpannedVolume();;\n        this.volumeManager              = entity.getVolumeManager();\n        this.storageExportIORequest     = entity.getStorageIORequest();\n        this.kenVolumeFileSystem        = new KenVolumeFileSystem( this.volumeManager );\n    }\n    @Override\n    public StorageIOResponse export(Chanface chanface) throws UIOException {\n        //先查找冲突表中是否存在该文件\n        try {\n            List<LogicVolume> volumes = 
this.spannedVolume.queryChildren();\n            GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.spannedVolume.getGuid());\n            PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid);\n            SQLiteExecutor sqLiteExecutor = this.getSQLiteExecutor(physicalVolume);\n            GUID targetGuid = this.kenVolumeFileSystem.getSpanLinkedVolumeTableTargetGuid(sqLiteExecutor, this.storageExportIORequest.getStorageObjectGuid());\n            if ( targetGuid == null ){\n                int idx = this.kenVolumeFileSystem.hashStorageObjectID(this.storageExportIORequest.getStorageObjectGuid(), volumes.size());\n                GUID tableTargetGuid = this.kenVolumeFileSystem.getSpannedIndexTableTargetGuid(sqLiteExecutor, idx);\n                String source = this.getSource(tableTargetGuid, this.storageExportIORequest.getStorageObjectGuid());\n                this.storageExportIORequest.setSourceName( source );\n                SimpleVolume simpleVolume = (SimpleVolume)this.volumeManager.get(tableTargetGuid);\n                List<GUID> guids = simpleVolume.listPhysicalVolume();\n                PhysicalVolume volume = this.volumeManager.getPhysicalVolume(guids.get(0));\n                TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface );\n                return  volume.export( exportEntity );\n            }\n            else {\n                SimpleVolume simpleVolume = (SimpleVolume)this.volumeManager.get(targetGuid);\n                List<GUID> guids = simpleVolume.listPhysicalVolume();\n                PhysicalVolume volume = this.volumeManager.getPhysicalVolume(guids.get(0));\n                TitanDirectExportEntity64 exportEntity = new TitanDirectExportEntity64( this.volumeManager, this.storageExportIORequest, chanface );\n                return volume.export( exportEntity );\n            }\n        } catch (SQLException 
e) {\n            throw new UIOException(e);\n        } catch (IOException e) {\n            throw new RuntimeException(e);\n        }\n    }\n\n    @Override\n    public StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException {\n        return null;\n    }\n\n    private SQLiteExecutor getSQLiteExecutor(PhysicalVolume physicalVolume ) throws SQLException {\n        VolumeConfig config = this.volumeManager.getConfig();\n        String mountPoint = physicalVolume.getMountPoint().getMountPoint();\n        String url = mountPoint + config.getPathSeparator() + this.spannedVolume.getGuid()+ config.getSqliteFileExtension();\n        return (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);\n    }\n\n    private String getSource(GUID volumeGuid, GUID storageObjectGuid ) throws SQLException {\n        VolumeConfig config = this.volumeManager.getConfig();\n        GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume( volumeGuid );\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume( physicsVolumeGuid );\n        String mountPoint = physicalVolume.getMountPoint().getMountPoint();\n        String url = mountPoint + config.getPathSeparator() + volumeGuid+ config.getSqliteFileExtension();\n        SQLiteExecutor sqLiteExecutor = (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);\n        return this.kenVolumeFileSystem.getSimpleStorageObjectSourceName(storageObjectGuid, sqLiteExecutor);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/export/TitanSpannedExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.export;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchExportEntity;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\npublic class TitanSpannedExportEntity64 extends ArchExportEntity implements SpannedExportEntity64{\n    protected SpannedVolume    spannedVolume;\n\n    protected SpannedExport64  spannedExport;\n\n    public TitanSpannedExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, SpannedVolume spannedVolume) {\n        super(volumeManager, storageExportIORequest, channel);\n        this.spannedVolume = spannedVolume;\n        this.spannedExport = new TitanSpannedExport64( this );\n    }\n\n    @Override\n    public StorageIOResponse export() throws UIOException {\n        return this.spannedExport.export(this.channel);\n    }\n\n    @Override\n    public StorageIOResponse export(Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) {\n        return null;\n    }\n\n    @Override\n    public SpannedVolume getSpannedVolume() {\n        return this.spannedVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/SpannedReceive.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.receive;\n\nimport com.pinecone.hydra.storage.volume.entity.Receiver;\n\npublic interface SpannedReceive extends Receiver {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/SpannedReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.receive;\n\npublic interface SpannedReceive64 extends SpannedReceive {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/SpannedReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.receive;\n\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\n\npublic interface SpannedReceiveEntity extends ReceiveEntity {\n    SpannedVolume  getSpannedVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/SpannedReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.receive;\n\npublic interface SpannedReceiveEntity64 extends SpannedReceiveEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/TitanSpannedReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.receive;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic class TitanSpannedReceive64 implements SpannedReceive64{\n    protected SpannedVolume                 spannedVolume;\n\n    protected VolumeManager                 volumeManager;\n\n    protected StorageReceiveIORequest       storageReceiveIORequest;\n\n    protected OnVolumeFileSystem            kenVolumeFileSystem;\n\n    public TitanSpannedReceive64( SpannedReceiveEntity64 entity ){\n        this.spannedVolume           = entity.getSpannedVolume();\n        this.volumeManager           = entity.getVolumeManager();\n        this.storageReceiveIORequest = entity.getReceiveStorageObject();\n        this.kenVolumeFileSystem     = new KenVolumeFileSystem( this.volumeManager );\n    }\n    @Override\n    public StorageIOResponse 
receive(Chanface chanface) throws IOException {\n        return this.receiveInternal(chanface, null, null );\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface,Number offset, Number endSize) throws IOException {\n        return this.receiveInternal(chanface, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws IOException {\n        return this.receiveInternal(randomAccessChanface, null, null );\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws IOException {\n        return this.receiveInternal(randomAccessChanface, offset, endSize );\n    }\n\n    private long freeSpace(Volume volume ){\n        VolumeCapacity64 volumeCapacity = volume.getVolumeCapacity();\n        return volumeCapacity.getDefinitionCapacity() - volumeCapacity.getUsedSize();\n    }\n\n    private SQLiteExecutor getSQLiteExecutor( PhysicalVolume physicalVolume ) {\n        VolumeConfig config = this.volumeManager.getConfig();\n        String mountPoint = physicalVolume.getMountPoint().getMountPoint();\n        String url = mountPoint + config.getPathSeparator() + this.spannedVolume.getGuid()+ config.getSqliteFileExtension();\n        return (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);\n    }\n\n    private StorageIOResponse receiveInternal(Chanface chanface,Number offset, Number endSize) throws IOException {\n        List<LogicVolume> volumes = this.spannedVolume.queryChildren();\n        UnifiedTransmitConstructor constructor = new UnifiedTransmitConstructor();\n        GUID physicsGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume( this.spannedVolume.getGuid() );\n        PhysicalVolume physicalVolume = 
this.volumeManager.getPhysicalVolume(physicsGuid);\n        SQLiteExecutor sqLiteExecutor = this.getSQLiteExecutor(physicalVolume);\n        int idx = this.kenVolumeFileSystem.hashStorageObjectID(this.storageReceiveIORequest.getStorageObjectGuid(), volumes.size());\n        //Debug.trace(\"存储的GUID是：\"+storageReceiveIORequest.getStorageObjectGuid());\n        GUID volumeGuid = null;\n        try {\n            volumeGuid = this.kenVolumeFileSystem.getSpannedIndexTableTargetGuid(sqLiteExecutor, idx);\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n        //Debug.trace( volumeGuid );\n        LogicVolume targetVolume = this.volumeManager.get(volumeGuid);\n\n\n        if (this.freeSpace(targetVolume) < storageReceiveIORequest.getSize().longValue()) {\n\n            for (LogicVolume volume : volumes) {\n                if (this.freeSpace(volume) > storageReceiveIORequest.getSize().longValue()) {\n                    try {\n                        this.kenVolumeFileSystem.insertSpanLinkedVolumeTable(sqLiteExecutor, idx, storageReceiveIORequest.getStorageObjectGuid(), volume.getGuid());\n                    } catch (SQLException e) {\n                        throw new UIOException(e);\n                    }\n                    //TitanSimpleReceiveEntity64 receiveEntity = new TitanSimpleReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.channel, (SimpleVolume) volume);\n\n                    ReceiveEntity receiveEntity = constructor.getReceiveEntity(volume.getClass(), this.volumeManager, this.storageReceiveIORequest, chanface, volume);\n                    return offset == null && endSize == null\n                            ? 
volume.receive( receiveEntity )\n                            : volume.receive( receiveEntity, offset, endSize );\n                }\n            }\n        } else {\n            //TitanSimpleReceiveEntity64 receiveEntity = new TitanSimpleReceiveEntity64( this.volumeManager, this.storageReceiveIORequest, this.channel, (SimpleVolume) targetVolume);\n            ReceiveEntity receiveEntity = constructor.getReceiveEntity(targetVolume.getClass(), this.volumeManager, this.storageReceiveIORequest, chanface, targetVolume);\n            return offset == null && endSize == null\n                    ? targetVolume.receive( receiveEntity )\n                    : targetVolume.receive(receiveEntity, offset, endSize);\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/spanned/receive/TitanSpannedReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.spanned.receive;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic class TitanSpannedReceiveEntity64 extends ArchReceiveEntity implements SpannedReceiveEntity64{\n    protected SpannedVolume    spannedVolume;\n\n    protected SpannedReceive64 spannedReceive;\n    public TitanSpannedReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, SpannedVolume spannedVolume) {\n        super(volumeManager, storageReceiveIORequest, channel);\n        this.spannedVolume = spannedVolume;\n        this.spannedReceive = new TitanSpannedReceive64( this );\n    }\n\n    @Override\n    public StorageIOResponse receive() throws IOException {\n        return this.spannedReceive.receive(this.channel);\n    }\n\n    @Override\n    public StorageIOResponse receive(Number offset, Number endSize) throws IOException {\n        return this.spannedReceive.receive(this.channel, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException {\n        return this.spannedReceive.randomReceive( this.channel,offset,endSize );\n    }\n\n    @Override\n    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public SpannedVolume getSpannedVolume() {\n        return this.spannedVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/BufferOutMate.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.concurrent.Semaphore;\n\npublic class BufferOutMate implements Pinenut {\n    private Semaphore   bufferOutLock;\n    private int bufferOutThreadId;\n\n    public BufferOutMate() {\n    }\n\n    public BufferOutMate(Semaphore bufferOutLock, int bufferOutThreadId) {\n        this.bufferOutLock = bufferOutLock;\n        this.bufferOutThreadId = bufferOutThreadId;\n    }\n\n\n    public Semaphore getBufferOutLock() {\n        return bufferOutLock;\n    }\n\n\n    public void setBufferOutLock(Semaphore bufferOutLock) {\n        this.bufferOutLock = bufferOutLock;\n    }\n\n\n    public int getBufferOutThreadId() {\n        return bufferOutThreadId;\n    }\n\n\n    public void setBufferOutThreadId(int bufferOutThreadId) {\n        this.bufferOutThreadId = bufferOutThreadId;\n    }\n\n    public String toString() {\n        return \"BufferOutMate{bufferOutLock = \" + bufferOutLock + \", bufferOutThreadId = \" + bufferOutThreadId + \"}\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/BufferOutStatus.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic enum BufferOutStatus implements StripBufferStatus{\n    Writing                  ,\n    Suspended                ,\n    Exiting                  ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/BufferWriteStatus.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic enum BufferWriteStatus implements StripBufferStatus{\n    Writing                  ,\n    Suspended                ,\n    Synchronization          ,\n    Exiting                  ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/CacheBlock.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\n\npublic interface CacheBlock extends Pinenut {\n\n    CacheBlockStatus getStatus();\n    void setStatus( CacheBlockStatus status );\n\n    Number getValidByteStart();\n    void setValidByteStart( Number validByteStart );\n\n    Number getValidByteEnd();\n    void setValidByteEnd( Number validByteEnd );\n\n    Number getByteStart();\n    void setByteStart( Number byteStart );\n\n    Number getByteEnd();\n    void setByteEnd( Number byteEnd );\n\n    int getCacheBlockNumber();\n    void setCacheBlockNumber( int cacheBlockNumber );\n\n    long getBufferWriteThreadId();\n    void setBufferWriteThreadId( long bufferWriteThreadId );\n\n    LogicVolume getVolume();\n    void setVolume( LogicVolume volume );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/CacheBlockStatus.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic enum CacheBlockStatus {\n    Writing,\n    Free,\n    Full;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/LocalStripedTaskThread.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.storage.volume.runtime.ArchStripedTaskThread;\nimport com.pinecone.hydra.storage.volume.runtime.VolumeJob;\n\nimport java.util.concurrent.Semaphore;\n\npublic class LocalStripedTaskThread extends ArchStripedTaskThread {\n\n    public LocalStripedTaskThread ( String szName, Processum parent, VolumeJob volumeJob ) {\n        super( szName, parent, volumeJob );\n\n        volumeJob.applyThread( this );\n    }\n\n    StripBufferStatus getJobStatus(){\n        return this.mVolumeJob.getStatus();\n    }\n\n    void setJobStatus( StripBufferStatus status ){\n        this.mVolumeJob.setStatus( status );\n    }\n\n    Semaphore getBlockerLatch(){\n        return this.mVolumeJob.getBlockerLatch();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/ReceiveBufferInStatus.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic enum ReceiveBufferInStatus implements StripBufferStatus{\n    Writing                  ,\n    Suspended                ,\n    Exiting                  ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/ReceiveBufferOutStatus.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic enum ReceiveBufferOutStatus implements StripBufferStatus{\n    Writing                  ,\n    Suspended                ,\n    Exiting                  ;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripBufferInJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic interface StripBufferInJob extends StripExportJob {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripBufferOutJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic interface StripBufferOutJob extends StripExportJob {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripBufferStatus.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface StripBufferStatus extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripCacheBlock.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\n\npublic class StripCacheBlock implements CacheBlock{\n    protected CacheBlockStatus    status;\n    protected Number              validByteStart;\n    protected Number              validByteEnd;\n    protected int                 cacheBlockNumber;\n    protected Number              byteStart;\n    protected Number              byteEnd;\n    protected long                bufferWriteThreadId;\n\n    protected LogicVolume         volume;\n\n    public StripCacheBlock( int cacheBlockNumber, Number byteStart, Number byteEnd ){\n        this.status = CacheBlockStatus.Free;\n        this.byteStart = byteStart;\n        this.byteEnd = byteEnd;\n        this.cacheBlockNumber = cacheBlockNumber;\n    }\n\n\n    @Override\n    public CacheBlockStatus getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public void setStatus(CacheBlockStatus status) {\n        this.status = status;\n    }\n\n    @Override\n    public Number getValidByteStart() {\n        return this.validByteStart;\n    }\n\n    @Override\n    public void setValidByteStart(Number validByteStart) {\n        this.validByteStart = validByteStart;\n    }\n\n    @Override\n    public Number getValidByteEnd() {\n        return this.validByteEnd;\n    }\n\n    @Override\n    public void setValidByteEnd(Number validByteEnd) {\n        this.validByteEnd = validByteEnd;\n    }\n\n    @Override\n    public Number getByteStart() {\n        return this.byteStart;\n    }\n\n    @Override\n    public void setByteStart(Number byteStart) {\n        this.byteStart = byteStart;\n    }\n\n    @Override\n    public Number getByteEnd() {\n        return this.byteEnd;\n    }\n\n    @Override\n    public void setByteEnd(Number byteEnd) {\n        this.byteEnd = byteEnd;\n    }\n\n    @Override\n    public int getCacheBlockNumber() {\n        return 
this.cacheBlockNumber;\n    }\n\n    @Override\n    public void setCacheBlockNumber(int cacheBlockNumber) {\n        this.cacheBlockNumber = cacheBlockNumber;\n    }\n\n    @Override\n    public long getBufferWriteThreadId() {\n        return this.bufferWriteThreadId;\n    }\n\n    @Override\n    public void setBufferWriteThreadId(long bufferWriteThreadId) {\n        this.bufferWriteThreadId = bufferWriteThreadId;\n    }\n\n    @Override\n    public LogicVolume getVolume() {\n        return this.volume;\n    }\n\n    @Override\n    public void setVolume(LogicVolume volume) {\n        this.volume = volume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripChannelReceiverJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic interface StripChannelReceiverJob extends StripReceiverJob{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripExportJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.hydra.storage.volume.runtime.VolumeJob;\n\npublic interface StripExportJob extends VolumeJob {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripLockEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.List;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.locks.Lock;\n\npublic interface StripLockEntity extends Pinenut {\n    Object getLockObject();\n\n    void setLockObject( Object lockObject );\n\n    void unlockBufferToFileLock();\n\n    Semaphore getBufferToFileLock();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripReceiveBufferInJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic interface StripReceiveBufferInJob extends StripReceiverJob{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripReceiveBufferOutJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic interface StripReceiveBufferOutJob extends StripReceiverJob{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripReceiverJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.hydra.storage.volume.runtime.VolumeJob;\n\npublic interface StripReceiverJob extends VolumeJob {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/StripTerminalStateRecord.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\npublic class StripTerminalStateRecord implements TerminalStateRecord {\n    protected int       sequentialNumbering;\n    protected Number    validByteStart;\n    protected Number    validByteEnd;\n\n    @Override\n    public int getSequentialNumbering() {\n        return this.sequentialNumbering;\n    }\n\n    @Override\n    public void setSequentialNumbering(int sequentialNumbering) {\n        this.sequentialNumbering = sequentialNumbering;\n    }\n\n    @Override\n    public Number getValidByteStart() {\n        return this.validByteStart;\n    }\n\n    @Override\n    public void setValidByteStart(Number validByteStart) {\n        this.validByteStart = validByteStart;\n    }\n\n    @Override\n    public Number getValidByteEnd() {\n        return this.validByteEnd;\n    }\n\n    @Override\n    public void setValidByteEnd(Number validByteEnd) {\n        this.validByteEnd = validByteEnd;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TerminalStateRecord.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TerminalStateRecord extends Pinenut {\n    int getSequentialNumbering();\n    void setSequentialNumbering( int sequentialNumbering );\n\n    Number getValidByteStart();\n    void setValidByteStart( Number validByteStart );\n\n    Number getValidByteEnd();\n    void setValidByteEnd( Number validByteEnd );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanLocalStripedVolume.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchLogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;\nimport com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic class TitanLocalStripedVolume extends ArchLogicVolume implements LocalStripedVolume {\n    private StripedVolumeManipulator stripedVolumeManipulator;\n\n    public TitanLocalStripedVolume(VolumeManager volumeManager, StripedVolumeManipulator stripedVolumeManipulator) {\n        super(volumeManager);\n        this.stripedVolumeManipulator = stripedVolumeManipulator;\n    }\n\n    public TitanLocalStripedVolume( VolumeManager volumeManager){\n        super(volumeManager);\n    }\n\n    public TitanLocalStripedVolume(){\n    }\n\n\n    @Override\n    public void extendLogicalVolume(GUID physicalGuid) {\n\n    }\n\n    @Override\n    public List<GUID> listPhysicalVolume() {\n        return null;\n    }\n\n\n    public void setStripedVolumeManipulator(StripedVolumeManipulator stripedVolumeManipulator ){\n        this.stripedVolumeManipulator = stripedVolumeManipulator;\n    }\n\n    @Override\n    public String toJSONString() {\n        return 
BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public void setVolumeTree(VolumeManager volumeManager) {\n        this.volumeManager = volumeManager;\n    }\n\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity) throws IOException {\n        return entity.receive();\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, Number offset, Number endSize) throws IOException {\n        return entity.receive( offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(ReceiveEntity entity, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(ReceiveEntity entity, CacheBlock cacheBlock, byte[] buffer) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity) throws IOException {\n        return entity.export();\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) throws UIOException {\n        return entity.export( cacheBlock, offset, endSize, buffer );\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, boolean accessRandom) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, Number offset, Number endSize, boolean accessRandom) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(ExporterEntity entity, CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer, boolean accessRandom) {\n        return null;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public boolean existStorageObject(GUID 
storageObject) throws SQLException {\n        return false;\n    }\n\n    @Override\n    public void build() throws SQLException {\n        VolumeConfig config = this.volumeManager.getConfig();\n        PhysicalVolume smallestCapacityPhysicalVolume = this.volumeManager.getSmallestCapacityPhysicalVolume();\n        String url = smallestCapacityPhysicalVolume.getMountPoint().getMountPoint() + config.getPathSeparator() + this.guid + config.getSqliteFileExtension();\n        SQLiteExecutor sqLiteExecutor = (SQLiteExecutor) this.volumeManager.getKenusPool().allot(url);\n        this.kenVolumeFileSystem.createStripMetaTable( sqLiteExecutor );\n        this.volumeManager.put( this );\n        this.kenVolumeFileSystem.insertSimpleTargetMappingTab( smallestCapacityPhysicalVolume.getGuid(), this.getGuid() );\n    }\n\n    @Override\n    public void storageExpansion(GUID volumeGuid) {\n        this.volumeManager.storageExpansion( this.getGuid(), volumeGuid );\n        LogicVolume logicVolume = this.volumeManager.get(volumeGuid);\n        this.stripedVolumeManipulator.updateDefinitionCapacity(this.guid, logicVolume.getVolumeCapacity().getDefinitionCapacity());\n    }\n\n    @Override\n    public void deductCapacity(long deductCapacity) {\n        this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() + deductCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public void increaseCapacity(long increaseCapacity) {\n        this.volumeCapacity.setUsedSize( this.volumeCapacity.getUsedSize() - increaseCapacity );\n        this.volumeManager.updateVolumeUsedSize( this.guid, this.volumeCapacity );\n    }\n\n    @Override\n    public boolean checkCapacity(long size) {\n        long freeSpace = this.volumeCapacity.getDefinitionCapacity() - this.volumeCapacity.getUsedSize();\n        return freeSpace > size;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripBufferInJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.export.StripedExport;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\nimport com.pinecone.hydra.storage.volume.runtime.VolumeJobCompromiseException;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.locks.Lock;\n\npublic class TitanStripBufferInJob implements StripBufferInJob {\n    protected VolumeManager                 volumeManager;\n\n    protected StorageExportIORequest        object;\n    protected int                           jobCount;\n    protected int                           jobCode;\n    protected LogicVolume                   volume;\n    protected Chanface channel;\n    protected AtomicInteger                 currentCacheBlockNumber;\n    protected final Semaphore               blockerLatch;\n\n    protected StripBufferStatus             status;\n\n    protected List< CacheBlock >            cacheBlockGroup;\n    protected LocalStripedTaskThread        parentThread;\n\n    protected byte[]                        buffer;\n    protected Lock                          majorStatusIO;\n    protected MasterVolumeGram              masterVolumeGram;\n\n    protected Number                        offset;\n\n    protected Number                        endSize;\n\n    protected UnifiedTransmitConstructor    constructor;\n\n    public TitanStripBufferInJob(MasterVolumeGram 
masterVolumeGram, StripedExport stripedExport, LogicVolume volume, StorageExportIORequest object, int jobCode ){\n        this.masterVolumeGram             = masterVolumeGram;\n        this.object                       = object;\n        this.jobCount                     = this.masterVolumeGram.getJobCount();\n        this.jobCode                      = jobCode;\n        this.volumeManager                = stripedExport.getVolumeManager();\n        this.volume                       = volume;\n        this.channel                      = stripedExport.getFileChannel();\n        this.currentCacheBlockNumber      = new AtomicInteger( jobCode );\n        this.blockerLatch                 = new Semaphore(0);\n        this.buffer                       = masterVolumeGram.getBuffer();\n        this.cacheBlockGroup              = masterVolumeGram.getCacheGroup();\n        this.constructor                  = new UnifiedTransmitConstructor();\n\n        this.intoWritingStatus();\n    }\n\n    @Override\n    public void applyThread( LocalStripedTaskThread taskThread ) {\n        this.parentThread                 = taskThread;\n        this.masterVolumeGram            = (MasterVolumeGram) this.parentThread.parentExecutum();\n        this.majorStatusIO                = this.masterVolumeGram.getMajorStatusIO();\n    }\n\n    @Override\n    public StripBufferStatus getStatus() {\n        return this.status;\n    }\n\n    protected void intoWritingStatus() {\n        this.status = BufferWriteStatus.Writing;\n    }\n\n    protected void intoSuspendedStatus() {\n        this.status = BufferWriteStatus.Suspended;\n    }\n\n    protected void intoSynchronizationStatus() {\n        this.status = BufferWriteStatus.Synchronization;\n    }\n\n    protected void intoExitingStatus() {\n        this.status = BufferWriteStatus.Exiting;\n    }\n\n\n    @Override\n    public void execute() throws VolumeJobCompromiseException {\n        long size = this.object.getSize().longValue();\n        long 
stripSize = this.volumeManager.getConfig().getDefaultStripSize().longValue();\n        long currentPosition    = 0;\n\n\n        MasterVolumeGram parentProcess = (MasterVolumeGram)this.parentThread.parentExecutum();\n        while ( true ){\n            if( this.cacheBlockGroup.get( currentCacheBlockNumber.get()).getStatus() == CacheBlockStatus.Free){\n                long bufferSize = stripSize;\n                if( currentPosition >= size ){\n                    this.intoExitingStatus();\n                    this.wakeUpBufferToFileThread();\n                    break;\n                }\n\n                this.cacheBlockGroup.get( currentCacheBlockNumber.get()).setStatus( CacheBlockStatus.Writing );\n                if( currentPosition + bufferSize > size ){\n                    bufferSize = size - currentPosition;\n                }\n\n                try {\n\n//                    this.volume.channelExport( this.object, this.channel, this.cacheBlockGroup.get( currentCacheBlockNumber.get() ), currentPosition, bufferSize, this.buffer);\n                    //TitanSimpleExportEntity64 exportEntity = new TitanSimpleExportEntity64( this.volumeManager, this.object, this.channel );\n                    ExporterEntity exportEntity = this.constructor.getExportEntity(this.volume.getClass(), this.volumeManager, this.object, this.channel,this.volume);\n                    this.volume.export( exportEntity, this.cacheBlockGroup.get( currentCacheBlockNumber.get() ), currentPosition, bufferSize, this.buffer );\n                    currentPosition += bufferSize;\n                    this.wakeUpBufferToFileThread();\n\n                    // 切换缓存块\n                    this.intoSynchronizationStatus();\n                    this.currentCacheBlockNumber.addAndGet(this.jobCount);\n                    if( this.currentCacheBlockNumber.get() > cacheBlockGroup.size() - 1 ){\n                        this.currentCacheBlockNumber.getAndSet( jobCode );\n                    }\n               
     if( this.cacheBlockGroup.get( this.currentCacheBlockNumber.get() ).getStatus() == CacheBlockStatus.Full ){\n                        try {\n                            this.intoSuspendedStatus();\n                            Debug.trace(\"线程\"+this.parentThread.getName()+\":\"+\"我摸鱼了，没得写了\");\n                             this.blockerLatch.acquire();\n                        }\n                        catch ( InterruptedException e ){\n                            Thread.currentThread().interrupt();\n                            e.printStackTrace();\n                        }\n                    }\n\n                    this.intoWritingStatus();\n\n                }\n                catch ( IOException e ) {\n                    throw new VolumeJobCompromiseException( e );\n                }\n            }\n            else {\n                try {\n                    this.intoSuspendedStatus();\n                    Debug.trace(\"我摸鱼了，没得写了\");\n                    this.wakeUpBufferToFileThread();\n                   this.blockerLatch.acquire();\n                }\n                catch ( InterruptedException e ){\n                    Thread.currentThread().interrupt();\n                    e.printStackTrace();\n                }\n            }\n        }\n\n        Debug.trace(\"我是线程\" + jobCode + \"我已经完成任务\");\n    }\n\n    @Override\n    public Semaphore getBlockerLatch() {\n        return this.blockerLatch;\n    }\n\n    @Override\n    public void setStatus(StripBufferStatus status) {\n        this.status = status;\n    }\n\n    private void wakeUpBufferToFileThread(){\n        this.majorStatusIO.lock();\n        try {\n            MasterVolumeGram masterVolumeGram = (MasterVolumeGram) this.parentThread.parentExecutum();\n            LocalStripedTaskThread bufferToFileThread = masterVolumeGram.getChildThread( this.masterVolumeGram.getBufferOutThreadId() );\n            if( bufferToFileThread.getJobStatus() == BufferOutStatus.Suspended ){\n                
Debug.trace(\"线程\"+bufferToFileThread.getName()+\"被唤醒\");\n                bufferToFileThread.setJobStatus( BufferOutStatus.Writing );\n                this.masterVolumeGram.getBufferOutBlockerLatch().release();\n            }\n        }\n        finally {\n            this.majorStatusIO.unlock();\n        }\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripBufferOutJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\nimport com.pinecone.hydra.storage.volume.runtime.VolumeJobCompromiseException;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic class TitanStripBufferOutJob implements StripBufferOutJob {\n    protected VolumeManager             volumeManager;\n    protected Chanface channel;\n    protected int                       jobCount;\n\n    protected StripBufferStatus         status;\n    protected List< CacheBlock >        cacheBlocksGroup;\n    protected AtomicInteger             currentPosition;\n    protected LocalStripedTaskThread    parentThread;\n    protected byte[]                    mBuffer;\n    protected long                      totalSize;\n    protected long                      exportSize;\n    protected final Semaphore           mBlockerLatch;\n    protected MasterVolumeGram          masterVolumeGram;\n\n    public TitanStripBufferOutJob(MasterVolumeGram masterVolumeGram, VolumeManager volumeManager, Chanface channel, long totalSize, Semaphore blockerLatch){\n        this.masterVolumeGram  = masterVolumeGram;\n        this.volumeManager     = volumeManager;\n        this.channel           = channel;\n        this.jobCount          = masterVolumeGram.getJobCount();\n        this.currentPosition   = new AtomicInteger(0);\n        this.cacheBlocksGroup  = masterVolumeGram.getCacheGroup();\n        this.mBuffer           = masterVolumeGram.getBuffer();\n        this.totalSize         = totalSize;\n        this.mBlockerLatch     = blockerLatch;\n//        this.masterVolumeGram.applyBufferOutBlockerLatch( this.mBlockerLatch );\n    
}\n\n    @Override\n    public void applyThread(LocalStripedTaskThread thread) {\n        this.parentThread = thread;\n    }\n\n    @Override\n    public StripBufferStatus getStatus() {\n        return this.status;\n    }\n\n    protected void setWritingStatus() {\n        this.status = BufferOutStatus.Writing;\n    }\n\n    protected void setSuspendedStatus() {\n        this.status = BufferOutStatus.Suspended;\n    }\n\n    protected void setExitingStatus() {\n        this.status = BufferOutStatus.Exiting;\n    }\n\n    @Override\n    public void execute() throws VolumeJobCompromiseException {\n        while( true ){\n\n            if( this.exportSize >= this.totalSize ){\n                this.setExitingStatus();\n                this.masterVolumeGram.getMajorJobFuture().complete( true );\n                return;\n            }\n            if( !this.isAllExiting() ){\n                try{\n                    Debug.trace(\"摸鱼罗\");\n                    this.setSuspendedStatus();\n                    this.mBlockerLatch.acquire();\n                }\n                catch ( InterruptedException e ) {\n                    Thread.currentThread().interrupt();\n                    this.masterVolumeGram.getMajorJobFuture().completeExceptionally( e );\n                    break;\n                }\n            }\n            List<CacheBlock> writableCacheBlocks = this.getWritableCacheBlocks();\n//            Debug.trace(\"准备干活\");\n            if (!writableCacheBlocks.isEmpty()){\n                Debug.trace(\"执行写入\");\n\n                //ByteBuffer buffer = this.mergeArrays( writableCacheBlocks );\n                //ByteBuffer writeBuffer = ByteBuffer.wrap(buffer, 0, buffer.length );\n                try {\n                    //this.channel.write(buffer);\n\n                    int write = this.channel.write(this.mBuffer, writableCacheBlocks);\n                    this.exportSize += write;\n                }\n                catch ( IOException e ) {\n                  
  this.masterVolumeGram.getMajorJobFuture().completeExceptionally( e );\n                    break;\n                }\n                //Arrays.fill(this.mBuffer, (byte) 0);\n                this.updateCurrentPosition( writableCacheBlocks.size() );\n\n\n                this.setSuspendedStatus();\n                //唤醒所有缓存线程\n                //this.lockEntity.unlockPipeStage();\n                for ( int i = 0; i < jobCount; ++i ){\n                    CacheBlock cacheBlock = this.cacheBlocksGroup.get(i);\n                    MasterVolumeGram masterVolumeGram = (MasterVolumeGram) this.parentThread.parentExecutum();\n                    LocalStripedTaskThread bufferWriteThread = masterVolumeGram.getChildThread(cacheBlock.getBufferWriteThreadId());\n                    //当文件较小时，只有一个线程在执行写入且一次执行完就结束线程，可能会导致thread为null的情况\n                    if( bufferWriteThread != null ){\n                        StripBufferStatus jobStatus = bufferWriteThread.getJobStatus();\n                        if( jobStatus == BufferWriteStatus.Suspended ){\n                            bufferWriteThread.setJobStatus( BufferWriteStatus.Writing );\n                            Semaphore jobLock = bufferWriteThread.getBlockerLatch();\n                            Debug.trace(\"线程\"+bufferWriteThread.getName()+\"被唤醒\");\n                            jobLock.release();\n                        }\n                    }\n\n                }\n            }\n\n        }\n\n        this.masterVolumeGram.getMajorJobFuture().complete( false );\n        //Debug.warnSyn( \"wangwang\" );\n    }\n\n    @Override\n    public Semaphore getBlockerLatch() {\n        return this.mBlockerLatch;\n    }\n\n    @Override\n    public void setStatus(StripBufferStatus status) {\n        this.status = status;\n    }\n\n//    private int getCacheLength(){\n//        int rounds = 0;\n//        int length = 0;\n//        for( int i = this.currentPosition.get(); i < this.cacheBlocksGroup.size(); i++ ){\n//            if( i == 
currentPosition.get() && rounds == 1 ){\n//                break;\n//            }\n//\n//            CacheBlock cacheBlock = cacheBlocksGroup.get(i);\n//            if( cacheBlock.getStatus() != CacheBlockStatus.Full){\n//                return length;\n//            }\n//            length++;\n//            if( i == this.cacheBlocksGroup.size() - 1 ){\n//                rounds++;\n//                i = -1;\n//            }\n//        }\n//        return length;\n//    }\n//\n//    private ByteBuffer mergeArrays( List< CacheBlock > writableCacheBlocks ){\n//        // 计算所有缓存块的总长度\n//        int totalLength = 0;\n//        for (CacheBlock cacheBlock : writableCacheBlocks) {\n//            totalLength += cacheBlock.getValidByteEnd().intValue() - cacheBlock.getValidByteStart().intValue();\n//        }\n//\n//        // 创建一个 ByteBuffer 来存储合并的数据\n//        ByteBuffer mergedBuffer = ByteBuffer.allocate(totalLength);\n//\n//        // 将数据从 mBuffer 复制到 mergedBuffer\n//        for (CacheBlock cacheBlock : writableCacheBlocks) {\n//            int start = cacheBlock.getValidByteStart().intValue();\n//            int end = cacheBlock.getValidByteEnd().intValue();\n//            int bufferSize = end - start;\n//\n//            // 将 mBuffer 中的数据复制到 mergedBuffer\n//            mergedBuffer.put(mBuffer, start, bufferSize);\n//\n//            // 将缓存块状态设置为 Free\n//            cacheBlock.setStatus(CacheBlockStatus.Free);\n//        }\n//        this.exportSize += totalLength;\n//        // 准备将 mergedBuffer 用于读取\n//        mergedBuffer.flip();\n//        return mergedBuffer;\n//    }\n\n    private List< CacheBlock > getWritableCacheBlocks(){\n        ArrayList<CacheBlock> cacheBlocks = new ArrayList<>();\n        int rounds = 0;\n        for( int i = this.currentPosition.get(); i < this.cacheBlocksGroup.size(); i++ ){\n            if( i == currentPosition.get() && rounds == 1 ){\n                break;\n            }\n\n            CacheBlock cacheBlock = cacheBlocksGroup.get(i);\n 
           if( cacheBlock.getStatus() != CacheBlockStatus.Full){\n                break;\n            }\n            cacheBlocks.add( cacheBlock );\n            if( i == this.cacheBlocksGroup.size() - 1 ){\n                rounds++;\n                i = -1;\n            }\n        }\n        return cacheBlocks;\n    }\n\n    private void updateCurrentPosition( int length ){\n        for( int i= 0; i < length; i++ ){\n            int incremented = this.currentPosition.incrementAndGet();\n            if( incremented == cacheBlocksGroup.size() ){\n                this.currentPosition.getAndSet( 0 );\n            }\n        }\n    }\n\n    private boolean isAllExiting(){\n        for( int i = 0; i < jobCount; ++i ){\n            CacheBlock cacheBlock = this.cacheBlocksGroup.get(i);\n            MasterVolumeGram masterVolumeGram = (MasterVolumeGram) this.parentThread.parentExecutum();\n            LocalStripedTaskThread bufferWriteThread = masterVolumeGram.getChildThread(cacheBlock.getBufferWriteThreadId());\n            if( bufferWriteThread == null ){\n                return false;\n            }\n            StripBufferStatus jobStatus = bufferWriteThread.getJobStatus();\n            if( jobStatus != BufferWriteStatus.Exiting ){\n                return false;\n            }\n        }\n        return true;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripLockEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport java.util.concurrent.Semaphore;\n\npublic class TitanStripLockEntity implements StripLockEntity{\n    private Semaphore     bufferToFileLock;\n\n    private Object        lockObject;\n\n\n    public TitanStripLockEntity(){}\n\n    public TitanStripLockEntity( Object lockObject, Semaphore bufferToFileLock ){\n        this.lockObject = lockObject;\n        this.bufferToFileLock  = bufferToFileLock;\n    }\n\n    @Override\n    public Object getLockObject() {\n        return this.lockObject;\n    }\n\n    @Override\n    public void setLockObject(Object lockObject) {\n        this.lockObject = lockObject;\n    }\n\n\n    @Override\n    public Semaphore getBufferToFileLock() {\n        return this.bufferToFileLock;\n    }\n\n    @Override\n    public void unlockBufferToFileLock() {\n        this.bufferToFileLock.release();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripReceiveBufferInJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\nimport com.pinecone.hydra.storage.volume.runtime.VolumeJobCompromiseException;\n\nimport java.io.IOException;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.locks.Lock;\n\npublic class TitanStripReceiveBufferInJob implements StripReceiveBufferInJob{\n    protected MasterVolumeGram          masterVolumeGram;\n\n    protected byte[]                    buffer;\n\n    protected int                       jobCode;\n\n    protected CacheBlock                cacheBlock;\n\n    protected StripBufferStatus         status;\n\n    protected Chanface                  stream;\n\n\n    protected final Semaphore           blockerLatch;\n\n    protected LocalStripedTaskThread    parentThread;\n\n    protected Lock                      majorStatusIO;\n\n    public TitanStripReceiveBufferInJob(MasterVolumeGram masterVolumeGram, int jobCode, Chanface stream, LogicVolume volume){\n        this.masterVolumeGram         = masterVolumeGram;\n        this.buffer                   = this.masterVolumeGram.getBuffer();\n        this.jobCode                  = jobCode;\n        this.cacheBlock               = this.masterVolumeGram.getCacheGroup().get( jobCode );\n        this.status                   = ReceiveBufferInStatus.Suspended;\n        this.stream                   = stream;\n        this.blockerLatch             = new Semaphore(0);\n        this.cacheBlock.setVolume( volume );\n    }\n\n\n    @Override\n    public void execute() throws VolumeJobCompromiseException {\n        while( true ){\n            try {\n                if( this.status == ReceiveBufferInStatus.Exiting ){\n                    this.masterVolumeGram.majorJobCountDown();\n                    
break;\n                }\n                if(  this.masterVolumeGram.getCurrentBufferInJobCode() == this.jobCode ){\n                    Debug.trace(\"我是缓存线程我开始工作了\");\n                    this.status = ReceiveBufferInStatus.Writing;\n                    this.cacheBlock.setStatus( CacheBlockStatus.Writing );\n                    int start = this.cacheBlock.getByteStart().intValue();\n                    int end   = this.cacheBlock.getByteEnd().intValue();\n                    int length = end - start;\n                    int read = this.stream.read(this.buffer, this.cacheBlock.getByteStart().intValue(), length);\n                    this.cacheBlock.setValidByteStart( start );\n                    this.cacheBlock.setValidByteEnd( start + read );\n\n                    this.status = ReceiveBufferInStatus.Suspended;\n                    this.cacheBlock.setStatus( CacheBlockStatus.Full );\n\n                    LocalStripedTaskThread bufferOutThread = this.masterVolumeGram.getChildThread(this.masterVolumeGram.getBufferOutThreadId());\n                    //检测缓存写出线程的状态为摸鱼状态则唤醒\n                    if( bufferOutThread.getJobStatus() == ReceiveBufferOutStatus.Suspended ){\n                        this.masterVolumeGram.getBufferOutBlockerLatch().release();\n                    }\n                    //如果下一个线程不在工作则唤醒\n                    int nextJobCode = this.jobCode+1;\n                    if( nextJobCode >= this.masterVolumeGram.getJobCount() ){\n                        nextJobCode = 0;\n                    }\n\n                    CacheBlock nextCacheBlock = this.masterVolumeGram.getCacheGroup().get(nextJobCode);\n\n                    LocalStripedTaskThread nextThread = this.masterVolumeGram.getChildThread(nextCacheBlock.getBufferWriteThreadId());\n                    if( nextThread.getJobStatus() == ReceiveBufferInStatus.Suspended && nextJobCode != this.jobCode ){\n                        nextThread.getBlockerLatch().release();\n                    }\n               
 }\n                Debug.trace(\"我休息了\");\n                this.blockerLatch.acquire();\n            } catch (IOException | InterruptedException e) {\n                throw new RuntimeException(e);\n            }\n\n\n        }\n\n    }\n\n    @Override\n    public void applyThread(LocalStripedTaskThread thread) {\n        this.parentThread  = thread;\n        this.majorStatusIO = this.masterVolumeGram.getMajorStatusIO();\n    }\n\n    @Override\n    public StripBufferStatus getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public Semaphore getBlockerLatch() {\n        return this.blockerLatch;\n    }\n\n    @Override\n    public void setStatus(StripBufferStatus status) {\n        this.status = status;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripReceiveBufferOutJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\nimport com.pinecone.hydra.storage.volume.runtime.VolumeJobCompromiseException;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\nimport java.util.List;\nimport java.util.concurrent.Semaphore;\n\npublic class TitanStripReceiveBufferOutJob implements StripReceiveBufferOutJob{\n    protected MasterVolumeGram              masterVolumeGram;\n\n    protected byte[]                        buffer;\n\n    protected StripBufferStatus             status;\n\n    protected Chanface                      stream;\n\n    protected final Semaphore               blockerLatch;\n\n    protected List< CacheBlock >            cacheBlocksGroup;\n\n    protected LocalStripedTaskThread        parentThread;\n\n    protected VolumeManager                 volumeManager;\n\n    protected long                          totalSize;\n\n    protected long                          exportSize;\n\n\n    protected StorageReceiveIORequest       request;\n\n    protected OnVolumeFileSystem            kenVolumeFileSystem;\n\n    protected MappedExecutor                executor;\n    protected UnifiedTransmitConstructor    constructor;\n\n\n    public TitanStripReceiveBufferOutJob(MasterVolumeGram 
masterVolumeGram, VolumeManager volumeManager, Chanface stream, StorageReceiveIORequest request, MappedExecutor executor ){\n        this.masterVolumeGram       = masterVolumeGram;\n        this.stream                 = stream;\n        this.totalSize              = request.getSize().longValue();\n        this.volumeManager          = volumeManager;\n        this.blockerLatch           = new Semaphore(0);\n        this.masterVolumeGram.applyBufferOutBlockerLatch( this.blockerLatch );\n        this.exportSize             = 0;\n        this.cacheBlocksGroup       = this.masterVolumeGram.getCacheGroup();\n        this.status                 = ReceiveBufferOutStatus.Suspended;\n        this.request                = request;\n        this.buffer                 = masterVolumeGram.getBuffer();\n        this.kenVolumeFileSystem    = new KenVolumeFileSystem( this.volumeManager );\n        this.executor               = executor;\n    }\n    @Override\n    public void execute() throws VolumeJobCompromiseException {\n        while( true ){\n            try {\n                Debug.trace(\"我摸鱼了\");\n                this.blockerLatch.acquire();\n                if( exportSize >= totalSize ){\n                    this.status = ReceiveBufferOutStatus.Exiting;\n                    for( CacheBlock cacheBlock : cacheBlocksGroup ){\n                        LocalStripedTaskThread bufferInThread = this.masterVolumeGram.getChildThread(cacheBlock.getBufferWriteThreadId());\n                        bufferInThread.setJobStatus( ReceiveBufferInStatus.Exiting );\n                        bufferInThread.getBlockerLatch().release();\n                    }\n                    break;\n                }\n                Debug.trace(\"开始上班\");\n                this.status = ReceiveBufferOutStatus.Writing;\n                CacheBlock currentCacheBlock = this.cacheBlocksGroup.get(this.masterVolumeGram.getCurrentBufferInJobCode());\n                int start = 
currentCacheBlock.getValidByteStart().intValue();\n                int end   = currentCacheBlock.getValidByteEnd().intValue();\n                // todo应该使用适配器，现在默认底层是simpleVolume\n//                TitanSimpleStreamReceiveEntity64 entity = new TitanSimpleStreamReceiveEntity64( this.volumeManager,this.request, this.stream, (SimpleVolume) currentCacheBlock.getVolume() );\n                ReceiveEntity entity = this.constructor.getReceiveEntity(currentCacheBlock.getVolume().getClass(), this.volumeManager, request, this.stream, currentCacheBlock.getVolume());\n                StorageIOResponse response = currentCacheBlock.getVolume().receive(entity, currentCacheBlock, this.buffer);\n\n                this.status = ReceiveBufferOutStatus.Suspended;\n                if( !this.isExist() ){\n                    LogicVolume currentVolume = this.cacheBlocksGroup.get(this.masterVolumeGram.getCurrentBufferInJobCode()).getVolume();\n\n                    this.kenVolumeFileSystem.insertStripMetaTable( this.executor, this.masterVolumeGram.getCurrentBufferInJobCode(), currentVolume.getGuid(), this.request.getStorageObjectGuid(), response.getSourceName() );\n\n                }\n\n                this.exportSize += ( end - start );\n                this.masterVolumeGram.setCurrentBufferInJobCode( this.masterVolumeGram.getCurrentBufferInJobCode() + 1 );\n                if( this.masterVolumeGram.getCurrentBufferInJobCode() >= this.masterVolumeGram.getJobCount() ){\n                    this.masterVolumeGram.setCurrentBufferInJobCode( 0 );\n                }\n                //唤醒所有线程\n                for( CacheBlock cacheBlock : cacheBlocksGroup ){\n                    LocalStripedTaskThread bufferInThread = this.masterVolumeGram.getChildThread(cacheBlock.getBufferWriteThreadId());\n                    if( bufferInThread.getJobStatus() == ReceiveBufferInStatus.Suspended ){\n                        bufferInThread.getBlockerLatch().release();\n                    }\n                }\n  
          } catch (SQLException | IOException | InterruptedException e) {\n                throw new RuntimeException(e);\n            }\n\n\n        }\n\n    }\n\n    @Override\n    public void applyThread(LocalStripedTaskThread thread) {\n        this.parentThread = thread;\n    }\n\n    @Override\n    public StripBufferStatus getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public Semaphore getBlockerLatch() {\n        return this.blockerLatch;\n    }\n\n    @Override\n    public void setStatus(StripBufferStatus status) {\n        this.status = status;\n    }\n\n    boolean isExist(  ) throws SQLException {\n        LogicVolume currentVolume = this.cacheBlocksGroup.get(this.masterVolumeGram.getCurrentBufferInJobCode()).getVolume();\n        return this.kenVolumeFileSystem.isExistStripMetaTable(this.executor, currentVolume.getGuid(), this.request.getStorageObjectGuid());\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/TitanStripReceiverJob.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\n\nimport java.io.IOException;\nimport java.sql.SQLException;\nimport java.util.concurrent.Semaphore;\n\npublic class TitanStripReceiverJob implements StripChannelReceiverJob{\n    private MasterVolumeGram            masterVolumeGram;\n    private LogicVolume                 volume;\n    private int                         jobCount;\n    private int                         jobCode;\n    private VolumeManager               volumeManager;\n    private StorageReceiveIORequest     object;\n    private Chanface                    chanface;\n    private OnVolumeFileSystem          kenVolumeFileSystem;\n    private MappedExecutor              executor;\n    private StorageIOResponse           storageIOResponse;\n    private Number                      offset;\n    private Number                      endSize;\n    private UnifiedTransmitConstructor  constructor;\n\n\n\n    public TitanStripReceiverJob(MasterVolumeGram masterVolumeGram,ReceiveEntity entity, Chanface channel, int jobCount, int jobCode, LogicVolume volume, MappedExecutor executor, Number offset, Number ednSize ){\n        this.masterVolumeGram       = masterVolumeGram;\n        this.volumeManager       
   = entity.getVolumeManager();\n        this.object                 = entity.getReceiveStorageObject();\n        this.chanface = channel;\n        this.jobCount               = jobCount;\n        this.jobCode                = jobCode;\n        this.volume                 = volume;\n        this.kenVolumeFileSystem    = new KenVolumeFileSystem( this.volumeManager );\n        this.executor               = executor;\n        this.offset                 = offset;\n        this.endSize                = ednSize;\n        this.constructor            = new UnifiedTransmitConstructor();\n    }\n\n    @Override\n    public void execute()  {\n        //每次计算要保存的部分\n        long size = this.endSize.longValue();\n        long stripSize = this.volumeManager.getConfig().getDefaultStripSize().longValue();\n        long currentPosition = jobCode * stripSize + this.offset.longValue();\n\n        while( true ){\n\n            long bufferSize = stripSize;\n            if( currentPosition >= size ){\n                this.masterVolumeGram.majorJobCountDown();\n                break;\n            }\n            if( currentPosition + bufferSize > size ){\n                bufferSize = size - currentPosition;\n            }\n\n            try {\n//                this.storageIOResponse = this.volume.channelReceive(this.object, this.fileChannel, currentPosition, bufferSize);\n//                TitanSimpleReceiveEntity64 receiveEntity = new TitanSimpleReceiveEntity64( this.volumeManager, this.object, this.fileChannel, (SimpleVolume) volume);\n                ReceiveEntity receiveEntity = this.constructor.getReceiveEntity(this.volume.getClass(), this.volumeManager, this.object, this.chanface, volume);\n                this.storageIOResponse = this.volume.receive( receiveEntity, currentPosition, bufferSize );\n            } catch (IOException e) {\n                e.printStackTrace();\n                throw new RuntimeException(e);\n            }\n\n            currentPosition += bufferSize * 
jobCount;\n        }\n        try {\n            if( this.storageIOResponse != null ){\n                this.kenVolumeFileSystem.insertStripMetaTable( executor, jobCode,  volume.getGuid(), this.object.getStorageObjectGuid(), this.storageIOResponse.getSourceName() );\n            }\n            //this.kenVolumeFileSystem.insertKVFSFileStripTable( executor, jobCode,  volume.getGuid(), this.object.getStorageObjectGuid(), this.storageIOResponse.getSourceName() );\n        } catch (SQLException e) {\n            throw new ProxyProvokeHandleException(e);\n        }\n    }\n\n    @Override\n    public void applyThread(LocalStripedTaskThread thread) {\n\n    }\n\n    @Override\n    public StripBufferStatus getStatus() {\n        return null;\n    }\n\n    @Override\n    public Semaphore getBlockerLatch() {\n        return null;\n    }\n\n    @Override\n    public void setStatus(StripBufferStatus status) {\n\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/StripedExport.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.Exporter;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\n\npublic interface StripedExport extends Exporter {\n    StorageIOResponse export(Chanface chanface) throws UIOException;\n\n    StorageIOResponse export( Chanface chanface,Number offset, Number endSize ) throws UIOException;\n\n    StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException;\n\n    StorageIOResponse export( RandomAccessChanface randomAccessChanface,Number offset, Number endSize ) throws UIOException;\n\n    VolumeManager getVolumeManager();\n\n    StorageExportIORequest getStorageIORequest();\n\n    Chanface getFileChannel();\n\n    StripedVolume getStripedVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/StripedExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export;\n\npublic interface StripedExport64 extends StripedExport {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/StripedExportEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export;\n\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\n\npublic interface StripedExportEntity extends ExporterEntity {\n    StripedVolume getStripedVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/StripedExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export;\n\npublic interface StripedExportEntity64 extends StripedExportEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/TitanStripedExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.TitanStorageExportIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripBufferInJob;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripBufferOutJob;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\nimport com.pinecone.hydra.system.Hydrogen;\n\nimport java.io.File;\nimport java.sql.SQLException;\nimport java.util.List;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.Semaphore;\n\npublic class TitanStripedExport64 implements StripedExport64{\n    protected VolumeManager             volumeManager;\n\n    protected StorageExportIORequest    storageExportIORequest;\n\n    protected Chanface                  channel;\n\n    protected StripedVolume             stripedVolume;\n\n    protected OnVolumeFileSystem        kenVolumeFileSystem;\n\n    public TitanStripedExport64( StripedExportEntity64 entity ){\n        this.volumeManager              = entity.getVolumeManager();\n        this.storageExportIORequest     
= entity.getStorageIORequest();\n        this.channel                    = entity.getChannel();\n        this.stripedVolume              = entity.getStripedVolume();\n        this.kenVolumeFileSystem        = new KenVolumeFileSystem( this.volumeManager );\n    }\n    @Override\n    public StorageIOResponse export(Chanface chanface) throws UIOException {\n        //初始化参数\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n        int jobCount = volumes.size();\n\n        int StripResidentCacheAllotRatio = volumeManager.getConfig().getStripResidentCacheAllotRatio();\n        Processum supProc = null;\n        MasterVolumeGram masterVolumeGram = null;\n        try {\n            SQLiteExecutor sqLiteExecutor = this.stripedVolume.getSQLiteExecutor();\n\n            supProc = this.volumeManager.getSuperiorProcess();\n            masterVolumeGram = this.createMasterVolumeGram(supProc,jobCount,StripResidentCacheAllotRatio);\n\n            // 创建文件写入线程\n            createBufferOutJob( masterVolumeGram, this.storageExportIORequest.getSize().longValue());\n\n            // 处理每个卷的线程\n            createAndStartVolumeThreads(volumes, sqLiteExecutor,  masterVolumeGram );\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n\n        // 同步等待任务完成并处理异常\n        this.waitForTaskCompletion(masterVolumeGram);\n        //masterVolumeGram.majorJobCountDownLatchWait();\n\n        supProc.getTaskManager().erase(masterVolumeGram);\n\n\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(Chanface chanface, Number offset, Number endSize) throws UIOException {\n        //初始化参数\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n        int jobCount = volumes.size();\n\n        int StripResidentCacheAllotRatio = volumeManager.getConfig().getStripResidentCacheAllotRatio();\n        Hydrogen hydrogen = null;\n        MasterVolumeGram masterVolumeGram = null;\n        try {\n            
SQLiteExecutor sqLiteExecutor = this.stripedVolume.getSQLiteExecutor();\n\n            hydrogen = this.volumeManager.getHydrogen();\n            masterVolumeGram = this.createMasterVolumeGram(hydrogen,jobCount,StripResidentCacheAllotRatio);\n\n            // 创建文件写入线程\n            createBufferOutJob( masterVolumeGram, this.storageExportIORequest.getSize().longValue());\n\n            // 处理每个卷的线程\n            createAndStartVolumeThreads(volumes, sqLiteExecutor,  masterVolumeGram );\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n\n        // 同步等待任务完成并处理异常\n        this.waitForTaskCompletion(masterVolumeGram);\n        //masterVolumeGram.majorJobCountDownLatchWait();\n\n        hydrogen.getTaskManager().erase(masterVolumeGram);\n\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(RandomAccessChanface randomAccessChanface) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    private MasterVolumeGram createMasterVolumeGram(Processum supProcess, int jobCount, int StripResidentCacheAllotRatio ) {\n        Number stripSize = this.volumeManager.getConfig().getDefaultStripSize();\n\n        MasterVolumeGram masterVolumeGram = new MasterVolumeGram(this.stripedVolume.getGuid().toString(), supProcess,jobCount, StripResidentCacheAllotRatio, stripSize.intValue());\n        supProcess.getTaskManager().add(masterVolumeGram);\n        return masterVolumeGram;\n    }\n\n    private void  createBufferOutJob(MasterVolumeGram masterVolumeGram, long totalSize) {\n        Semaphore BufferOutLock = new Semaphore(0);\n\n        TitanStripBufferOutJob BufferOutJob = new TitanStripBufferOutJob(masterVolumeGram,this.volumeManager, this.channel,totalSize, BufferOutLock );\n        LocalStripedTaskThread BufferOutThread = new 
LocalStripedTaskThread(\"BufferOut\", masterVolumeGram, BufferOutJob);\n        masterVolumeGram.getTaskManager().add(BufferOutThread);\n        BufferOutThread.start();\n\n        masterVolumeGram.applyBufferOutBlockerLatch( BufferOutLock );\n        masterVolumeGram.applyBufferOutThreadId( BufferOutThread.getExecutumId() );\n\n    }\n\n    private void createAndStartVolumeThreads(List<LogicVolume> volumes, SQLiteExecutor sqLiteExecutor,  MasterVolumeGram masterVolumeGram) throws SQLException {\n\n        for ( LogicVolume volume : volumes ) {\n\n            String sourceName = this.kenVolumeFileSystem.getStripMetaSourceName(sqLiteExecutor, volume.getGuid(), this.storageExportIORequest.getStorageObjectGuid());\n            if ( sourceName == null ){\n                continue;\n            }\n            int code = this.kenVolumeFileSystem.getStripMetaCode(sqLiteExecutor, volume.getGuid(), this.storageExportIORequest.getStorageObjectGuid());\n            File file = new File(sourceName);\n            StorageExportIORequest titanStorageExportIORequest = new TitanStorageExportIORequest();\n            titanStorageExportIORequest.setStorageObjectGuid( this.storageExportIORequest.getStorageObjectGuid() );\n            titanStorageExportIORequest.setSourceName(sourceName);\n            titanStorageExportIORequest.setSize(file.length());\n\n\n            TitanStripBufferInJob exportJob = new TitanStripBufferInJob(masterVolumeGram,this, volume, titanStorageExportIORequest,code);\n            LocalStripedTaskThread taskThread = new LocalStripedTaskThread(this.stripedVolume.getName() + code, masterVolumeGram, exportJob);\n            for( int i = code; i < masterVolumeGram.getCacheGroup().size(); i += masterVolumeGram.getJobCount() ){\n                masterVolumeGram.getCacheGroup().get( i ).setBufferWriteThreadId( taskThread.getExecutumId() );\n            }\n            masterVolumeGram.getTaskManager().add(taskThread);\n            taskThread.start();\n\n        }\n\n   
 }\n\n    private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException {\n//        try {\n//            masterVolumeGram.getTaskManager().syncWaitingTerminated();\n//        }\n//        catch (Exception e) {\n//            throw new ProxyProvokeHandleException(e);\n//        }\n\n        try{\n            Object ret = masterVolumeGram.getMajorJobFuture().get();\n\n            if ( ret instanceof Exception ) {\n                throw new ProxyProvokeHandleException( (Exception) ret );\n            }\n\n            if ( !(Boolean) ret ) {\n                throw new IllegalStateException( \"Buffer-To-File thread has been returned `false`, which is expected `true`.\" );\n            }\n        }\n        catch ( InterruptedException | ExecutionException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public VolumeManager getVolumeManager() {\n        return this.volumeManager;\n    }\n\n    @Override\n    public StorageExportIORequest getStorageIORequest() {\n        return this.storageExportIORequest;\n    }\n\n    @Override\n    public Chanface getFileChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public StripedVolume getStripedVolume() {\n        return this.stripedVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/TitanStripedExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchExportEntity;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\npublic class TitanStripedExportEntity64 extends ArchExportEntity implements StripedExportEntity64 {\n    protected StripedVolume     stripedVolume;\n\n    protected StripedExport64   stripedExport;\n\n    public TitanStripedExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, StripedVolume stripedVolume) {\n        super(volumeManager, storageExportIORequest, channel);\n        this.stripedVolume = stripedVolume;\n        this.stripedExport = new TitanStripedExport64( this );\n    }\n\n    @Override\n    public StorageIOResponse export() throws UIOException {\n        return this.stripedExport.export(this.channel);\n    }\n\n    @Override\n    public StorageIOResponse export(Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) {\n        return null;\n    }\n\n    @Override\n    public StripedVolume getStripedVolume() {\n        return this.stripedVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/StripedChannelExport.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.volume.entity.Exporter;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\n\npublic interface StripedChannelExport extends Exporter {\n    StorageIOResponse export() throws UIOException;\n\n    StorageIOResponse export( Number offset, Number endSize ) throws UIOException;\n\n    VolumeManager getVolumeManager();\n\n    StorageExportIORequest getStorageIORequest();\n\n    Chanface getFileChannel();\n\n    StripedVolume getStripedVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/StripedChannelExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel;\n\npublic interface StripedChannelExport64 extends StripedChannelExport{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/StripedChannelExportEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\n\npublic interface StripedChannelExportEntity extends ExporterEntity {\n    Chanface getChannel();\n    void setChannel( Chanface channel );\n    StripedVolume getStripedVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/StripedChannelExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel;\n\npublic interface StripedChannelExportEntity64 extends StripedChannelExportEntity{\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/TitanStripedChannelExport64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.TitanStorageExportIORequest;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripBufferOutJob;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\n\nimport java.io.File;\nimport java.sql.SQLException;\nimport java.util.List;\nimport java.util.concurrent.Semaphore;\n\npublic class TitanStripedChannelExport64 implements StripedChannelExport64{\n    private VolumeManager           volumeManager;\n    private StorageExportIORequest  storageExportIORequest;\n    private Chanface channel;\n    private StripedVolume           stripedVolume;\n    private OnVolumeFileSystem      kenVolumeFileSystem;\n\n    public TitanStripedChannelExport64(StripedChannelExportEntity entity){\n        this.volumeManager = entity.getVolumeManager();\n        this.storageExportIORequest = entity.getStorageIORequest();\n        this.channel = entity.getChannel();\n        this.stripedVolume = entity.getStripedVolume();\n        this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager );\n    }\n\n    @Override\n    public StorageIOResponse 
export() throws UIOException {\n        //初始化参数\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n        int jobCount = volumes.size();\n\n        int StripResidentCacheAllotRatio = volumeManager.getConfig().getStripResidentCacheAllotRatio();\n        MasterVolumeGram masterVolumeGram = null;\n        try {\n            SQLiteExecutor sqLiteExecutor = this.stripedVolume.getSQLiteExecutor();\n\n            Hydrogen hydrogen = this.volumeManager.getHydrogen();\n            masterVolumeGram = this.createMasterVolumeGram(hydrogen,jobCount,StripResidentCacheAllotRatio);\n\n            // 创建文件写入线程\n            createBufferOutJob( masterVolumeGram, this.storageExportIORequest.getSize().longValue());\n\n            // 处理每个卷的线程\n            createAndStartVolumeThreads(volumes, sqLiteExecutor,  masterVolumeGram );\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n\n        // 同步等待任务完成并处理异常\n        this.waitForTaskCompletion(masterVolumeGram);\n\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(Number offset, Number endSize) throws UIOException {\n        //初始化参数\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n        int jobCount = volumes.size();\n\n        int StripResidentCacheAllotRatio = volumeManager.getConfig().getStripResidentCacheAllotRatio();\n        MasterVolumeGram masterVolumeGram = null;\n        try {\n            SQLiteExecutor sqLiteExecutor = this.stripedVolume.getSQLiteExecutor();\n\n            Hydrogen hydrogen = this.volumeManager.getHydrogen();\n            masterVolumeGram = this.createMasterVolumeGram(hydrogen,jobCount,StripResidentCacheAllotRatio);\n\n            // 创建文件写入线程\n            createBufferOutJob( masterVolumeGram, this.storageExportIORequest.getSize().longValue());\n\n            // 处理每个卷的线程\n            createAndStartVolumeThreads(volumes, sqLiteExecutor,  masterVolumeGram );\n        } catch (SQLException e) {\n    
        throw new UIOException(e);\n        }\n\n        // 同步等待任务完成并处理异常\n        this.waitForTaskCompletion(masterVolumeGram);\n\n        return null;\n    }\n\n    private MasterVolumeGram createMasterVolumeGram(Hydrogen hydrogen, int jobCount, int StripResidentCacheAllotRatio ) {\n        Number stripSize = this.volumeManager.getConfig().getDefaultStripSize();\n\n        MasterVolumeGram masterVolumeGram = new MasterVolumeGram(this.stripedVolume.getGuid().toString(), hydrogen,jobCount, StripResidentCacheAllotRatio, stripSize.intValue());\n        hydrogen.getTaskManager().add(masterVolumeGram);\n        return masterVolumeGram;\n    }\n\n    private void  createBufferOutJob(MasterVolumeGram masterVolumeGram, long totalSize) {\n        Semaphore BufferOutLock = new Semaphore(0);\n\n        TitanStripBufferOutJob BufferOutJob = new TitanStripBufferOutJob(masterVolumeGram,this.volumeManager, this.channel,totalSize, BufferOutLock );\n        LocalStripedTaskThread BufferOutThread = new LocalStripedTaskThread(\"BufferOut\", masterVolumeGram, BufferOutJob);\n        masterVolumeGram.getTaskManager().add(BufferOutThread);\n        BufferOutThread.start();\n\n        masterVolumeGram.applyBufferOutBlockerLatch( BufferOutLock );\n        masterVolumeGram.applyBufferOutThreadId( BufferOutThread.getExecutumId() );\n\n    }\n\n    private void createAndStartVolumeThreads(List<LogicVolume> volumes, SQLiteExecutor sqLiteExecutor,  MasterVolumeGram masterVolumeGram) throws SQLException {\n\n        for ( LogicVolume volume : volumes ) {\n\n            String sourceName = this.kenVolumeFileSystem.getStripMetaSourceName(sqLiteExecutor, volume.getGuid(), this.storageExportIORequest.getStorageObjectGuid());\n            if ( sourceName == null ){\n                continue;\n            }\n            int code = this.kenVolumeFileSystem.getStripMetaCode(sqLiteExecutor, volume.getGuid(), this.storageExportIORequest.getStorageObjectGuid());\n            File file = new 
File(sourceName);\n            StorageExportIORequest titanStorageExportIORequest = new TitanStorageExportIORequest();\n            titanStorageExportIORequest.setStorageObjectGuid( this.storageExportIORequest.getStorageObjectGuid() );\n            titanStorageExportIORequest.setSourceName(sourceName);\n            titanStorageExportIORequest.setSize(file.length());\n\n\n//            TitanStripBufferInJob exportJob = new TitanStripBufferInJob(masterVolumeGram,this, volume, titanStorageExportIORequest,code);\n//            LocalStripedTaskThread taskThread = new LocalStripedTaskThread(this.stripedVolume.getName() + code, masterVolumeGram, exportJob);\n//            for( int i = code; i < masterVolumeGram.getCacheGroup().size(); i += masterVolumeGram.getJobCount() ){\n//                masterVolumeGram.getCacheGroup().get( i ).setBufferWriteThreadId( taskThread.getId() );\n//            }\n//            masterVolumeGram.getTaskManager().add(taskThread);\n//            taskThread.start();\n\n        }\n\n    }\n\n    private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException {\n        try {\n            masterVolumeGram.getTaskManager().syncWaitingTerminated();\n        }\n        catch (Exception e) {\n            throw new ProxyProvokeHandleException(e);\n        }\n    }\n\n    @Override\n    public VolumeManager getVolumeManager() {\n        return this.volumeManager;\n    }\n\n    @Override\n    public StorageExportIORequest getStorageIORequest() {\n        return this.storageExportIORequest;\n    }\n\n    @Override\n    public Chanface getFileChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public StripedVolume getStripedVolume() {\n        return this.stripedVolume;\n    }\n\n    private byte[] initializationBuffer(int jobCount, int bufferSize, int StripResidentCacheAllotRatio ){\n        return new byte[jobCount * bufferSize * StripResidentCacheAllotRatio];\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/export/channel/TitanStripedChannelExportEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.export.channel;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchExportEntity;\nimport com.pinecone.hydra.storage.StorageExportIORequest;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\npublic class TitanStripedChannelExportEntity64 extends ArchExportEntity implements StripedChannelExportEntity64{\n    private Chanface channel;\n    private StripedChannelExport64      stripedChannelExport64;\n    private StripedVolume               stripedVolume;\n    public TitanStripedChannelExportEntity64(VolumeManager volumeManager, StorageExportIORequest storageExportIORequest, Chanface channel, StripedVolume stripedVolume) {\n        super(volumeManager, storageExportIORequest,null);\n        this.channel = channel;\n        this.stripedVolume = stripedVolume;\n        this.stripedChannelExport64 = new TitanStripedChannelExport64( this );\n    }\n\n    @Override\n    public Chanface getChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public void setChannel(Chanface channel) {\n        this.channel = channel;\n    }\n\n    @Override\n    public StorageIOResponse export() throws UIOException {\n        return this.stripedChannelExport64.export();\n    }\n\n    @Override\n    public StorageIOResponse export(Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse export(CacheBlock cacheBlock, Number offset, Number endSize, byte[] buffer) {\n        return null;\n    }\n\n\n    @Override\n    public StripedVolume getStripedVolume() {\n        return this.stripedVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceive.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive;\n\nimport com.pinecone.hydra.storage.volume.entity.Receiver;\n\npublic interface StripedReceive extends Receiver {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive;\n\npublic interface StripedReceive64 extends StripedReceive {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive;\n\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\n\npublic interface StripedReceiveEntity extends ReceiveEntity {\n    StripedVolume  getStripedVolume();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive;\n\npublic interface StripedReceiveEntity64 extends StripedReceiveEntity {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceiver.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive;\n\nimport com.pinecone.hydra.storage.volume.entity.Receiver;\n\npublic interface StripedReceiver extends Receiver {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/StripedReceiverEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive;\n\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\n\npublic interface StripedReceiverEntity extends ReceiveEntity {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/TitanStripedReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripReceiveBufferInJob;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripReceiveBufferOutJob;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripReceiverJob;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\nimport com.pinecone.hydra.system.Hydrogen;\n\nimport java.sql.SQLException;\nimport java.util.List;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.Semaphore;\n\npublic class TitanStripedReceive64 implements StripedReceive64{\n    protected VolumeManager             volumeManager;\n\n    protected StorageReceiveIORequest   storageReceiveIORequest;\n\n    protected StripedVolume             stripedVolume;\n\n    protected 
ReceiveEntity             entity;\n\n    protected OnVolumeFileSystem        kenVolumeFileSystem;\n\n    protected MappedExecutor            mappedExecutor;\n\n    public TitanStripedReceive64( StripedReceiveEntity64 entity ){\n        this.volumeManager              = entity.getVolumeManager();\n        this.storageReceiveIORequest    = entity.getReceiveStorageObject();\n        this.kenVolumeFileSystem        = new KenVolumeFileSystem( this.volumeManager );\n        this.stripedVolume              = entity.getStripedVolume();\n        this.entity                     = entity;\n        try {\n            this.mappedExecutor             = this.getExecutor();\n        } catch (SQLException e) {\n            throw new RuntimeException(e);\n        }\n    }\n    @Override\n    public StorageIOResponse receive(Chanface chanface) throws UIOException {\n        Hydrogen hydrogen = this.volumeManager.getHydrogen();\n        MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen);\n        hydrogen.getTaskManager().add( masterVolumeGram );\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n\n\n        int index = 0;\n        masterVolumeGram.setMajorJobCountDownNum( volumes.size() );\n        for( LogicVolume volume : volumes ){\n            TitanStripReceiverJob receiverJob = new TitanStripReceiverJob(masterVolumeGram, this.entity, chanface, volumes.size(), index, volume, mappedExecutor, 0, this.entity.getReceiveStorageObject().getSize() );\n            LocalStripedTaskThread taskThread = new LocalStripedTaskThread(  this.stripedVolume.getName() + index, masterVolumeGram, receiverJob );\n            masterVolumeGram.getTaskManager().add( taskThread );\n            taskThread.start();\n\n            index ++;\n        }\n//        this.waitForTaskCompletion( masterVolumeGram );\n//        masterVolumeGram.kill();\n\n        masterVolumeGram.majorJobCountDownLatchWait();\n        return null;\n    
}\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface,Number offset, Number endSize) throws UIOException {\n        Hydrogen hydrogen = this.volumeManager.getHydrogen();\n        MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen);\n        hydrogen.getTaskManager().add( masterVolumeGram );\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n\n\n        int index = 0;\n        masterVolumeGram.setMajorJobCountDownNum( volumes.size() );\n        for( LogicVolume volume : volumes ){\n            TitanStripReceiverJob receiverJob = new TitanStripReceiverJob(masterVolumeGram, this.entity, chanface, volumes.size(), index, volume, this.mappedExecutor, offset, offset.longValue()+endSize.longValue() );\n            LocalStripedTaskThread taskThread = new LocalStripedTaskThread(  this.stripedVolume.getName() + index, masterVolumeGram, receiverJob );\n            masterVolumeGram.getTaskManager().add( taskThread );\n            taskThread.start();\n\n            index ++;\n        }\n\n//        this.waitForTaskCompletion( masterVolumeGram );\n//        masterVolumeGram.kill();\n\n        masterVolumeGram.majorJobCountDownLatchWait();\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws UIOException {\n        Hydrogen hydrogen = this.volumeManager.getHydrogen();\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n        MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen, volumes.size(), 1, this.volumeManager.getConfig().getDefaultStripSize().intValue() );\n        hydrogen.getTaskManager().add( masterVolumeGram );\n        MappedExecutor executor = null;\n        try {\n         
   executor = this.getExecutor();\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n\n        TitanStripReceiveBufferOutJob bufferOutJob = new TitanStripReceiveBufferOutJob( masterVolumeGram, this.volumeManager, randomAccessChanface, this.storageReceiveIORequest, executor );\n        LocalStripedTaskThread taskThread = new LocalStripedTaskThread( \"bufferOut\",masterVolumeGram, bufferOutJob );\n        masterVolumeGram.getTaskManager().add( taskThread );\n        masterVolumeGram.applyBufferOutThreadId( taskThread.getExecutumId() );\n        taskThread.start();\n\n        int index = 0;\n        masterVolumeGram.setMajorJobCountDownNum( volumes.size() );\n        for( LogicVolume volume : volumes ){\n            TitanStripReceiveBufferInJob bufferInJob = new TitanStripReceiveBufferInJob( masterVolumeGram, index,randomAccessChanface,volume );\n            LocalStripedTaskThread bufferInThread = new LocalStripedTaskThread(volume.getName(), masterVolumeGram, bufferInJob);\n            masterVolumeGram.getTaskManager().add( bufferInThread );\n            CacheBlock cacheBlock = masterVolumeGram.getCacheGroup().get(index);\n            cacheBlock.setBufferWriteThreadId( bufferInThread.getExecutumId() );\n            bufferInThread.start();\n            index++;\n        }\n\n//        this.waitForTaskCompletion( masterVolumeGram );\n\n        masterVolumeGram.majorJobCountDownLatchWait();\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    private MappedExecutor getExecutor() throws SQLException {\n        VolumeConfig config = this.volumeManager.getConfig();\n        GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.stripedVolume.getGuid());\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid);\n       
 String url = physicalVolume.getMountPoint().getMountPoint()+ config.getPathSeparator() +this.stripedVolume.getGuid()+ config.getSqliteFileExtension();\n        return this.volumeManager.getKenusPool().allot(url);\n    }\n\n    private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException {\n        Semaphore semaphore = new Semaphore(0);\n        //semaphore.a\n        CountDownLatch latch = new CountDownLatch(10);\n        latch.countDown();\n\n        try{\n            latch.await();\n        }\n        catch ( InterruptedException e ) {\n            Thread.currentThread().interrupt();\n            throw new ProxyProvokeHandleException( e );\n        }\n//        try {\n//            masterVolumeGram.getTaskManager().syncWaitingTerminated();\n//        }\n//        catch (Exception e) {\n//            throw new ProxyProvokeHandleException(e);\n//        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/TitanStripedReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic class TitanStripedReceiveEntity64 extends ArchReceiveEntity implements StripedReceiveEntity64{\n    protected StripedVolume         stripedVolume;\n\n    protected StripedReceive64      stripedReceive;\n\n    protected Chanface              chanface;\n\n    public TitanStripedReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, StripedVolume stripedVolume) {\n        super(volumeManager, storageReceiveIORequest, channel);\n        this.stripedVolume  = stripedVolume;\n        this.stripedReceive = new TitanStripedReceive64( this );\n        this.chanface = channel;\n    }\n\n    @Override\n    public StorageIOResponse receive() throws IOException {\n        return this.stripedReceive.receive(this.chanface);\n    }\n\n    @Override\n    public StorageIOResponse receive(Number offset, Number endSize) throws IOException {\n        return this.stripedReceive.receive( this.chanface, offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StripedVolume getStripedVolume() {\n        return this.stripedVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/StripedChannelReceiver.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel;\n\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.receive.StripedReceiver;\n\npublic interface StripedChannelReceiver extends StripedReceiver {\n    StorageIOResponse channelReceive( ) throws UIOException;\n    StorageIOResponse channelReceive(Number offset, Number endSize) throws UIOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/StripedChannelReceiver64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel;\n\npublic interface StripedChannelReceiver64 extends StripedChannelReceiver{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/StripedChannelReceiverEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.receive.StripedReceiverEntity;\n\npublic interface StripedChannelReceiverEntity extends StripedReceiverEntity {\n    Chanface getChannel();\n    void setChannel( Chanface channel );\n    StripedVolume getStripedVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/StripedChannelReceiverEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel;\n\npublic interface StripedChannelReceiverEntity64 extends StripedChannelReceiverEntity{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/TitanStripedChannelReceiver64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteHost;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanStripReceiverJob;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\n\nimport java.sql.SQLException;\nimport java.util.List;\n\npublic class TitanStripedChannelReceiver64 implements StripedChannelReceiver64{\n    private Chanface fileChannel;\n    private VolumeManager               volumeManager;\n    private StorageReceiveIORequest     storageReceiveIORequest;\n    private StripedVolume               stripedVolume;\n    private ReceiveEntity               entity;\n    private OnVolumeFileSystem          kenVolumeFileSystem;\n    private SQLiteHost                  mSqLiteHost;\n\n    public TitanStripedChannelReceiver64( StripedChannelReceiverEntity entity ){\n        this.entity = 
entity;\n        this.fileChannel   = entity.getChannel();\n        this.volumeManager = entity.getVolumeManager();\n        this.storageReceiveIORequest = entity.getReceiveStorageObject();\n        this.stripedVolume = entity.getStripedVolume();\n        this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager );\n    }\n\n    @Override\n    public StorageIOResponse channelReceive() throws UIOException {\n        Hydrogen hydrogen = this.volumeManager.getHydrogen();\n        MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen);\n        hydrogen.getTaskManager().add( masterVolumeGram );\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n\n        try {\n            MappedExecutor sqLiteExecutor = this.getExecutor();\n\n            int index = 0;\n            for( LogicVolume volume : volumes ){\n                TitanStripReceiverJob receiverJob = new TitanStripReceiverJob(masterVolumeGram, this.entity, this.fileChannel, volumes.size(), index, volume, sqLiteExecutor, 0, this.entity.getReceiveStorageObject().getSize() );\n                LocalStripedTaskThread taskThread = new LocalStripedTaskThread(  this.stripedVolume.getName() + index, masterVolumeGram, receiverJob );\n                masterVolumeGram.getTaskManager().add( taskThread );\n                taskThread.start();\n\n                index ++;\n            }\n            this.mSqLiteHost.close();\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n        this.waitForTaskCompletion( masterVolumeGram );\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse channelReceive(Number offset, Number endSize) throws UIOException {\n        Hydrogen hydrogen = this.volumeManager.getHydrogen();\n        MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen);\n        hydrogen.getTaskManager().add( masterVolumeGram 
);\n        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n\n        MappedExecutor sqLiteExecutor = null;\n        try {\n            sqLiteExecutor = this.getExecutor();\n        } catch (SQLException e) {\n            throw new UIOException(e);\n        }\n\n        int index = 0;\n        for( LogicVolume volume : volumes ){\n            TitanStripReceiverJob receiverJob = new TitanStripReceiverJob(masterVolumeGram, this.entity, this.fileChannel, volumes.size(), index, volume, sqLiteExecutor, offset, offset.longValue()+endSize.longValue() );\n            LocalStripedTaskThread taskThread = new LocalStripedTaskThread(  this.stripedVolume.getName() + index, masterVolumeGram, receiverJob );\n            masterVolumeGram.getTaskManager().add( taskThread );\n            taskThread.start();\n\n            index ++;\n        }\n\n        this.waitForTaskCompletion( masterVolumeGram );\n        return null;\n    }\n\n    private MappedExecutor getExecutor() throws SQLException {\n        VolumeConfig config = this.volumeManager.getConfig();\n        GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.stripedVolume.getGuid());\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid);\n        String url = physicalVolume.getMountPoint().getMountPoint()+ config.getPathSeparator() +this.stripedVolume.getGuid()+ config.getSqliteFileExtension();\n        return this.volumeManager.getKenusPool().allot(url);\n    }\n\n    private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException {\n        try {\n            masterVolumeGram.getTaskManager().syncWaitingTerminated();\n        }\n        catch (Exception e) {\n            throw new ProxyProvokeHandleException(e);\n        }\n    }\n\n//    @Override\n//    public StorageIOResponse receive() throws UIOException {\n//        return null;\n//    }\n//\n//    @Override\n//    public StorageIOResponse 
receive(Number offset, Number endSize) throws UIOException {\n//        return null;\n//    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/channnel/TitanStripedChannelReceiverEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.channnel;\n\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\n\npublic class TitanStripedChannelReceiverEntity64 extends ArchReceiveEntity implements StripedChannelReceiverEntity64{\n    private Chanface channel;\n    private StripedVolume            stripedVolume;\n    private StripedChannelReceiver64 stripedChannelReceiver64;\n    public TitanStripedChannelReceiverEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, Chanface channel, StripedVolume stripedVolume) {\n        super(volumeManager, storageReceiveIORequest,null);\n        this.channel = channel;\n        this.stripedVolume = stripedVolume;\n        this.stripedChannelReceiver64 = new TitanStripedChannelReceiver64( this );\n    }\n\n    @Override\n    public Chanface getChannel() {\n        return this.channel;\n    }\n\n    @Override\n    public void setChannel(Chanface channel) {\n        this.channel = channel;\n    }\n\n    @Override\n    public StripedVolume getStripedVolume() {\n        return this.stripedVolume;\n    }\n\n    @Override\n    public StorageIOResponse receive() throws UIOException {\n        return this.stripedChannelReceiver64.channelReceive();\n    }\n\n    @Override\n    public StorageIOResponse receive(Number offset, Number endSize) throws UIOException {\n        return this.stripedChannelReceiver64.channelReceive( offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Number 
offset, Number endSize) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/StripedStreamReceive.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream;\n\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.receive.StripedReceiver;\n\npublic interface StripedStreamReceive extends StripedReceiver {\n    StorageIOResponse streamReceive( ) throws UIOException;\n\n    StorageIOResponse streamReceive( Number offset, Number endSize ) throws UIOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/StripedStreamReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream;\n\npublic interface StripedStreamReceive64 extends StripedStreamReceive{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/StripedStreamReceiveEntity.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream;\n\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.receive.StripedReceiverEntity;\n\nimport java.io.InputStream;\n\npublic interface StripedStreamReceiveEntity extends StripedReceiverEntity {\n    InputStream getStream();\n    void setStream( InputStream stream );\n    StripedVolume getStripedVolume();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/StripedStreamReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream;\n\npublic interface StripedStreamReceiveEntity64 extends StripedStreamReceiveEntity{\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/TitanStripedStreamReceive64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteHost;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.RandomAccessChanface;\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.kvfs.OnVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.runtime.MasterVolumeGram;\n\nimport java.io.InputStream;\nimport java.sql.SQLException;\n\npublic class TitanStripedStreamReceive64 implements StripedStreamReceive64{\n    protected InputStream  stream;\n\n    protected VolumeManager volumeManager;\n\n    protected StorageReceiveIORequest storageReceiveIORequest;\n\n    protected StripedVolume  stripedVolume;\n\n    protected ReceiveEntity  entity;\n\n    protected OnVolumeFileSystem kenVolumeFileSystem;\n\n    protected SQLiteHost   mSqLiteHost;\n\n    public TitanStripedStreamReceive64( StripedStreamReceiveEntity entity ){\n        this.entity = entity;\n        this.stream = entity.getStream();\n        this.volumeManager = entity.getVolumeManager();\n        this.storageReceiveIORequest = entity.getReceiveStorageObject();\n        this.kenVolumeFileSystem = new KenVolumeFileSystem( this.volumeManager );\n        this.stripedVolume = entity.getStripedVolume();\n    
}\n\n    @Override\n    public StorageIOResponse streamReceive() throws UIOException {\n//        Hydrogen hydrogen = this.volumeManager.getHydrogen();\n//        List<LogicVolume> volumes = this.stripedVolume.queryChildren();\n//        MasterVolumeGram masterVolumeGram = new MasterVolumeGram( this.stripedVolume.getGuid().toString(), hydrogen, volumes.size(), 1, this.volumeManager.getConfig().getDefaultStripSize().intValue() );\n//        hydrogen.getTaskManager().add( masterVolumeGram );\n//        MappedExecutor executor = this.getExecutor();\n//\n//        TitanStripReceiveBufferOutJob bufferOutJob = new TitanStripReceiveBufferOutJob( masterVolumeGram, this.volumeManager, this.stream, this.storageReceiveIORequest, executor );\n//        LocalStripedTaskThread taskThread = new LocalStripedTaskThread( \"bufferOut\",masterVolumeGram, bufferOutJob );\n//        masterVolumeGram.getTaskManager().add( taskThread );\n//        masterVolumeGram.applyBufferOutThreadId( taskThread.getId() );\n//        taskThread.start();\n//\n//        int index = 0;\n//        for( LogicVolume volume : volumes ){\n//            TitanStripReceiveBufferInJob bufferInJob = new TitanStripReceiveBufferInJob( masterVolumeGram, index,this.stream,volume );\n//            LocalStripedTaskThread bufferInThread = new LocalStripedTaskThread(volume.getName(), masterVolumeGram, bufferInJob);\n//            masterVolumeGram.getTaskManager().add( bufferInThread );\n//            CacheBlock cacheBlock = masterVolumeGram.getCacheGroup().get(index);\n//            cacheBlock.setBufferWriteThreadId( bufferInThread.getId() );\n//            bufferInThread.start();\n//            index++;\n//        }\n//\n//        this.waitForTaskCompletion( masterVolumeGram );\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse streamReceive(Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    private MappedExecutor getExecutor() throws SQLException {\n        
VolumeConfig config = this.volumeManager.getConfig();\n        GUID physicsVolumeGuid = this.kenVolumeFileSystem.getKVFSPhysicsVolume(this.stripedVolume.getGuid());\n        PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(physicsVolumeGuid);\n        String url = physicalVolume.getMountPoint().getMountPoint()+ config.getPathSeparator() +this.stripedVolume.getGuid()+ config.getSqliteFileExtension();\n        return this.volumeManager.getKenusPool().allot(url);\n    }\n\n    private void waitForTaskCompletion(MasterVolumeGram masterVolumeGram) throws ProxyProvokeHandleException {\n        try {\n            masterVolumeGram.getTaskManager().syncWaitingTerminated();\n        }\n        catch (Exception e) {\n            throw new ProxyProvokeHandleException(e);\n        }\n    }\n\n//    @Override\n//    public StorageIOResponse receive() throws UIOException {\n//        return null;\n//    }\n//\n//    @Override\n//    public StorageIOResponse receive(Number offset, Number endSize) throws UIOException {\n//        return null;\n//    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(Chanface chanface, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Chanface chanface, Number offset, Number endSize) {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(RandomAccessChanface randomAccessChanface, Number offset, Number endSize) throws UIOException {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/entity/local/striped/receive/stream/TitanStripedStreamReceiveEntity64.java",
    "content": "package com.pinecone.hydra.storage.volume.entity.local.striped.receive.stream;\n\nimport com.pinecone.hydra.storage.StorageIOResponse;\nimport com.pinecone.hydra.storage.StorageReceiveIORequest;\nimport com.pinecone.hydra.storage.io.UIOException;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ArchReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\npublic class TitanStripedStreamReceiveEntity64 extends ArchReceiveEntity implements StripedStreamReceiveEntity64{\n    protected InputStream  stream;\n    protected StripedVolume stripedVolume;\n    protected StripedStreamReceive64 streamReceive;\n    public TitanStripedStreamReceiveEntity64(VolumeManager volumeManager, StorageReceiveIORequest storageReceiveIORequest, InputStream stream, StripedVolume stripedVolume) {\n        super(volumeManager, storageReceiveIORequest,null);\n        this.stream = stream;\n        this.stripedVolume = stripedVolume;\n        this.streamReceive = new TitanStripedStreamReceive64( this );\n    }\n\n    @Override\n    public StorageIOResponse receive() throws UIOException {\n        return this.streamReceive.streamReceive();\n    }\n\n    @Override\n    public StorageIOResponse receive(Number offset, Number endSize) throws UIOException {\n        return this.streamReceive.streamReceive( offset, endSize );\n    }\n\n    @Override\n    public StorageIOResponse randomReceive(Number offset, Number endSize) throws IOException {\n        return null;\n    }\n\n    @Override\n    public StorageIOResponse receive(CacheBlock cacheBlock, byte[] buffer) throws UIOException {\n        return null;\n    }\n\n    @Override\n    public InputStream getStream() {\n        return this.stream;\n    }\n\n    @Override\n    public void setStream(InputStream stream) {\n       
 this.stream = stream;\n    }\n\n    @Override\n    public StripedVolume getStripedVolume() {\n        return this.stripedVolume;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/kvfs/ExecutorPool.java",
    "content": "package com.pinecone.hydra.storage.volume.kvfs;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\n\npublic interface ExecutorPool extends Pinenut {\n    MappedExecutor allot( String name );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/kvfs/KenVolumeFileSystem.java",
    "content": "package com.pinecone.hydra.storage.volume.kvfs;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\nimport com.pinecone.framework.util.rdb.ResultSession;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.source.SQLiteVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\npublic class KenVolumeFileSystem implements OnVolumeFileSystem {\n\n    private VolumeManager               volumeManager;\n    private VolumeMasterManipulator     volumeMasterManipulator;\n    private SQLiteVolumeManipulator     sqLiteVolumeManipulator;\n\n    public KenVolumeFileSystem( VolumeManager volumeManager ){\n        this.volumeManager           = volumeManager;\n        this.volumeMasterManipulator = volumeManager.getMasterManipulator();\n        this.sqLiteVolumeManipulator = this.volumeMasterManipulator.getSQLiteVolumeManipulator();\n    }\n\n    @Override\n    public GUID getKVFSPhysicsVolume(GUID volumeGuid) {\n        return this.sqLiteVolumeManipulator.getPhysicsGuid(volumeGuid);\n    }\n\n    @Override\n    public void insertSimpleTargetMappingTab( GUID physicsGuid, GUID volumeGuid ) {\n        this.sqLiteVolumeManipulator.insert( physicsGuid, volumeGuid );\n    }\n    @Override\n    public void createSimpleTargetMappingTab( MappedExecutor mappedExecutor ) throws SQLException {\n        mappedExecutor.execute( \"CREATE TABLE `kvfs_simple_target_mapping`( `id` INTEGER PRIMARY KEY AUTOINCREMENT, `storage_object_guid` VARCHAR(36) , `storage_object_name` VARCHAR(36), `source_name` VARCHAR(330) );\", false );\n    }\n\n    @Override\n    public void removeSimpleTargetMappingTab(GUID storageObjectGuid, MappedExecutor mappedExecutor) throws SQLException {\n        mappedExecutor.execute(\"DELETE FROM `kvfs_simple_target_mapping` 
WHERE `storage_object_guid` = '\" + storageObjectGuid + \"'\", false);\n    }\n\n    @Override\n    public void insertSimpleTargetMappingSoloRecord( GUID storageObjectGuid, String storageObjectName, String sourceName, MappedExecutor mappedExecutor ) throws SQLException {\n        mappedExecutor.execute( \"INSERT INTO `kvfs_simple_target_mapping` ( `storage_object_guid` , `storage_object_name` , `source_name` ) VALUES ( '\"+ storageObjectGuid+ \"', '\"+storageObjectName+\"', '\"+sourceName+\"' )\", false );\n    }\n\n    @Override\n    public String getSimpleStorageObjectSourceName(GUID storageObjectGuid, MappedExecutor mappedExecutor) throws SQLException {\n        ResultSession query = mappedExecutor.query(\"SELECT `source_name` FROM `kvfs_simple_target_mapping` WHERE `storage_object_guid` = '\" + storageObjectGuid + \"' \");\n        ResultSet resultSet = query.getResultSet();\n        if( resultSet.next() ){\n            return resultSet.getString(\"source_name\");\n        }\n        return null;\n    }\n\n    @Override\n    public boolean existStorageObject(MappedExecutor mappedExecutor, GUID storageObjectGuid) throws SQLException {\n        ResultSession query = mappedExecutor.query(\" SELECT COUNT(*) FROM `kvfs_simple_target_mapping` WHERE `storage_object_guid` = '\" + storageObjectGuid + \"' \");\n        ResultSet resultSet = query.getResultSet();\n        if( resultSet.next() ){\n            int count = resultSet.getInt(1);\n            return count != 0;\n        }\n        return false;\n    }\n\n    @Override\n    public int hashStorageObjectID( GUID keyGuid, int volumeNum ) {\n        int hash = (keyGuid.hashCode() ^ 137) % volumeNum; // TODO! 
CONST\n        hash = (hash ^ (hash >> 31)) - (hash >> 31);\n        return hash;\n    }\n\n    @Override\n    public void createSpannedIndexTable(MappedExecutor mappedExecutor) throws SQLException {\n        mappedExecutor.execute( \"CREATE TABLE `kvfs_span_volume_index`( `id` INTEGER PRIMARY KEY AUTOINCREMENT, `hash_key` int , `target_volume_guid` VARCHAR(36));\", false );\n    }\n\n    @Override\n    public void insertSpannedIndexTable(MappedExecutor mappedExecutor, int hashKey, GUID targetVolumeGuid) throws SQLException {\n        mappedExecutor.execute( \"INSERT INTO `kvfs_span_volume_index` ( `hash_key`, `target_volume_guid` ) VALUES ( \"+hashKey+\", '\"+targetVolumeGuid+\"' )\", false );\n    }\n\n    @Override\n    public GUID getSpannedIndexTableTargetGuid(MappedExecutor mappedExecutor, int hashKey) throws SQLException {\n        ResultSession query = mappedExecutor.query(\"SELECT `target_volume_guid` FROM `kvfs_span_volume_index` WHERE `hash_key` = \" + hashKey + \" \");\n        ResultSet resultSet = query.getResultSet();\n        if ( resultSet.next() ){\n            String targetVolumeGuid = resultSet.getString(\"target_volume_guid\");\n            return GUIDs.GUID128( targetVolumeGuid );\n        }\n        return null;\n    }\n\n    @Override\n    public void creatSpanLinkedVolumeTable(MappedExecutor mappedExecutor) throws SQLException {\n        mappedExecutor.execute( \"CREATE TABLE `kvfs_span_linked_volume`( `id` INTEGER PRIMARY KEY AUTOINCREMENT, `hash_key` int , `key_guid` VARCHAR(36), `target_volume_guid` VARCHAR(36)) ;\", false );\n    }\n\n    @Override\n    public void insertSpanLinkedVolumeTable(MappedExecutor mappedExecutor, int hashKey, GUID keyGuid, GUID targetVolumeGuid) throws SQLException {\n        mappedExecutor.execute( \"INSERT INTO `kvfs_span_linked_volume` ( `hash_key`, `key_guid`, `target_volume_guid` ) VALUES ( \"+hashKey+\", '\"+keyGuid+\"', '\"+targetVolumeGuid+\"' )\", false );\n    }\n\n    @Override\n    public GUID 
getSpanLinkedVolumeTableTargetGuid(MappedExecutor mappedExecutor, GUID keyGuid) throws SQLException {\n        ResultSession query = mappedExecutor.query(\"SELECT `target_volume_guid` FROM `kvfs_span_linked_volume` WHERE `key_guid` = '\" + keyGuid + \"' \");\n        ResultSet resultSet = query.getResultSet();\n        if ( resultSet.next() ){\n            String targetVolumeGuid = resultSet.getString(\"target_volume_guid\");\n            return GUIDs.GUID128( targetVolumeGuid );\n        }\n        return null;\n    }\n\n    @Override\n    public void createStripMetaTable(MappedExecutor mappedExecutor) throws SQLException {\n        mappedExecutor.execute( \"CREATE TABLE `kvfs_strip_meta`( `id` INTEGER PRIMARY KEY AUTOINCREMENT, `code` int , `volume_guid` VARCHAR(36), `storage_object_guid` VARCHAR(36), `source_name` TEXT) ;\", false );\n    }\n\n    @Override\n    public void insertStripMetaTable(MappedExecutor mappedExecutor, int code, GUID volumeGuid, GUID storageObjectGuid, String sourceName) throws SQLException {\n        mappedExecutor.execute( \"INSERT INTO `kvfs_strip_meta` ( `code`, `volume_guid`, `storage_object_guid`, `source_name` ) VALUES ( \"+code+\", '\"+volumeGuid+\"', '\"+storageObjectGuid+\"', '\"+sourceName+\"' )\", false );\n    }\n\n    @Override\n    public void removeStripMetaTable(GUID storageGuid, MappedExecutor mappedExecutor) throws SQLException {\n        mappedExecutor.execute( \"DELETE FROM `kvfs_strip_meta` WHERE `storage_object_guid` = '\" + storageGuid + \"'\", false );\n    }\n\n    @Override\n    public String getStripMetaSourceName(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid) throws SQLException {\n        ResultSession query = mappedExecutor.query(\"SELECT `source_name` FROM `kvfs_strip_meta` WHERE `volume_guid` = '\" + volumeGuid + \"' AND `storage_object_guid` = '\" + storageObjectGuid + \"' \");\n        ResultSet resultSet = query.getResultSet();\n        if ( resultSet.next() ){\n            
return resultSet.getString(\"source_name\");\n        }\n        return null;\n    }\n\n    @Override\n    public int getStripMetaCode(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid) throws SQLException {\n        ResultSession query = mappedExecutor.query(\"SELECT `code` FROM `kvfs_strip_meta` WHERE `volume_guid` = '\" + volumeGuid + \"' AND `storage_object_guid` = '\" + storageObjectGuid + \"' \");\n        ResultSet resultSet = query.getResultSet();\n        if ( resultSet.next() ){\n            return resultSet.getInt(\"code\");\n        }\n        return 0;\n    }\n\n    @Override\n    public boolean isExistStripMetaTable(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid) throws SQLException {\n        ResultSession query = mappedExecutor.query(\"SELECT COUNT(*) FROM `kvfs_strip_meta` WHERE `volume_guid` = '\" + volumeGuid + \"' AND `storage_object_guid` = '\" + storageObjectGuid + \"' \");\n        ResultSet resultSet = query.getResultSet();\n        if( resultSet.next() ){\n            int count = resultSet.getInt(1);\n            return count != 0;\n        }\n        return false;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/kvfs/KenusPool.java",
    "content": "package com.pinecone.hydra.storage.volume.kvfs;\n\nimport com.pinecone.framework.util.rdb.MappedExecutor;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteHost;\n\nimport java.sql.SQLException;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\npublic class KenusPool implements ExecutorPool {\n    protected Map<String, MappedExecutor> kenusInstances;\n\n    public KenusPool(){\n        this.kenusInstances = new ConcurrentHashMap<>();\n    }\n\n    @Override\n    public MappedExecutor allot(String name) {\n        MappedExecutor mappedExecutor = this.kenusInstances.get(name);\n        if( mappedExecutor == null ){\n            try {\n                SQLiteHost sqLiteHost = new SQLiteHost(name);\n                SQLiteExecutor sqLiteExecutor = new SQLiteExecutor(sqLiteHost);\n                this.kenusInstances.put(name, sqLiteExecutor);\n                mappedExecutor = sqLiteExecutor;\n            } catch (SQLException e) {\n                 e.printStackTrace();\n            }\n\n        }\n        return mappedExecutor;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/kvfs/OnVolumeFileSystem.java",
    "content": "package com.pinecone.hydra.storage.volume.kvfs;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.rdb.MappedExecutor;\n\nimport java.sql.SQLException;\n\npublic interface OnVolumeFileSystem extends Pinenut {\n    GUID getKVFSPhysicsVolume(GUID volumeGuid);\n\n    void insertSimpleTargetMappingTab(GUID physicsGuid, GUID volumeGuid);\n\n    void createSimpleTargetMappingTab(MappedExecutor mappedExecutor ) throws SQLException;\n\n    void removeSimpleTargetMappingTab( GUID storageObjectGuid, MappedExecutor mappedExecutor ) throws SQLException;\n\n    void insertSimpleTargetMappingSoloRecord(GUID storageObjectGuid, String storageObjectName, String sourceName, MappedExecutor mappedExecutor ) throws SQLException;\n    String getSimpleStorageObjectSourceName(GUID storageObjectGuid, MappedExecutor mappedExecutor ) throws SQLException;\n\n    boolean existStorageObject( MappedExecutor mappedExecutor, GUID storageObjectGuid ) throws SQLException;\n\n    int hashStorageObjectID( GUID keyGuid, int volumeNum);\n\n    void createSpannedIndexTable(MappedExecutor mappedExecutor ) throws SQLException;\n    void insertSpannedIndexTable(MappedExecutor mappedExecutor, int hashKey, GUID targetVolumeGuid ) throws SQLException;\n    GUID getSpannedIndexTableTargetGuid(MappedExecutor mappedExecutor, int hashKey ) throws SQLException;\n\n\n    void creatSpanLinkedVolumeTable( MappedExecutor mappedExecutor ) throws SQLException;\n    void insertSpanLinkedVolumeTable( MappedExecutor mappedExecutor, int hashKey, GUID keyGuid, GUID targetVolumeGuid ) throws SQLException;\n    GUID getSpanLinkedVolumeTableTargetGuid( MappedExecutor mappedExecutor, GUID keyGuid ) throws SQLException;\n\n    void createStripMetaTable(MappedExecutor mappedExecutor ) throws SQLException;\n    void insertStripMetaTable(MappedExecutor mappedExecutor, int code, GUID volumeGuid, GUID storageObjectGuid, String 
sourceName ) throws SQLException;\n    void removeStripMetaTable( GUID storageGuid, MappedExecutor mappedExecutor ) throws SQLException;\n    String getStripMetaSourceName(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid ) throws SQLException;\n    int getStripMetaCode(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid ) throws SQLException;\n    boolean isExistStripMetaTable(MappedExecutor mappedExecutor, GUID volumeGuid, GUID storageObjectGuid ) throws SQLException;\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/ArchVolumeOperator.java",
    "content": "package com.pinecone.hydra.storage.volume.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.system.ko.UOIUtils;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.time.LocalDateTime;\n\npublic abstract class ArchVolumeOperator implements VolumeOperator{\n    protected VolumeManager                 volumeManager;\n    protected VolumeOperatorFactory         factory;\n    protected ImperialTree                  imperialTree;\n    protected VolumeMasterManipulator       volumeMasterManipulator;\n    protected VolumeCapacityManipulator     volumeCapacityManipulator;\n    protected KenVolumeFileSystem           kenVolumeFileSystem;\n\n    public ArchVolumeOperator( VolumeMasterManipulator masterManipulator, VolumeManager volumeManager ){\n        this.imperialTree =  volumeManager.getMasterTrieTree();\n        this.volumeManager = volumeManager;\n        this.volumeMasterManipulator   =  masterManipulator;\n        this.volumeCapacityManipulator =  masterManipulator.getVolumeCapacityManipulator();\n        this.kenVolumeFileSystem       =  this.volumeManager.getKVFSystem();\n    }\n\n    protected ImperialTreeNode affirmPreinsertionInitialize(LogicVolume volume ){\n        GUID guid = volume.getGuid();\n        volume.setUpdateTime( LocalDateTime.now() );\n        ImperialTreeNode imperialTreeNode = new GUIDImperialTrieNode();\n        imperialTreeNode.setGuid( guid );\n        
imperialTreeNode.setType( UOIUtils.createLocalJavaClass( volume.getClass().getName() ) );\n\n        return imperialTreeNode;\n    }\n\n    public VolumeOperatorFactory  getVolumeOperatorFactory(){\n        return this.factory;\n    }\n    protected String getVolumeMetaType( TreeNode treeNode ){\n        return treeNode.className().replace(\"Titan\",\"\");\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/SimpleVolumeOperator.java",
    "content": "package com.pinecone.hydra.storage.volume.operator;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.io.File;\nimport java.sql.SQLException;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\npublic class SimpleVolumeOperator extends ArchVolumeOperator  implements VolumeOperator{\n    protected Map<GUID, LogicVolume>  cacheMap  =  new HashMap<>();\n    protected SimpleVolumeManipulator       simpleVolumeManipulator;\n\n    public SimpleVolumeOperator( VolumeOperatorFactory  factory ){\n        this( factory.getMasterManipulator(), factory.getVolumeManager() );\n        this.factory = factory;\n    }\n\n    public SimpleVolumeOperator(VolumeMasterManipulator masterManipulator, VolumeManager volumeManager) {\n        super(masterManipulator, volumeManager);\n        this.simpleVolumeManipulator    =  masterManipulator.getSimpleVolumeManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        LocalSimpleVolume simpleVolume = ( LocalSimpleVolume ) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(simpleVolume);\n        GUID 
guid = simpleVolume.getGuid();\n        VolumeCapacity64 volumeCapacity = simpleVolume.getVolumeCapacity();\n        if ( volumeCapacity.getVolumeGuid() == null ){\n            volumeCapacity.setVolumeGuid( guid );\n        }\n\n        this.imperialTree.insert(imperialTreeNode);\n        this.simpleVolumeManipulator.insert( simpleVolume );\n        this.volumeCapacityManipulator.insert( volumeCapacity );\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren(guid);\n        for( GUIDImperialTrieNode node : children ){\n            TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? >[]{this.getClass()}, this );\n            VolumeOperator operator = this.factory.getOperator(this.getVolumeMetaType(newInstance));\n            operator.purge( node.getGuid() );\n        }\n        this.removeNode( guid );\n    }\n\n    @Override\n    public SimpleVolume get(GUID guid)  {\n        SimpleVolume simpleVolume = this.simpleVolumeManipulator.getSimpleVolume(guid);\n        VolumeCapacity64 volumeCapacity = this.volumeCapacityManipulator.getVolumeCapacity(guid);\n        simpleVolume.setVolumeCapacity( volumeCapacity );\n        simpleVolume.setVolumeTree( this.volumeManager);\n        simpleVolume.setKenVolumeFileSystem();\n        try {\n            simpleVolume.assembleSQLiteExecutor();\n        } catch (SQLException e) {\n            throw new ProxyProvokeHandleException(e);\n        }\n        return simpleVolume;\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n        SimpleVolume simpleVolume = (SimpleVolume) treeNode;\n\n    }\n\n    @Override\n    public void removeStorageObject(GUID volumeGuid,GUID storageObjectGuid,long size) {\n     
   SimpleVolume simpleVolume = (SimpleVolume)this.volumeManager.get(volumeGuid);\n        try {\n            SQLiteExecutor sqLiteExecutor = simpleVolume.getSQLiteExecutor();\n            String sourceName = this.kenVolumeFileSystem.getSimpleStorageObjectSourceName(storageObjectGuid, sqLiteExecutor);\n            if( sourceName == null ){\n                return;\n            }\n            File file = new File(sourceName);\n            simpleVolume.increaseCapacity( file.length() );\n            List<GUID> guids = simpleVolume.listPhysicalVolume();\n            PhysicalVolume physicalVolume = this.volumeManager.getPhysicalVolume(guids.get(0));\n            physicalVolume.increaseCapacity( file.length() );\n            file.delete();\n            this.kenVolumeFileSystem.removeSimpleTargetMappingTab( storageObjectGuid, sqLiteExecutor );\n        } catch (SQLException e) {\n            throw new RuntimeException(e);\n        }\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.simpleVolumeManipulator.remove( guid );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/SpannedVolumeOperator.java",
    "content": "package com.pinecone.hydra.storage.volume.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.util.Collection;\nimport java.util.List;\n\npublic class SpannedVolumeOperator extends ArchVolumeOperator  implements VolumeOperator{\n    protected SpannedVolumeManipulator     SpannedVolumeManipulator;\n\n    public SpannedVolumeOperator( VolumeOperatorFactory  factory ){\n        this( factory.getMasterManipulator(), factory.getVolumeManager() );\n        this.factory = factory;\n    }\n\n    public SpannedVolumeOperator(VolumeMasterManipulator masterManipulator, VolumeManager volumeManager) {\n        super(masterManipulator, volumeManager);\n        this.SpannedVolumeManipulator = masterManipulator.getSpannedVolumeManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        LocalSpannedVolume simpleVolume = ( LocalSpannedVolume ) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(simpleVolume);\n        GUID guid = simpleVolume.getGuid();\n        VolumeCapacity64 volumeCapacity = simpleVolume.getVolumeCapacity();\n        if ( volumeCapacity.getVolumeGuid() == null ){\n            volumeCapacity.setVolumeGuid( guid );\n        }\n\n        this.imperialTree.insert(imperialTreeNode);\n        this.SpannedVolumeManipulator.insert( simpleVolume );\n        
this.volumeCapacityManipulator.insert( volumeCapacity );\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren(guid);\n        for( GUIDImperialTrieNode node : children ){\n            TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? >[]{this.getClass()}, this );\n            VolumeOperator operator = this.factory.getOperator(this.getVolumeMetaType(newInstance));\n            operator.purge( node.getGuid() );\n        }\n        this.removeNode( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid) {\n        SpannedVolume spannedVolume = this.SpannedVolumeManipulator.getSpannedVolume(guid);\n        VolumeCapacity64 volumeCapacity = this.volumeCapacityManipulator.getVolumeCapacity(guid);\n        spannedVolume.setVolumeCapacity( volumeCapacity );\n        spannedVolume.setVolumeTree( this.volumeManager);\n        spannedVolume.setKenVolumeFileSystem();\n        return spannedVolume;\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n\n    }\n\n    @Override\n    public void removeStorageObject(GUID volumeGuid,GUID storageObjectGuid,long size) {\n        Collection<TreeNode> children = this.volumeManager.getChildren(volumeGuid);\n        for( TreeNode treeNode : children ){\n            this.volumeManager.removeStorageObject( treeNode.getGuid(), storageObjectGuid,size );\n        }\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    private void removeNode( GUID guid ){\n        GUIDImperialTrieNode node = this.imperialTree.getNode(guid);\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.SpannedVolumeManipulator.remove( 
guid );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/StripedVolumeOperator.java",
    "content": "package com.pinecone.hydra.storage.volume.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.rdb.sqlite.SQLiteExecutor;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;\nimport com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.ImperialTreeNode;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport java.sql.SQLException;\nimport java.util.Collection;\nimport java.util.List;\n\npublic class StripedVolumeOperator extends ArchVolumeOperator  implements VolumeOperator{\n    protected StripedVolumeManipulator          stripedVolumeManipulator;\n\n    public StripedVolumeOperator( VolumeOperatorFactory  factory ){\n        this( factory.getMasterManipulator(), factory.getVolumeManager() );\n        this.factory = factory;\n    }\n\n    public StripedVolumeOperator(VolumeMasterManipulator masterManipulator, VolumeManager volumeManager) {\n        super(masterManipulator, volumeManager);\n        this.stripedVolumeManipulator = masterManipulator.getStripedVolumeManipulator();\n    }\n\n    @Override\n    public GUID insert(TreeNode treeNode) {\n        LocalStripedVolume stripedVolume = ( LocalStripedVolume ) treeNode;\n        ImperialTreeNode imperialTreeNode = this.affirmPreinsertionInitialize(stripedVolume);\n        GUID guid = stripedVolume.getGuid();\n        VolumeCapacity64 volumeCapacity = stripedVolume.getVolumeCapacity();\n        if ( volumeCapacity.getVolumeGuid() == null ){\n            volumeCapacity.setVolumeGuid( guid );\n        
}\n\n        this.imperialTree.insert(imperialTreeNode);\n        this.stripedVolumeManipulator.insert( stripedVolume );\n        this.volumeCapacityManipulator.insert( volumeCapacity );\n        return guid;\n    }\n\n    @Override\n    public void purge(GUID guid) {\n        List<GUIDImperialTrieNode> children = this.imperialTree.getChildren(guid);\n        for( GUIDImperialTrieNode node : children ){\n            TreeNode newInstance = (TreeNode)node.getType().newInstance( new Class<? >[]{this.getClass()}, this );\n            VolumeOperator operator = this.factory.getOperator(this.getVolumeMetaType(newInstance));\n            operator.purge( node.getGuid() );\n        }\n        this.removeNode( guid );\n    }\n\n    @Override\n    public TreeNode get(GUID guid) {\n        StripedVolume stripedVolume = this.stripedVolumeManipulator.getStripedVolume(guid);\n        VolumeCapacity64 volumeCapacity = this.volumeCapacityManipulator.getVolumeCapacity(guid);\n        stripedVolume.setVolumeCapacity( volumeCapacity );\n        stripedVolume.setVolumeTree( this.volumeManager);\n        stripedVolume.setKenVolumeFileSystem();\n        return stripedVolume;\n    }\n\n    @Override\n    public TreeNode get(GUID guid, int depth) {\n        return null;\n    }\n\n    @Override\n    public TreeNode getAsRootDepth(GUID guid) {\n        return null;\n    }\n\n    @Override\n    public void update(TreeNode treeNode) {\n\n    }\n\n    @Override\n    public void updateName(GUID guid, String name) {\n\n    }\n\n    @Override\n    public void removeStorageObject(GUID volumeGuid,GUID storageObjectGuid,long size) {\n        LogicVolume logicVolume = this.volumeManager.get(volumeGuid);\n        try {\n            SQLiteExecutor sqLiteExecutor = logicVolume.getSQLiteExecutor();\n            this.kenVolumeFileSystem.removeStripMetaTable( storageObjectGuid, sqLiteExecutor );\n            Collection<TreeNode> children = this.volumeManager.getChildren(volumeGuid);\n            for( 
TreeNode treeNode : children ){\n                this.volumeManager.removeStorageObject( treeNode.getGuid(), storageObjectGuid, size );\n            }\n            logicVolume.increaseCapacity( size );\n        } catch (SQLException e) {\n            throw new RuntimeException(e);\n        }\n    }\n\n    private void removeNode(GUID guid ){\n        this.imperialTree.purge( guid );\n        this.imperialTree.removeCachePath( guid );\n        this.stripedVolumeManipulator.remove( guid );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/TitanVolumeOperatorFactory.java",
    "content": "package com.pinecone.hydra.storage.volume.operator;\n\nimport com.pinecone.hydra.storage.file.entity.GenericFileNode;\nimport com.pinecone.hydra.storage.file.entity.GenericFolder;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.TreeMap;\n\npublic class TitanVolumeOperatorFactory implements VolumeOperatorFactory{\n    protected  VolumeMasterManipulator          volumeMasterManipulator;\n    protected VolumeManager                     volumeManager;\n    protected Map<String, TreeNodeOperator>     registerer = new HashMap<>();\n    protected Map<String, String >              metaTypeMap = new TreeMap<>();\n\n    protected void registerDefaultMetaType( Class<?> genericType ) {\n        this.metaTypeMap.put( genericType.getName(), genericType.getSimpleName().replace(\n                this.volumeManager.getConfig().getVersionSignature(), \"\"\n        ));\n    }\n\n    protected void registerDefaultMetaTypes() {\n        this.registerDefaultMetaType( GenericFolder.class );\n        this.registerDefaultMetaType( GenericFileNode.class );\n    }\n\n    public TitanVolumeOperatorFactory(VolumeManager volumeManager, VolumeMasterManipulator volumeMasterManipulator ){\n        this.volumeManager = volumeManager;\n        this.volumeMasterManipulator = volumeMasterManipulator;\n\n        this.registerer.put(\n                DefaultSimpleVolume,\n                new SimpleVolumeOperator( this )\n        );\n\n        this.registerer.put(\n                DefaultStripedVolume,\n                new StripedVolumeOperator( this )\n        );\n\n        this.registerer.put(\n                DefaultSpannedVolume,\n                new SpannedVolumeOperator( this )\n        );\n        this.registerDefaultMetaTypes();\n    }\n\n\n    @Override\n    
public void register( String typeName, TreeNodeOperator functionalNodeOperation ) {\n        this.registerer.put( typeName, functionalNodeOperation );\n    }\n\n    @Override\n    public void registerMetaType( Class<?> clazz, String metaType ){\n        this.registerMetaType( clazz.getName(), metaType );\n    }\n\n    @Override\n    public void registerMetaType( String classFullName, String metaType ){\n        this.metaTypeMap.put( classFullName, metaType );\n    }\n\n    @Override\n    public String getMetaType( String classFullName ) {\n        return this.metaTypeMap.get( classFullName );\n    }\n\n    @Override\n    public VolumeOperator getOperator(String typeName ) {\n        //Debug.trace( this.registerer.toString() );\n        //Debug.trace( typeName );\n        return (VolumeOperator) this.registerer.get( typeName );\n    }\n\n    @Override\n    public VolumeManager getVolumeManager() {\n        return this.volumeManager;\n    }\n\n    @Override\n    public VolumeMasterManipulator getMasterManipulator() {\n        return this.volumeMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/VolumeOperator.java",
    "content": "package com.pinecone.hydra.storage.volume.operator;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface VolumeOperator extends TreeNodeOperator {\n    void removeStorageObject(GUID volumeGuid,GUID storageObjectGuid,long size);\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/operator/VolumeOperatorFactory.java",
    "content": "package com.pinecone.hydra.storage.volume.operator;\n\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.operator.OperatorFactory;\nimport com.pinecone.hydra.unit.imperium.operator.TreeNodeOperator;\n\npublic interface VolumeOperatorFactory extends OperatorFactory {\n    String DefaultSimpleVolume         = LocalSimpleVolume.class.getSimpleName();\n    String DefaultStripedVolume        = LocalStripedVolume.class.getSimpleName();\n    String DefaultSpannedVolume        = LocalSpannedVolume.class.getSimpleName();\n\n    void register( String typeName, TreeNodeOperator functionalNodeOperation );\n\n    void registerMetaType( Class<?> clazz, String metaType );\n\n    void registerMetaType( String classFullName, String metaType );\n\n    String getMetaType( String classFullName );\n\n    VolumeOperator getOperator(String typeName );\n\n    VolumeManager getVolumeManager();\n\n    VolumeMasterManipulator getMasterManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/Dummy.java",
    "content": "package com.pinecone.hydra.storage.volume.policy;\n\npublic class Dummy {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/ArchSizingMatcher.java",
    "content": "package com.pinecone.hydra.storage.volume.policy.strip;\n\npublic abstract class ArchSizingMatcher implements SizingMatcher {\n    protected Number                        mnLevelSize;\n    protected DynamicStripSizingPolicy      mSizingPolicy;\n\n    public ArchSizingMatcher( DynamicStripSizingPolicy sizingPolicy, Number levelSize ) {\n        this.mSizingPolicy = sizingPolicy;\n    }\n\n    public DynamicStripSizingPolicy getSizingPolicy() {\n        return this.mSizingPolicy;\n    }\n\n    @Override\n    public Number getLevelSize() {\n        return this.mnLevelSize;\n    }\n\n    @Override\n    public int getLevel() {\n        return this.getSizingPolicy().getLevelByMatcher( this );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/DynamicStripSizingPolicy.java",
    "content": "package com.pinecone.hydra.storage.volume.policy.strip;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface DynamicStripSizingPolicy extends Pinenut {\n    Number evalStripSize( Number integritySize );\n\n    List<SizingMatcher> getMatchers();\n\n    int getLevelByMatcher( SizingMatcher that );\n\n    Number getDefaultStripSize();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/GenericDynamicStripSizingPolicy.java",
    "content": "package com.pinecone.hydra.storage.volume.policy.strip;\n\nimport java.util.List;\n\npublic class GenericDynamicStripSizingPolicy implements DynamicStripSizingPolicy {\n    protected List<SizingMatcher > mMatchers;\n    protected Number               mnDefaultStripSize;\n\n    // Parent\n\n    @Override\n    public Number evalStripSize( Number integritySize ) {\n        for( SizingMatcher matcher : this.mMatchers ) {\n            Number ret = matcher.isMatched( integritySize );\n            if( ret != null ) {\n                return ret;\n            }\n        }\n        return this.mnDefaultStripSize;\n    }\n\n    @Override\n    public List<SizingMatcher> getMatchers() {\n        return this.mMatchers;\n    }\n\n    @Override\n    public int getLevelByMatcher( SizingMatcher that ) {\n        return this.mMatchers.indexOf( that );\n    }\n\n    @Override\n    public Number getDefaultStripSize() {\n        return this.mnDefaultStripSize;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/MegaFileSizingMatcher64.java",
    "content": "package com.pinecone.hydra.storage.volume.policy.strip;\n\npublic class MegaFileSizingMatcher64 extends ArchSizingMatcher {\n    public MegaFileSizingMatcher64( DynamicStripSizingPolicy sizingPolicy, Number levelSize ) {\n        super( sizingPolicy, levelSize );\n    }\n\n    @Override\n    public Number isMatched( Number integritySize ) {\n        long i64IntegritySize = integritySize.longValue();\n        if( i64IntegritySize > 100L * 1024 * 1024 * 1024 ) { // (100GB, +∞]\n            return this.getLevelSize();\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/SizingMatcher.java",
    "content": "package com.pinecone.hydra.storage.volume.policy.strip;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface SizingMatcher extends Pinenut {\n    Number isMatched( Number integritySize );\n\n    Number getLevelSize();\n\n    int getLevel();\n\n    DynamicStripSizingPolicy getSizingPolicy();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/SmallFileSizingMatcher64.java",
    "content": "package com.pinecone.hydra.storage.volume.policy.strip;\n\npublic class SmallFileSizingMatcher64 extends ArchSizingMatcher {\n    public SmallFileSizingMatcher64( DynamicStripSizingPolicy sizingPolicy, Number levelSize ) {\n        super( sizingPolicy, levelSize );\n    }\n\n    @Override\n    public Number isMatched( Number integritySize ) {\n        long i64IntegritySize = integritySize.longValue();\n        if( i64IntegritySize > 1024 * 1024 * 1024 && i64IntegritySize <= 100L * 1024 * 1024 * 1024 ) { // (1GB, 100GB]\n            return this.getLevelSize();\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/policy/strip/TinyFileSizingMatcher64.java",
    "content": "package com.pinecone.hydra.storage.volume.policy.strip;\n\npublic class TinyFileSizingMatcher64 extends ArchSizingMatcher {\n    public TinyFileSizingMatcher64( DynamicStripSizingPolicy sizingPolicy, Number levelSize ) {\n        super( sizingPolicy, levelSize );\n    }\n\n    @Override\n    public Number isMatched( Number integritySize ) {\n        long i64IntegritySize = integritySize.longValue();\n        if( i64IntegritySize <= 1024 * 1024 * 1024 ) { // [0, 1G]\n            return this.getLevelSize();\n        }\n\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/ArchStripedTaskThread.java",
    "content": "package com.pinecone.hydra.storage.volume.runtime;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\n\npublic abstract class ArchStripedTaskThread extends ArchTaskThread implements Runnable {\n    protected VolumeJob    mVolumeJob;\n\n    protected ArchStripedTaskThread ( String szName, Processum parent, VolumeJob volumeJob ) {\n        super( szName, parent );\n        this.mVolumeJob = volumeJob;\n\n        Thread affinityThread = new Thread( this );\n        affinityThread.setDaemon(false);\n\n        this.setThreadAffinity( affinityThread );\n        this.getAffiliateThread().setName( szName );\n        this.setName( affinityThread.getName() );\n    }\n\n\n    protected void executeSingleJob() throws VolumeJobCompromiseException {\n        this.mVolumeJob.execute();\n    }\n\n    @Override\n    public void run() {\n        //switch ()\n        try{\n            this.executeSingleJob();\n        }\n        catch ( VolumeJobCompromiseException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/ArchTaskThread.java",
    "content": "package com.pinecone.hydra.storage.volume.runtime;\nimport com.pinecone.framework.system.executum.ArchThreadum;\nimport com.pinecone.framework.system.executum.Processum;\n\npublic abstract class ArchTaskThread extends ArchThreadum {\n    protected ArchTaskThread ( String szName, Processum parent ) {\n        super( szName, parent, null );\n    }\n\n    @Override\n    public void start() {\n        if( this.getAffiliateThread() != null ) {\n            this.getAffiliateThread().start();\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/MasterVolumeGram.java",
    "content": "package com.pinecone.hydra.storage.volume.runtime;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.Semaphore;\nimport java.util.concurrent.locks.Lock;\n\nimport com.pinecone.framework.system.GenericMasterTaskManager;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.lock.SpinLock;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.StripCacheBlock;\n\npublic class MasterVolumeGram extends ArchProcessum implements VolumeGram {\n    protected Lock                  mMajorStatusIO = new SpinLock();\n    protected int                   jobCount;\n    protected long                  bufferOutThreadId;\n    protected Semaphore             bufferOutBlockerLatch;\n    protected int                   currentBufferInJobCode;\n    protected CountDownLatch        countDownLatch;\n    protected final CompletableFuture<Object> majorJobFuture;\n    \n    \n    protected List<CacheBlock>      cacheGroup;\n    protected byte[]                buffer;\n\n\n\n    public MasterVolumeGram( String szName, Processum parent ) {\n        super( szName, parent );\n        this.mTaskManager      = new GenericMasterTaskManager( this );\n        this.majorJobFuture    = new CompletableFuture<>();\n    }\n\n    public MasterVolumeGram( String szName, Processum parent, int jobCount, int StripResidentCacheAllotRatio, int stripSize ){\n        this( szName, parent );\n        this.jobCount       = jobCount;\n        this.cacheGroup     = this.initializeCacheGroup( jobCount, StripResidentCacheAllotRatio, stripSize );\n 
       this.buffer         = new byte[ jobCount * stripSize * StripResidentCacheAllotRatio ];\n        this.currentBufferInJobCode = 0;\n    }\n\n    public Lock getMajorStatusIO() {\n        return this.mMajorStatusIO;\n    }\n\n    public LocalStripedTaskThread getChildThread( long threadId ){\n        return (LocalStripedTaskThread) this.getTaskManager().getExecutumPool().get( threadId );\n    }\n\n\n    @Override\n    public int getJobCount() {\n        return this.jobCount;\n    }\n\n    @Override\n    public void setJobCount(int jobCount) {\n        this.jobCount = jobCount;\n    }\n\n    @Override\n    public List<CacheBlock> getCacheGroup() {\n        return this.cacheGroup;\n    }\n\n    @Override\n    public void setCacheGroup(List<CacheBlock> cacheGroup) {\n        this.cacheGroup = cacheGroup;\n    }\n\n    @Override\n    public byte[] getBuffer() {\n        return this.buffer;\n    }\n\n    @Override\n    public void setBuffer(byte[] buffer) {\n        this.buffer = buffer;\n    }\n\n    @Override\n    public long getBufferOutThreadId() {\n        return this.bufferOutThreadId;\n    }\n\n    @Override\n    public void applyBufferOutThreadId(long bufferOutThreadId) {\n        this.bufferOutThreadId = bufferOutThreadId;\n    }\n\n    @Override\n    public void applyBufferOutBlockerLatch(Semaphore bufferOutBlockerLatch) {\n        this.bufferOutBlockerLatch = bufferOutBlockerLatch;\n    }\n\n    @Override\n    public Semaphore getBufferOutBlockerLatch() {\n        return this.bufferOutBlockerLatch;\n    }\n\n    @Override\n    public int getCurrentBufferInJobCode() {\n        return this.currentBufferInJobCode;\n    }\n\n    @Override\n    public void setCurrentBufferInJobCode(int currentBufferInJobCode) {\n        this.currentBufferInJobCode = currentBufferInJobCode;\n    }\n\n    @Override\n    public CompletableFuture<Object> getMajorJobFuture() {\n        return this.majorJobFuture;\n    }\n\n    @Override\n    public void majorJobCountDown() {\n      
  this.countDownLatch.countDown();\n    }\n\n    @Override\n    public void setMajorJobCountDownNum(int num) {\n        this.countDownLatch = new CountDownLatch(num);\n    }\n\n    @Override\n    public void majorJobCountDownLatchWait() {\n        try {\n            this.countDownLatch.await();\n        }\n        catch ( InterruptedException e ) {\n            Thread.currentThread().interrupt();\n            throw new ProxyProvokeHandleException(e);\n        }\n    }\n\n    private List< CacheBlock > initializeCacheGroup(int jobCount, int StripResidentCacheAllotRatio, Number stripSize ){\n        ArrayList<CacheBlock> cacheGroup = new ArrayList<>();\n        Number currentPosition = 0;\n        for( int i = 0; i < jobCount * StripResidentCacheAllotRatio; i++ ){\n            StripCacheBlock stripCacheBlock = new StripCacheBlock( i, currentPosition, currentPosition.intValue() + stripSize.intValue() );\n            cacheGroup.add( stripCacheBlock );\n            currentPosition = currentPosition.intValue() + stripSize.intValue();\n        }\n        return  cacheGroup;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/TaskThread.java",
    "content": "package com.pinecone.hydra.storage.volume.runtime;\n\nimport com.pinecone.framework.system.executum.Executum;\n\npublic interface TaskThread extends Executum {\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/VolumeGram.java",
    "content": "package com.pinecone.hydra.storage.volume.runtime;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.CacheBlock;\n\nimport java.util.List;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.Semaphore;\n\npublic interface VolumeGram extends Processum {\n    int getJobCount();\n    void setJobCount( int jobCount );\n\n    List<CacheBlock> getCacheGroup();\n    void setCacheGroup( List<CacheBlock> cacheGroup );\n\n    byte[] getBuffer();\n    void setBuffer( byte[] buffer );\n\n    long getBufferOutThreadId();\n    void applyBufferOutThreadId(long bufferOutThreadId );\n\n    void applyBufferOutBlockerLatch( Semaphore bufferOutBlockerLatch);\n    Semaphore getBufferOutBlockerLatch();\n\n    int getCurrentBufferInJobCode();\n    void setCurrentBufferInJobCode( int currentBufferInJobCode );\n\n    CompletableFuture<Object> getMajorJobFuture();\n\n    void majorJobCountDown();\n\n    void setMajorJobCountDownNum( int num );\n\n    void majorJobCountDownLatchWait();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/VolumeJob.java",
    "content": "package com.pinecone.hydra.storage.volume.runtime;\n\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.LocalStripedTaskThread;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.StripBufferStatus;\n\nimport java.util.concurrent.Semaphore;\n\npublic interface VolumeJob extends Executor {\n    void execute() throws VolumeJobCompromiseException;\n\n    void applyThread( LocalStripedTaskThread thread );\n\n    StripBufferStatus getStatus();\n\n    Semaphore getBlockerLatch();\n    void setStatus( StripBufferStatus status );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/runtime/VolumeJobCompromiseException.java",
    "content": "package com.pinecone.hydra.storage.volume.runtime;\n\nimport com.pinecone.framework.system.executum.JobCompromisedException;\n\npublic class VolumeJobCompromiseException extends JobCompromisedException {\n    public VolumeJobCompromiseException    () {\n        super();\n    }\n\n    public VolumeJobCompromiseException    ( String message ) {\n        super(message);\n    }\n\n    public VolumeJobCompromiseException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public VolumeJobCompromiseException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected VolumeJobCompromiseException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/LineSegmentManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface LineSegmentManipulator extends Pinenut {\n    void insert( int idMin, int idMax, GUID volumeGuid );\n    GUID getVolumeGuid( int id );\n    void delete( int idMin, int idMax );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/LogicVolumeManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\n\nimport java.util.List;\n\npublic interface LogicVolumeManipulator extends GUIDNameManipulator {\n    void extendLogicalVolume( GUID logicGuid, GUID physicalGuid );\n\n    List< GUID > listPhysicalVolume( GUID logicGuid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/MirroredVolumeManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.MirroredVolume;\n\nimport java.util.List;\n\npublic interface MirroredVolumeManipulator extends LogicVolumeManipulator {\n    void insert( MirroredVolume mirroredVolume );\n\n    void remove( GUID guid );\n\n    MirroredVolume getMirroredVolume(GUID guid);\n\n    void extendLogicalVolume( GUID logicGuid, GUID physicalGuid );\n\n    List<GUID> listPhysicalVolume(GUID logicGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/MountPointManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.MountPoint;\n\npublic interface MountPointManipulator extends Pinenut {\n    void insert( MountPoint mountPoint );\n\n    void remove( GUID guid );\n\n    void removeByVolumeGuid( GUID guid );\n\n    MountPoint getMountPoint(GUID guid);\n\n    MountPoint getMountPointByVolumeGuid( GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/PhysicalVolumeManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\n\nimport java.util.List;\n\npublic interface PhysicalVolumeManipulator extends Pinenut {\n    void insert( PhysicalVolume physicalVolume );\n    void remove( GUID guid );\n    PhysicalVolume getPhysicalVolume(GUID guid);\n    PhysicalVolume getPhysicalVolumeByName( String name );\n    PhysicalVolume getSmallestCapacityPhysicalVolume();\n    GUID getParent( GUID guid );\n    List<Volume> queryAllPhysicalVolumes();\n    void update( PhysicalVolume physicalVolume );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/SQLiteVolumeManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface SQLiteVolumeManipulator extends Pinenut {\n    void insert( GUID physicsGuid, GUID volumeGuid );\n\n    GUID getPhysicsGuid( GUID volumeGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/SimpleVolumeManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\n\nimport java.util.List;\n\npublic interface SimpleVolumeManipulator extends LogicVolumeManipulator {\n    void insert( SimpleVolume simpleVolume );\n\n    void remove( GUID guid );\n\n    void update( SimpleVolume simpleVolume );\n\n    SimpleVolume getSimpleVolume(GUID guid);\n\n    void extendLogicalVolume( GUID logicGuid, GUID physicalGuid );\n\n    List<GUID> listPhysicalVolume(GUID logicGuid );\n\n    List<Volume> queryAllSimpleVolumes();\n\n    void updateDefinitionCapacity( GUID guid, long definitionCapacity );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/SpannedVolumeManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\n\nimport java.util.List;\n\npublic interface SpannedVolumeManipulator extends LogicVolumeManipulator {\n    void insert( SpannedVolume spannedVolume );\n\n    void remove( GUID guid );\n\n    void update( SpannedVolume spannedVolume );\n\n    SpannedVolume getSpannedVolume(GUID guid);\n\n    void extendLogicalVolume( GUID logicGuid, GUID physicalGuid );\n\n    List<GUID> listPhysicalVolume(GUID logicGuid );\n\n    List<Volume> queryAllSpannedVolume();\n\n    void updateDefinitionCapacity( GUID guid, long definitionCapacity );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/StripedVolumeManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\n\nimport java.util.List;\n\npublic interface StripedVolumeManipulator extends LogicVolumeManipulator {\n    void insert( StripedVolume stripedVolume );\n    void remove( GUID guid );\n\n    void update( StripedVolume stripedVolume );\n    StripedVolume getStripedVolume(GUID guid);\n    void extendLogicalVolume( GUID logicGuid, GUID physicalGuid );\n    List<GUID> listPhysicalVolume(GUID logicGuid );\n\n    List<Volume> queryAllStripedVolume();\n\n    void updateDefinitionCapacity( GUID guid, long definitionCapacity );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/VolumeAllocateManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface VolumeAllocateManipulator extends Pinenut {\n    void insert( GUID objectGuid, GUID childVolumeGuid, GUID parentVolumeGuid );\n    GUID get( GUID objectGuid, GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/VolumeCapacityManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\n\npublic interface VolumeCapacityManipulator extends Pinenut {\n    void insert( VolumeCapacity64 volumeCapacity );\n\n    void remove( GUID guid );\n\n    VolumeCapacity64 getVolumeCapacity(GUID guid);\n\n    void update( GUID guid, long usedSize );\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/VolumeMasterManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\n\npublic interface VolumeMasterManipulator extends KOIMasterManipulator {\n    MirroredVolumeManipulator   getMirroredVolumeManipulator();\n\n    MountPointManipulator       getMountPointManipulator();\n\n    SimpleVolumeManipulator     getSimpleVolumeManipulator();\n\n    SpannedVolumeManipulator    getSpannedVolumeManipulator();\n\n    StripedVolumeManipulator    getStripedVolumeManipulator();\n\n    VolumeCapacityManipulator   getVolumeCapacityManipulator();\n\n    PhysicalVolumeManipulator   getPhysicalVolumeManipulator();\n\n    VolumeAllocateManipulator   getVolumeAllocateManipulator();\n\n    SQLiteVolumeManipulator     getSQLiteVolumeManipulator();\n\n    LogicVolumeManipulator      getPrimeLogicVolumeManipulator();\n}\n"
  },
  {
    "path": "Hydra/hydra-framework-storage/src/main/java/com/pinecone/hydra/storage/volume/source/VolumeTreeManipulator.java",
    "content": "package com.pinecone.hydra.storage.volume.source;\n\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\n\npublic interface VolumeTreeManipulator extends TrieTreeManipulator {\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>9</source>\n                    <target>9</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n    <artifactId>hydra-kom-default-driver</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-service</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-storage</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        
<dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-config</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-device</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-service-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n\n\n\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n\n        <!-- MyBatis dependencies -->\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis</artifactId>\n            <version>3.5.9</version>\n        </dependency>\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis-spring</artifactId>\n            <version>2.0.6</version>\n        </dependency>\n\n        <!-- MySQL Connector -->\n        <dependency>\n            <groupId>mysql</groupId>\n            <artifactId>mysql-connector-java</artifactId>\n            <version>8.0.26</version>\n        </dependency>\n\n        <!-- Logging dependencies -->\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n            <version>1.7.30</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/FileOwnerMapper.java",
    "content": "package com.pinecone.hydra;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\n\nimport java.util.List;\n\npublic interface FileOwnerMapper extends TireOwnerManipulator {\n    void insertRootNode (GUID guid, LinkedType linkedType );\n\n    default void insertRootNode ( GUID guid ) {\n        this.insertRootNode( guid, LinkedType.Owned );\n    }\n\n    void insert( GUID targetGuid, GUID parentGUID, LinkedType linkedType );\n\n    default void insertOwnedNode( GUID targetGuid, GUID parentGUID ) {\n        this.insert( targetGuid, parentGUID, LinkedType.Owned );\n    }\n\n    default void insertHardLinkedNode( GUID targetGuid, GUID parentGUID ) {\n        this.insert( targetGuid, parentGUID, LinkedType.Hard );\n    }\n\n\n\n    void update( GUID targetGuid, GUID parentGUID, LinkedType linkedType );\n\n    void updateParentGuid( GUID targetGuid, GUID parentGUID );\n\n    void updateLinkedType( GUID targetGuid, LinkedType linkedType );\n\n\n\n    void remove( GUID subordinateGuid, GUID ownerGuid );\n\n    void removeBySubordinate( GUID subordinateGuid );\n\n    void removeByOwner( GUID OwnerGuid );\n\n    GUID getOwner( GUID subordinateGuid );\n\n    List<GUID > getSubordinates(GUID guid );\n\n\n\n    void setLinkedType             ( GUID sourceGuid, GUID targetGuid, LinkedType linkedType );\n\n    default void setOwned          ( GUID sourceGuid, GUID targetGuid ) {\n        this.setLinkedType( sourceGuid, targetGuid, LinkedType.Owned );\n    }\n\n    default void setHardLink       ( GUID sourceGuid, GUID targetGuid ) {\n        this.setLinkedType( sourceGuid, targetGuid, LinkedType.Hard );\n    }\n\n    LinkedType getLinkedType       ( GUID childGuid,GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/AuthorizationMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.Authorization;\nimport com.pinecone.hydra.account.entity.GenericAuthorization;\nimport com.pinecone.hydra.account.source.AuthorizationManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface AuthorizationMapper extends AuthorizationManipulator {\n    @Insert(\"INSERT INTO `hydra_account_authorization` (`guid`, `user_name`, `user_guid`, `credential_guid`, `privilege_token`, `privilege_guid`, `create_time`, `update_time`) VALUES (#{guid},#{userName},#{userGuid},#{credentialGuid},#{privilrgrToken},#{privilegeGuid},#{createTime},#{updateTime})\")\n    void insert(Authorization authorization);\n\n    @Delete(\"DELETE FROM `hydra_account_authorization` WHERE `guid` = #{authorizationGuid}\")\n    void remove(GUID authorizationGuid);\n\n    @Insert(\"UPDATE `hydra_account_authorization` SET `privilege_token` = #{privilegeToken}, `privilege_guid` = #{privilegeGuid}, `update_time` = #{updateTime} WHERE guid = #{authorizationGuid}\")\n    void update(GUID authorizationGuid);\n\n    @Select(\"SELECT `id`, `guid`, `user_name`, `user_guid`, `credential_guid`, `privilege_token`, `privilege_guid`, `create_time`, `update_time` FROM `hydra_account_authorization` WHERE guid = #{authorizationGuid}\")\n    Authorization queryCredential(GUID authorizationGuid );\n\n    @Select(\"SELECT `id`, `guid`, `user_name` AS userName, `user_guid` AS userGuid, `credential_guid` AS credentialGuid , `privilege_token` AS privilegeToken, `privilege_guid` AS privilegeGuid, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_account_authorization` WHERE user_guid = #{userGuid}\")\n    List<GenericAuthorization> 
queryAuthorizationByUserGuid(GUID userGuid);\n\n    @Delete(\"DELETE FROM `hydra_account_authorization` WHERE user_guid = #{userGuid}\")\n    void removeAuthorizationByUserGuid(GUID userGuid);\n\n    @Select(\"SELECT `id`, `guid`, `user_name` AS userName, `user_guid` AS userGuid, `credential_guid` AS credentialGuid , `privilege_token` AS privilegeToken, `privilege_guid` AS privilegeGuid, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_account_authorization`\")\n    List<GenericAuthorization> queryAllAuthorization();\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/CredentialMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.Credential;\nimport com.pinecone.hydra.account.source.CredentialManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\n@IbatisDataAccessObject\npublic interface CredentialMapper extends CredentialManipulator {\n    @Insert(\"INSERT INTO `hydra_account_credential` (`guid`, `name`, `credential`, `create_time`, `update_time`, `type`) VALUES (#{guid},#{name},#{credential},#{createTime},#{updateTime},#{type})\")\n    void insert(Credential credential);\n\n    @Delete(\"DELETE FROM hydra_account_credential WHERE guid = #{guid}\")\n    void remove(GUID credentialGuid);\n\n    @Select(\"SELECT `id`, `guid`, `name`, `credential`, `create_time`, `update_time`, `type` FROM hydra_account_credential WHERE `guid` = #{guid}\")\n    Credential queryCredential(GUID credentialGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/DomainNodeMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.Domain;\nimport com.pinecone.hydra.account.entity.GenericDomain;\nimport com.pinecone.hydra.account.source.DomainNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface DomainNodeMapper extends DomainNodeManipulator {\n\n    @Insert(\"INSERT INTO `hydra_account_domain_node` (`domain_name`, `domin_guid`, `name`) VALUES (#{domainName}, #{guid}, #{name})\")\n    void insert(Domain domain);\n\n    @Delete(\"DELETE FROM `hydra_account_domain_node` WHERE `domin_guid` = #{domainGuid}\")\n    void remove(GUID domainGuid);\n\n    @Select(\"SELECT `id`, `domain_name` AS domainName, `domin_guid` AS guid, `name` FROM `hydra_account_domain_node` WHERE `domin_guid` = #{domainGuid}\")\n    GenericDomain queryDomain0(GUID domainGuid );\n\n    default GenericDomain queryDomain(GUID domainGuid ){\n        GenericDomain domain = this.queryDomain0(domainGuid);\n        domain.setDomainNodeManipulator( this );\n        return domain;\n    }\n\n    @Select(\"SELECT `domin_guid` FROM hydra_account_domain_node WHERE `name` = #{name}\")\n    List<GUID > getGuidsByName(String name );\n    @Select(\"SELECT `domin_guid` FROM hydra_account_domain_node WHERE `name` = #{name} AND domin_guid = #{guid}\")\n    List<GUID > getGuidsByNameID(@Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n    @Select(\"SELECT `id`, `domain_name` AS domainName, `domin_guid` AS guid, `name` FROM `hydra_account_domain_node`\")\n    List<GenericDomain> queryAllDomain();\n    @Select(\"SELECT `name` AS domainName FROM `hydra_account_domain_node` WHERE `domin_guid` = 
#{domainGuid}\")\n    String queryDomainNameByGuid(GUID domainGuid);\n\n    @Insert(\"UPDATE `hydra_account_domain_node` SET `domain_name` = #{domainName}, `name` = #{name} WHERE `domin_guid` = #{guid} \")\n    void update(Domain domain);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/GroupNodeMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.GenericGroup;\nimport com.pinecone.hydra.account.entity.Group;\nimport com.pinecone.hydra.account.source.GroupNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface GroupNodeMapper extends GroupNodeManipulator {\n    @Insert(\"INSERT INTO `hydra_account_group_node` (`default_privilege_policy_guid`, `guid`, `name`) VALUES (#{defaultPrivilegePolicyGuid},#{guid},#{name})\")\n    void insert(Group group);\n\n    @Delete(\"DELETE FROM `hydra_account_group_node` WHERE `guid` = #{groupGuid}\")\n    void remove(GUID groupGuid);\n\n    @Select(\"SELECT `id`, `default_privilege_policy_guid`, `guid`, `name` FROM `hydra_account_group_node` WHERE `guid` = #{groupGuid}\")\n    GenericGroup queryGroup(GUID groupGuid );\n\n    @Select(\"SELECT `guid` FROM hydra_account_group_node WHERE `name` = #{name}\")\n    List<GUID > getGuidsByName(String name );\n\n    @Select(\"SELECT `guid` FROM hydra_account_group_node WHERE `name` = #{name} AND guid = #{guid}\")\n    List<GUID > getGuidsByNameID(@Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n    @Insert(\"UPDATE `hydra_account_group_node` SET `default_privilege_policy_guid` = #{defaultPrivilegePolicyGuid}, `guid` = #{guid}, `name` = #{name} WHERE `guid` = #{guid}\")\n    void update(Group group);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/PrivilegeMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.GenericPrivilege;\nimport com.pinecone.hydra.account.source.PrivilegeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface PrivilegeMapper extends PrivilegeManipulator {\n    @Insert(\"INSERT INTO `hydra_account_privilege` (`guid`, `name`, `privilege_code`, `create_time`, `update_time`, `type`, `parent_priv_guid`,`token`) VALUES (#{guid}, #{name}, #{privilegeCode}, #{createTime}, #{updateTime}, #{type}, #{parentPrivGuid},#{token})\")\n    void insert(GenericPrivilege privilege);\n\n    @Insert(\"UPDATE `hydra_account_privilege` SET `guid` = #{guid}, `name` = #{name}, `privilege_code` = #{privilegeCode}, `create_time` = #{createTime}, `update_time` = #{updateTime}, `type` = #{type}, `parent_priv_guid` = #{parentPrivGuid} WHERE `guid` = #{guid}\")\n    void update(GenericPrivilege privilege);\n\n    @Delete(\"DELETE FROM `hydra_account_privilege` WHERE `guid` = #{privilegeGuid}\")\n    void remove(GUID privilegeGuid);\n\n    @Select(\"SELECT * FROM `hydra_account_privilege`\")\n    List<GenericPrivilege> queryAllPrivileges();\n\n    @Select(\"SELECT id, guid, token, name, privilege_code AS 'privilegeCode', create_time AS 'createTime', update_time AS 'updateTime' ,type, parent_priv_guid AS 'parentPrivGuid' FROM `hydra_account_privilege` WHERE `guid` = #{guid}\")\n    GenericPrivilege queryPrivilege(GUID guid);\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/RoleMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.hydra.account.entity.GenericRole;\nimport com.pinecone.hydra.account.entity.Role;\nimport com.pinecone.hydra.account.source.RoleManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface RoleMapper extends RoleManipulator {\n    @Insert(\"INSERT INTO `hydra_account_role` (`id`, `name`, `privilege_guids`, `create_time`, `update_time`, `type`) VALUES (#{id}, #{name}, #{privilegeGuids}, #{createTime}, #{updateTime}, #{type})\")\n    void insert(Role role);\n    @Delete(\"DELETE FROM `hydra_account_role` WHERE `id` = #{id}\")\n    void remove(int id);\n\n    @Insert(\"UPDATE `hydra_account_role` SET `create_time` = #{createTime}, `privilege_guids` = #{privilegeGuids}, `update_time` = #{updateTime}, `type`= #{type} WHERE `name` = #{name}\")\n    void updateRole(GenericRole role);\n\n    @Select(\"SELECT * FROM `hydra_account_role` WHERE `name` = #{name}\")\n    GenericRole queryRolesByUserGuid(String userGuid);\n\n    @Select(\"SELECT id , name, privilege_guids AS 'privilegeGuids', create_time AS 'createTime', update_time AS 'updateTime', type FROM `hydra_account_role`\")\n    List<GenericRole> queryAllRoles();\n\n\n    @Delete(\"DELETE FROM `hydra_account_role` WHERE `id` = #{id}\")\n    void removeRoleById(int id);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/UserNodeMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.entity.Account;\nimport com.pinecone.hydra.account.entity.GenericAccount;\nimport com.pinecone.hydra.account.source.UserNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface UserNodeMapper extends UserNodeManipulator {\n    @Insert(\"INSERT INTO `hydra_account_user_node` (`guid`, `user_name`, `nick_name`, `kernel_credential`, `credential_guid`, `kernel_group_type`, `create_time`, `update_time`) VALUES (#{guid},#{name},#{nickName},#{kernelCredential},#{credentialGuid},#{kernelGroupType},#{createTime},#{updateTime})\")\n    void insert(Account account);\n\n    @Delete(\"DELETE FROM `hydra_account_user_node` WHERE `guid` = #{userGuid}\")\n    void remove(GUID userGuid);\n\n    @Select(\"SELECT `id`, `guid`, `user_name` AS name, `nick_name` AS nickName, `kernel_credential` AS kernelCredential, `credential_guid` AS credentialGuid, `kernel_group_type` AS kernelGroupType, `create_time` AS createTime, `update_time` AS updateTime FROM hydra_account_user_node WHERE `guid` = #{userGuid}\")\n    GenericAccount queryUser(GUID userGuid );\n\n    @Select(\"SELECT `guid` FROM hydra_account_user_node WHERE `user_name` = #{name}\")\n    List<GUID > getGuidsByName(String name );\n\n    @Select(\"SELECT `guid` FROM hydra_account_user_node WHERE `user_name` = #{name} AND guid = #{guid}\")\n    List<GUID > getGuidsByNameID(@Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n    @Select(\"SELECT  `guid`, `user_name` AS name, `nick_name` AS nickName,  `kernel_group_type` AS kernelGroupType, `create_time` AS createTime, `update_time` AS updateTime ,`role` AS 
role FROM hydra_account_user_node\")\n    List<GenericAccount> queryAllAccount();\n\n    @Select(\"SELECT  `guid`, `user_name` AS name, `nick_name` AS nickName,  `kernel_group_type` AS kernelGroupType, `create_time` AS createTime, `update_time` AS updateTime ,`role` AS role FROM hydra_account_user_node WHERE `user_name` = #{userName}\")\n    GenericAccount queryAccountByName(String userName);\n\n    @Insert(\"UPDATE `hydra_account_user_node` SET `user_name` = #{name}, `nick_name` = #{nickName}, `kernel_group_type` = #{kernelGroupType}, `update_time` = #{updateTime}, `role` = #{role} WHERE `guid` = #{guid}\")\n    void update(GenericAccount account);\n\n    @Select(\"SELECT  `guid`, `user_name` AS name, `nick_name` AS nickName,  `kernel_group_type` AS kernelGroupType, `create_time` AS createTime, `update_time` AS updateTime ,`role`,`credential_guid` AS credentialGuid FROM hydra_account_user_node WHERE `guid` = #{userGuid}\")\n    GenericAccount queryAccountByUserGuid(GUID userGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/UserOwnerMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Update;\n@IbatisDataAccessObject\npublic interface UserOwnerMapper extends TireOwnerManipulator {\n    @Insert(\"INSERT INTO `hydra_account_node_tree` (`guid`) VALUES ( #{guid} )\")\n    void insertRootNode(@Param(\"guid\") GUID guid );\n\n    @Insert( \"INSERT INTO `hydra_account_node_tree` (`guid`, `parent_guid`) VALUES (#{targetGuid}, #{parentGuid})\" )\n    void insert( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n\n    @Update( \"UPDATE `hydra_account_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void update( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Update( \"UPDATE `hydra_account_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void updateParentGuid( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Delete( \"DELETE FROM `hydra_account_node_tree` WHERE `guid`=#{subordinateGuid} \" )\n    void remove( @Param(\"subordinateGuid\") GUID subordinateGuid );\n\n    @Delete( \"DELETE FROM `hydra_account_node_tree` WHERE `guid`=#{subordinateGuid} \" )\n    void removeBySubordinate( GUID subordinateGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/UserPathCacheMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n@IbatisDataAccessObject\npublic interface UserPathCacheMapper extends TriePathCacheManipulator {\n    @Insert(\"INSERT INTO `hydra_account_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )\")\n    void insert(@Param(\"guid\") GUID guid, @Param(\"path\") String path );\n\n    @Delete(\"DELETE FROM `hydra_account_node_cache_path` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `path` FROM `hydra_account_node_cache_path` WHERE `guid`=#{guid}\")\n    String getPath( GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_account_node_cache_path` WHERE `guid`=#{guid}\")\n    GUID getNode( String path );\n\n    @Select(\"SELECT `guid` FROM `hydra_account_node_cache_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/UserTreeMapper.java",
    "content": "package com.pinecone.hydra.account.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n@IbatisDataAccessObject\npublic interface UserTreeMapper extends TrieTreeManipulator {\n    @Insert(\"INSERT INTO `hydra_account_node_tree` (`guid`) VALUES ( #{guid} )\")\n    void insertRootNode(@Param(\"guid\") GUID guid);\n\n    @Override\n    default void insert (TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){\n        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );\n        ownerManipulator.insertRootNode( node.getGuid() );\n    }\n\n    @Insert(\"INSERT INTO `hydra_account_nodes` (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})\")\n    void insertTreeNode(@Param(\"guid\") GUID guid, @Param(\"type\") UOI type, @Param(\"baseDataGuid\") GUID baseDataGuid, @Param(\"nodeMetaGuid\") GUID nodeMetaGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_account_nodes WHERE guid=#{guid}\")\n    GUIDImperialTrieNode getNodeExtendsFromMeta(GUID guid );\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_account_nodes WHERE guid=#{guid}\")\n    boolean contains( GUID key );\n\n    @Override\n    default GUIDImperialTrieNode getNode(GUID guid ) {\n        GUIDImperialTrieNode node = 
this.getNodeExtendsFromMeta( guid );\n        if( node == null ){\n            return node;\n        }\n        List<GUID > parent = this.fetchParentGuids( guid );\n        node.setParentGUID( parent );\n        return node;\n    }\n\n    @Select(\"SELECT id, guid, parent_guid FROM hydra_account_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    GUIDImperialTrieNode getTreeNodeOnly(@Param(\"guid\") GUID guid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT count( * ) FROM hydra_account_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    long countNode( GUID guid, GUID parentGuid );\n\n\n    @Override\n    default void purge( GUID guid ) {\n        this.removeNodeMeta( guid );\n        this.removeTreeNode( guid );\n    }\n\n    @Delete(\"DELETE FROM `hydra_account_nodes` WHERE `guid`=#{guid}\")\n    void removeNodeMeta( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_account_node_tree` WHERE `guid` = #{guid}\")\n    void removeTreeNode( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_account_node_tree` WHERE `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeByParentGuid( @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_account_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeYoke( @Param(\"guid\") GUID guid, @Param(\"parent_guid\") GUID parentGuid );\n\n\n    @Delete(\"DELETE FROM `hydra_account_node_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance( @Param(\"chileGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_account_node_tree` WHERE `parent_guid`=#{guid}\")\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_account_node_tree` WHERE `parent_guid` = #{parentGuid}\")\n    List<GUID > 
fetchChildrenGuids( @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_account_node_tree` WHERE `guid`=#{guid}\")\n    List<GUID > fetchParentGuids( GUID guid );\n\n    @Update(\"UPDATE `hydra_account_nodes` SET `type` = #{type} WHERE guid=#{guid}\")\n    void updateType( UOI type , GUID guid );\n\n    @Select( \"SELECT guid FROM hydra_account_node_tree WHERE parent_guid IS NULL \" )\n    List<GUID > fetchRoot();\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_account_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}\" )\n    boolean isRoot( GUID guid );\n\n    @Update(\"UPDATE hydra_account_node_tree SET parent_guid = #{parentGuid} WHERE guid = #{childGuid}\")\n    void addChild( @Param(\"childGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/hydranium/UserMappingDriver.java",
    "content": "package com.pinecone.hydra.account.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class UserMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public UserMappingDriver( Processum superiorProcess ) {\n        super(superiorProcess);\n    }\n\n    public UserMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, UserMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new UserMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/hydranium/UserMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.account.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.account.ibatis.AuthorizationMapper;\nimport com.pinecone.hydra.account.ibatis.CredentialMapper;\nimport com.pinecone.hydra.account.ibatis.PrivilegeMapper;\nimport com.pinecone.hydra.account.ibatis.RoleMapper;\nimport com.pinecone.hydra.account.source.AuthorizationManipulator;\nimport com.pinecone.hydra.account.source.CredentialManipulator;\nimport com.pinecone.hydra.account.source.PrivilegeManipulator;\nimport com.pinecone.hydra.account.source.RoleManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.account.ibatis.DomainNodeMapper;\nimport com.pinecone.hydra.account.ibatis.GroupNodeMapper;\nimport com.pinecone.hydra.account.ibatis.UserNodeMapper;\nimport com.pinecone.hydra.account.source.DomainNodeManipulator;\nimport com.pinecone.hydra.account.source.GroupNodeManipulator;\nimport com.pinecone.hydra.account.source.UserMasterManipulator;\nimport com.pinecone.hydra.account.source.UserNodeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n@Component\npublic class UserMasterManipulatorImpl implements UserMasterManipulator {\n    @Resource\n    @Structure( type = DomainNodeMapper.class )\n    protected DomainNodeManipulator         domainNodeManipulator;\n\n    @Resource\n    @Structure( type = GroupNodeMapper.class )\n    protected GroupNodeManipulator          groupNodeManipulator;\n\n    @Resource\n    @Structure( type = UserNodeMapper.class )\n    protected UserNodeManipulator           userNodeManipulator;\n\n    @Resource\n    @Structure( type = AuthorizationMapper.class )\n    protected AuthorizationManipulator      authorizationManipulator;\n\n    @Resource\n    @Structure( type = 
UserMasterTreeManipulatorImpl.class )\n    protected KOISkeletonMasterManipulator skeletonMasterManipulator;\n\n    @Resource\n    @Structure( type = CredentialMapper.class )\n    protected CredentialManipulator credentialManipulator;\n\n    @Resource\n    @Structure( type = PrivilegeMapper.class )\n    protected PrivilegeManipulator privilegeManipulator;\n\n    @Resource\n    @Structure( type = RoleMapper.class )\n    protected RoleManipulator roleManipulator;\n\n\n\n    public UserMasterManipulatorImpl() {\n\n    }\n\n    public UserMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( UserMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new UserMasterTreeManipulatorImpl( driver );\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public DomainNodeManipulator getDomainNodeManipulator() {\n        return this.domainNodeManipulator;\n    }\n\n    @Override\n    public GroupNodeManipulator getGroupNodeManipulator() {\n        return this.groupNodeManipulator;\n    }\n\n    @Override\n    public UserNodeManipulator getUserNodeManipulator() {\n        return this.userNodeManipulator;\n    }\n\n    @Override\n    public CredentialManipulator getCredentialManipulator() {\n        return this.credentialManipulator;\n    }\n\n    @Override\n    public AuthorizationManipulator getAuthorizationManipulator() {\n        return this.authorizationManipulator;\n    }\n\n    @Override\n    public PrivilegeManipulator getPrivilegeManipulator() {\n        return this.privilegeManipulator;\n    }\n\n    @Override\n    public RoleManipulator getRoleManipulator() {\n        return this.roleManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/account/ibatis/hydranium/UserMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.account.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.account.ibatis.UserOwnerMapper;\nimport com.pinecone.hydra.account.ibatis.UserPathCacheMapper;\nimport com.pinecone.hydra.account.ibatis.UserTreeMapper;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n@Component\npublic class UserMasterTreeManipulatorImpl implements TreeMasterManipulator {\n    @Resource\n    @Structure( type = UserOwnerMapper.class )\n    protected TireOwnerManipulator          tireOwnerManipulator;\n\n    @Resource\n    @Structure( type = UserTreeMapper.class )\n    protected TrieTreeManipulator           trieTreeManipulator;\n\n    @Resource\n    @Structure( type = UserPathCacheMapper.class )\n    protected TriePathCacheManipulator      triePathCacheManipulator;\n\n    public UserMasterTreeManipulatorImpl() {\n\n    }\n\n    public UserMasterTreeManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( UserMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/BucketMapping.java",
    "content": "package com.pinecone.hydra.bucket.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.entity.Bucket;\nimport com.pinecone.hydra.storage.bucket.source.BucketManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface BucketMapping extends BucketManipulator {\n    @Insert(\"INSERT INTO hydra_uos_bucket (`bucket_name`, `create_Time`, `bucket_guid`, `user_guid`, `mount_point_guid`) VALUES (#{bucketName},#{createTime},#{bucketGuid},#{userGuid},#{mountPoint})\")\n    void insert( Bucket bucket );\n\n    @Delete(\"DELETE FROM hydra_uos_bucket WHERE `bucket_guid` = #{bucketGuid}\")\n    void remove( GUID bucketGuid );\n\n    @Delete(\"DELETE FROM `hydra_uos_bucket` WHERE `user_guid` = #{accountGuid} AND `bucket_name` = #{bucketName}\")\n    void removeByAccountAndBucketName(@Param(\"accountGuid\") GUID accountGuid, @Param(\"bucketName\") String bucketName);\n\n    @Select(\"SELECT `id`, `bucket_name` AS bucketName, `create_Time` AS createTime, `bucket_guid` AS bucketGuid, `user_guid` AS userGuid, `mount_point_guid` AS mountPoint FROM hydra_uos_bucket WHERE `bucket_guid` = #{bucketGuid}\")\n    Bucket queryBucketByBucketGuid( GUID bucketGuid );\n\n    @Select(\"SELECT `id`, `bucket_name` AS bucketName, `create_Time` AS createTime, `bucket_guid` AS bucketGuid, `user_guid` AS userGuid, `mount_point_guid` AS mountPoint FROM hydra_uos_bucket WHERE `user_guid` = #{userGuid} \")\n    List<Bucket> queryBucketsByUserGuid(GUID userGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/SiteMapping.java",
    "content": "package com.pinecone.hydra.bucket.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.entity.GenericSite;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.source.SiteManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface SiteMapping extends SiteManipulator {\n    @Insert(\"INSERT INTO `hydra_ucdn_sites` (`site_name`, `create_time`, `site_guid`, `mount_point_guid`) VALUES (#{siteName}, #{createTime}, #{siteGuid}, #{mountPointGuid})\")\n    void insert(Site site );\n\n    @Delete(\"DELETE FROM `hydra_ucdn_sites` WHERE `site_guid` = #{siteGuid}\")\n    void remove( GUID siteGuid );\n\n    @Delete(\"DELETE FROM `hydra_ucdn_sites` WHERE `site_name` = #{siteName}\")\n    void removeByName( String siteName );\n\n    @Select(\"SELECT `id`, `site_name` AS siteName, `create_time` AS createTime, `site_guid` AS siteGuid, `mount_point_guid` AS mountPointGuid FROM `hydra_ucdn_sites` WHERE `site_guid` = #{siteGuid}\")\n    GenericSite querySite(GUID siteGuid );\n\n    @Select(\"SELECT `id`, `site_name` AS siteName, `create_time` AS createTime, `site_guid` AS siteGuid, `mount_point_guid` AS mountPointGuid FROM `hydra_ucdn_sites` WHERE site_name = #{siteName}\")\n    GenericSite querySiteByName( String siteName );\n\n    default List<Site> listSite(){\n        List<GenericSite> genericSites = this.listSite0();\n        return new ArrayList<>(genericSites);\n    }\n\n    @Select(\"SELECT `id`, `site_name` AS siteName, `create_time` AS createTime, `site_guid` AS siteGuid, `mount_point_guid` AS mountPointGuid FROM `hydra_ucdn_sites`\")\n    List<GenericSite> listSite0();\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/SiteNodeMapper.java",
    "content": "package com.pinecone.hydra.bucket.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.entity.GenericSiteNode;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.entity.SiteNode;\nimport com.pinecone.hydra.storage.bucket.source.SiteNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface SiteNodeMapper extends SiteNodeManipulator {\n\n    @Insert(\"INSERT INTO `hydra_ucdn_site_node` (`node_name`, `node_guid`, `state`, `is_enabled`, `related_service`, `site_guid`) VALUES (#{nodeName},#{nodeGuid},#{state},#{isEnabled},#{relatedService},#{siteGuid})\")\n    void insert(SiteNode siteNode );\n\n    @Delete(\"DELETE FROM `hydra_ucdn_site_node` WHERE `node_guid` = #{siteNodeGuid}\")\n    void remove( GUID siteNodeGuid );\n\n    @Select(\"SELECT `node_name` AS nodeName, `node_guid` AS nodeGuid, `state`, `is_enabled` AS isEnabled, `related_service` AS relatedService, `id`, site_guid AS siteGuid FROM hydra_ucdn_site_node WHERE node_guid = #{siteNodeGuid}\")\n    GenericSiteNode querySiteNode(GUID siteNodeGuid );\n\n\n    default List<SiteNode> querySiteNodeBySiteGuid( GUID siteGuid ){\n        List<GenericSiteNode> genericSiteNodes = this.querySiteNodeBySiteGuid0(siteGuid);\n        return new ArrayList<>(genericSiteNodes);\n    }\n\n    @Select(\"SELECT `node_name` AS nodeName, `node_guid` AS nodeGuid, `state`, `is_enabled` AS isEnabled, `related_service` AS relatedService, `id`, site_guid AS siteGuid FROM hydra_ucdn_site_node WHERE site_guid = #{siteGuid}\")\n    List<GenericSiteNode> querySiteNodeBySiteGuid0( GUID siteGuid );\n\n    @Update(\"UPDATE 
`hydra_ucdn_site_node` SET `node_name` = #{nodeName}, `state` = #{state}, `is_enabled` = #{isEnabled} WHERE `node_guid` = #{nodeGuid}\")\n    void update( SiteNode siteNode );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/hydranium/BucketMappingDriver.java",
    "content": "package com.pinecone.hydra.bucket.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class BucketMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public BucketMappingDriver( Processum superiorProcess  ) {\n        super( superiorProcess );\n    }\n\n    // Temp , TODO\n    public BucketMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, BucketMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new BucketMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/bucket/ibatis/hydranium/BucketMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.bucket.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.bucket.ibatis.BucketMapping;\nimport com.pinecone.hydra.bucket.ibatis.SiteMapping;\nimport com.pinecone.hydra.bucket.ibatis.SiteNodeMapper;\nimport com.pinecone.hydra.storage.bucket.source.BucketManipulator;\nimport com.pinecone.hydra.storage.bucket.source.BucketMasterManipulator;\nimport com.pinecone.hydra.storage.bucket.source.SiteManipulator;\nimport com.pinecone.hydra.storage.bucket.source.SiteNodeManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class BucketMasterManipulatorImpl implements BucketMasterManipulator {\n    @Resource\n    @Structure( type = BucketMapping.class )\n    BucketManipulator bucketMapping;\n\n    @Resource\n    @Structure( type = SiteMapping.class )\n    SiteManipulator siteManipulator;\n\n    @Resource\n    @Structure( type = SiteNodeMapper.class )\n    SiteNodeManipulator siteNodeManipulator;\n\n\n    public BucketMasterManipulatorImpl() {\n\n    }\n\n    public BucketMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( BucketMasterManipulatorImpl.class, Map.of(), this );\n    }\n    @Override\n    public BucketManipulator getBucketManipulator() {\n        return this.bucketMapping;\n    }\n\n    @Override\n    public SiteManipulator getSiteManipulator() {\n        return this.siteManipulator;\n    }\n\n    @Override\n    public SiteNodeManipulator getSiteNodeManipulator() {\n        return this.siteNodeManipulator;\n    }\n\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/ClusterNodeMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.ClusterElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericClusterElement;\nimport com.pinecone.hydra.deploy.kom.source.ClusterNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ClusterNodeMapper extends ClusterNodeManipulator {\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_cluster_node` \" +\n            \"(`guid`, `name`, `type`, `create_time`, `update_time`) \" +\n            \"VALUES (#{guid}, #{name}, #{type}, #{createTime}, #{updateTime})\")\n    void insert( ClusterElement clusterElement);\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_cluster_node` WHERE `guid` = #{guid}\")\n    void remove( @Param(\"guid\") GUID guid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name`, `type`, \" +\n            \"`create_time` AS `createTime`, `update_time` AS `updateTime` \" +\n            \"FROM `hydra_deploy_cluster_node` WHERE `guid` = #{guid}\")\n    GenericClusterElement getAppElement(@Param(\"guid\") GUID guid );\n\n    @Override\n    default ClusterElement getClusterElement(GUID guid, DeployInstrument instrument ) {\n        GenericClusterElement element = this.getAppElement( guid );\n        element.apply( instrument );\n\n        return element;\n    }\n\n    @Override\n    @Update(\"UPDATE `hydra_deploy_cluster_node` SET \" +\n            \"`name` = #{name}, \" +\n            \"`type` = #{type}, \" +\n            \"`create_time` = 
#{createTime}, \" +\n            \"`update_time` = #{updateTime} \" +\n            \"WHERE `guid` = #{guid}\")\n    void update( ClusterElement clusterElement);\n\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_deploy_cluster_node` WHERE `name` = #{name}\" )\n    List<GUID > getGuidsByName( String name );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_deploy_cluster_node` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/ContainerElementMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.ContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericContainerElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.QuickElement;\nimport com.pinecone.hydra.deploy.kom.source.ContainerElementManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n@Mapper\n@IbatisDataAccessObject\npublic interface ContainerElementMapper extends ContainerElementManipulator {\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_container` (`guid`, `status`,`name`) VALUES (#{guid},#{status},#{name})\")\n    void insert( ContainerElement quickElement );\n\n    @Override\n    @Insert(\"UPDATE `hydra_deploy_container` SET `status` = #{status} ,`name` = #{name} WHERE  `guid` = #{guid}\")\n    void update( ContainerElement serviceElement );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_container` WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `guid`, `status` AS status FROM `hydra_deploy_container` WHERE `guid` = #{guid}\")\n    GenericContainerElement getContainerElement0( GUID guid );\n\n    @Override\n    default GenericContainerElement getContainerElement(GUID guid, DeployInstrument instrument ){\n        GenericContainerElement element = this.getContainerElement0( guid );\n        element.apply( instrument );\n        return element;\n    }\n\n\n    @Override\n    @Select(\"SELECT `guid`, `status` AS status,`name` AS name FROM `hydra_deploy_container` WHERE `guid` = 
#{guid}\")\n    List<GUID > getGuidsByName(String name );\n\n    @Override\n    @Select(\"SELECT `guid`, `status` AS status,`name` AS name FROM `hydra_deploy_container` WHERE `guid` = #{guid} AND `name` = #{name}\")\n    List<GUID > getGuidsByNameID( String name, GUID guid );\n\n\n    @Select( \"SELECT `guid`, `status` AS status,`name` AS name FROM `hydra_deploy_container` WHERE `name` = #{name}\")\n    List<QuickElement> fetchQuickElementByName(@Param(\"name\") String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNamespaceMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\nimport com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator;\n\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n\n@Mapper\n@IbatisDataAccessObject\npublic interface DeployNamespaceMapper extends DeployNamespaceManipulator {\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_namespace_node` (`guid`, `name`) VALUES (#{guid},#{name})\")\n    void insert( Namespace ns );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_namespace_node` WHERE `guid`=#{guid}\")\n    void remove( @Param(\"guid\") GUID GUID );\n\n    @Override\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_deploy_namespace_node` WHERE `guid`=#{guid}\")\n    GenericNamespace getNamespace( @Param(\"guid\") GUID guid );\n\n    @Override\n    @Update(\"UPDATE `hydra_deploy_namespace_node` SET `name` = #{name} WHERE `guid` = #{guid}\")\n    void update( Namespace ns );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_deploy_namespace_node` WHERE name=#{name}\")\n    List<GenericNamespace > fetchNamespaceNodeByName0( @Param(\"name\") String name );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<Namespace > fetchNamespaceNodeByName( String name ){\n        return (List) this.fetchNamespaceNodeByName0( name );\n    }\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_deploy_namespace_node` WHERE `name` = #{name}\" )\n    List<GUID > getGuidsByName(String name);\n\n    
@Override\n    @Select( \"SELECT `guid` FROM `hydra_deploy_namespace_node` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNodeMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.entity.DeployElement;\nimport com.pinecone.hydra.deploy.kom.source.DeployNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface DeployNodeMapper extends DeployNodeManipulator {\n    @Override\n    @Insert( \"INSERT INTO `hydra_deploy_deploy_nodes` (`guid`,`enable`,`name`) VALUES (#{guid}, #{enable}, #{name})\")\n    void insert( DeployElement deployElement );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_deploy_nodes` WHERE `guid`=#{guid}\")\n    void remove( GUID UUID );\n    @Override\n    @Insert( \"UPDATE `hydra_deploy_deploy_nodes` SET `enable`=#{enable}, `name`=#{name} WHERE `guid`=#{guid}\")\n    void update( DeployElement deployElement );\n    @Override\n    @Select(\"SELECT `guid`, `enable` AS Enable FROM `hydra_deploy_deploy_nodes` WHERE `name`=#{name}\")\n    List<DeployElement> fetchDeployNodeByName( @Param(\"name\") String name );\n\n    @Select(\"SELECT `guid` FROM `hydra_deploy_deploy_nodes` WHERE `name`=#{name}\")\n    @Override\n    List<GUID> getGuidsByName( String name );\n\n    @Select(\"SELECT `guid` FROM `hydra_deploy_deploy_nodes` WHERE `name`=#{name} AND `guid`!=#{guid}\")\n    @Override\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNodeMetaMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployFamilyNode;\nimport com.pinecone.hydra.deploy.kom.entity.GenericCommonMeta;\nimport com.pinecone.hydra.deploy.kom.entity.Namespace;\nimport com.pinecone.hydra.deploy.kom.source.NodeMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface DeployNodeMetaMapper extends NodeMetaManipulator {\n\n    @Override\n    @Insert( \"INSERT INTO `hydra_deploy_node_meta` (`guid`,`description`,`extra_information`,`name`,`ip_address`) VALUES (#{guid}, #{description}, #{extraInformation},#{name},#{ipAddress})\")\n    void insert( DeployFamilyNode node );\n\n    @Override\n    @Insert( \"INSERT INTO `hydra_deploy_node_meta` (`guid`,`description`,`extra_information`,`name`,`ip_address`) VALUES (#{guid}, #{description}, #{extraInformation},#{name},#{ipAddress})\")\n    void insertNS( Namespace node );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_node_meta` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n    @Override\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `description` AS Description, `extra_information` AS ExtraInformation ,`ip_address` AS ipAddress FROM `hydra_deploy_node_meta` WHERE `guid` = #{guid}\")\n    GenericCommonMeta getNodeCommonMeta(@Param(\"guid\") GUID guid );\n\n    @Override\n    @Update( \"UPDATE `hydra_deploy_node_meta` SET `description` = #{description} , `extra_information` = #{extraInformation},`ip_address` = #{ipAddress} WHERE guid = #{guid}\")\n    void update( DeployFamilyNode node );\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNodeOwnerMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface DeployNodeOwnerMapper extends TireOwnerManipulator {\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode( @Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Insert( \"INSERT INTO `hydra_deploy_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})\" )\n    void insert( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Update( \"UPDATE `hydra_deploy_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void update( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Update( \"UPDATE `hydra_deploy_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void updateParentGuid( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Override\n    @Update( \"UPDATE `hydra_deploy_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void updateLinkedType( @Param(\"targetGuid\") 
GUID targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_deploy_node_tree` WHERE `guid`=#{subordinateGuid}  AND `linked_type` = 'Owned'\" )\n    void remove( @Param(\"subordinateGuid\") GUID subordinateGuid, @Param(\"ownerGuid\") GUID ownerGuid );\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_deploy_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'\" )\n    void removeBySubordinate( GUID subordinateGuid );\n\n//    @Delete(\"DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}\")\n//    void removeByOwner(GUID ownerGuid);\n    @Override\n    @Select( \"SELECT `parent_guid` FROM `hydra_deploy_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'\" )\n    GUID getOwner( GUID subordinateGuid );\n\n    @Override\n    @Select( \"SELECT guid FROM hydra_deploy_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'\" )\n    List<GUID > getSubordinates( GUID guid );\n\n\n    @Update(\"UPDATE `hydra_deploy_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}\")\n    void setLinkedType( @Param(\"sourceGuid\") GUID sourceGuid, @Param(\"targetGuid\") GUID targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Select(\"SELECT `linked_type` FROM `hydra_deploy_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}\")\n    LinkedType getLinkedType( @Param(\"childGuid\") GUID childGuid,@Param(\"parentGuid\") GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployNodePathCacheMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@IbatisDataAccessObject\npublic interface DeployNodePathCacheMapper extends TriePathCacheManipulator {\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_node_path`(`path`, `guid`) VALUES ( #{path}, #{guid} )\")\n    void insert(@Param(\"guid\") GUID guid, @Param(\"path\") String path );\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_node_path` (path, long_path, guid) VALUES ( #{path},#{longPath},#{guid} )\")\n    void insertLongPath( @Param(\"guid\") GUID guid, @Param(\"path\") String path, @Param(\"longPath\") String longPath );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_node_path` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n\n    default String getPath( GUID guid ){\n        String longPath = this.getLongPath(guid);\n        if ( longPath != null ){\n            return this.getPath0( guid )+this.getLongPath( guid );\n        }\n        return this.getPath0( guid );\n    };\n\n    @Select(\"SELECT `long_path` FROM `hydra_deploy_node_path` WHERE `guid`=#{guid}\")\n    String getLongPath( GUID guid );\n\n    @Select(\"SELECT `path` FROM `hydra_deploy_node_path` WHERE `guid`=#{guid}\")\n    String getPath0( GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_deploy_node_path` WHERE `path`=#{path}\")\n    GUID getNode( String path );\n\n    @Select(\"SELECT `guid` FROM `hydra_deploy_node_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployServiceInsMappingMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.entity.DeployInsMapping;\nimport com.pinecone.hydra.deploy.kom.entity.GenericDeployInsMapping;\nimport com.pinecone.hydra.deploy.kom.source.DeployServiceInsMappingManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface DeployServiceInsMappingMapper extends DeployServiceInsMappingManipulator {\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_service_ins_mapping` (`deploy_guid`, `service_ins_guid`) VALUES (#{deployGuid}, #{serviceInsGuid})\")\n    void insert(@Param(\"deployInsMapping\") DeployInsMapping deployInsMapping);\n\n    @Override\n    @Select(\"SELECT `id`, `deploy_guid`, `service_ins_guid`, `create_time`, `update_time` \" +\n            \"FROM `hydra_deploy_service_ins_mapping` WHERE `service_ins_guid` = #{insGuid}\")\n    GenericDeployInsMapping queryDeployInsMappingByInsGuid(@Param(\"insGuid\") GUID insGuid);\n\n    @Override\n    @Select(\"SELECT `id`, `deploy_guid`, `service_ins_guid`, `create_time`, `update_time` \" +\n            \"FROM `hydra_deploy_service_ins_mapping` WHERE `deploy_guid` = #{deployGuid}\")\n    GenericDeployInsMapping queryDeployInsMappingByDeployGuid(GUID deployGuid);\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_service_ins_mapping` WHERE `service_ins_guid` = #{insGuid}\")\n    void removeByInsGuid(GUID insGuid);\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_service_ins_mapping` WHERE `deploy_guid` = #{deployGuid}\")\n    void removeByDeployGuid(GUID deployGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/DeployTreeMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface DeployTreeMapper extends TrieTreeManipulator {\n    @Insert(\"INSERT INTO `hydra_deploy_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode( @Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    default void insert ( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){\n        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );\n        ownerManipulator.insertRootNode( node.getGuid() );\n    }\n\n    @Insert(\"INSERT INTO hydra_deploy_nodes (`guid`, `type`,`base_data_guid`,`node_metadata_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})\")\n    void insertTreeNode( @Param(\"guid\") GUID guid, @Param(\"type\") UOI type, @Param(\"baseDataGuid\") GUID baseDataGuid, @Param(\"nodeMetaGuid\") GUID nodeMetaGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_metadata_guid AS nodeMetadataGUID FROM hydra_deploy_nodes WHERE guid=#{guid}\")\n    GUIDImperialTrieNode getNodeExtendsFromMeta( GUID guid );\n\n    @Override\n    
default GUIDImperialTrieNode getNode( GUID guid ) {\n        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );\n        if ( node == null ) {\n            return null;\n        }\n\n        List<GUID > parent = this.fetchParentGuids( guid );\n        node.setParentGUID( parent );\n        return node;\n    }\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_deploy_nodes WHERE guid=#{guid}\")\n    boolean contains( GUID key );\n\n    @Select(\"SELECT id, guid, parent_guid, linked_type FROM hydra_deploy_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    GUIDImperialTrieNode getTreeNodeOnly(@Param(\"guid\") GUID guid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT count( * ) FROM hydra_deploy_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    long countNode( GUID guid, GUID parentGuid );\n\n\n\n    @Override\n    default void purge( GUID guid ) {\n        this.removeNodeMeta( guid );\n        this.removeTreeNode( guid );\n        this.removeOwnedTreeNode( guid );\n    }\n\n    @Delete(\"DELETE FROM `hydra_deploy_nodes` WHERE `guid`=#{guid}\")\n    void removeNodeMeta( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_deploy_node_tree` WHERE `guid` = #{guid}\")\n    void removeTreeNode( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_deploy_node_tree` WHERE `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeByParentGuid( @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_deploy_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeYoke( @Param(\"guid\") GUID guid, @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_deploy_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\")\n    void removeTreeNodeWithLinkedType( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n\n    @Delete(\"DELETE FROM 
`hydra_deploy_node_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance( @Param(\"chileGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_deploy_node_tree` WHERE `parent_guid`=#{guid}\")\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_deploy_node_tree` WHERE `parent_guid` = #{parentGuid}\")\n    List<GUID > fetchChildrenGuids( @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_deploy_node_tree` WHERE `guid`=#{guid}\")\n    List<GUID > fetchParentGuids( GUID guid );\n\n    @Update(\"UPDATE `hydra_deploy_nodes` SET `type` = #{type} WHERE guid=#{guid}\")\n    void updateType( UOI type , GUID guid );\n\n    @Select( \"SELECT guid FROM hydra_deploy_node_tree WHERE parent_guid IS NULL \" )\n    List<GUID > fetchRoot();\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_deploy_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}\" )\n    boolean isRoot( GUID guid );\n\n\n\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_deploy_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\" )\n    long queryLinkedCount( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_deploy_node_tree WHERE `guid` = #{guid}\" )\n    long queryAllLinkedCount( @Param(\"guid\") GUID guid );\n\n\n    @Override\n    @Insert(\n            \"INSERT INTO `hydra_deploy_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) \" +\n            \"VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})\"\n    )\n    void newLinkTag(\n            @Param(\"originalGuid\") GUID originalGuid, @Param(\"dirGuid\") GUID dirGuid,\n            @Param(\"tagName\") String tagName, @Param(\"tagGuid\") GUID 
tagGuid, @Param(\"linkedType\") LinkedType linkedType\n    );\n\n    @Override\n    @Update( \"UPDATE hydra_deploy_node_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}\" )\n    void updateLinkTagName( @Param(\"tagGuid\") GUID tagGuid, @Param(\"tagName\") String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_deploy_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}\" )\n    GUID getOriginalGuid( @Param(\"tagName\") String tagName, @Param(\"dirGuid\") GUID dirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_deploy_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    GUID getOriginalGuidByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_deploy_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}\" )\n    TreeReparseLinkNode getReparseLinkNode( @Param(\"tagName\") String tagName, @Param(\"parentDirGuid\") GUID parentDirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_deploy_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_deploy_node_tree WHERE `tag_name` = #{tagName}\" )\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_deploy_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL\" )\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM `hydra_deploy_node_tree` WHERE `tag_guid` = 
#{guid}\" )\n    boolean isTagGuid(GUID guid);\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_deploy_node_tree` WHERE `tag_guid` = #{guid}\" )\n    void removeReparseLink( GUID guid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_deploy_node_tree` WHERE `tag_guid` = #{tagGuid}\" )\n    GUID getOriginalGuidByTagGuid(GUID tagGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/PhysicalHostMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.entity.GenericPhysicalHost;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.source.PhysicalHostManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface PhysicalHostMapper extends PhysicalHostManipulator {\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_physical_host` (`guid`, `name`, `hardware_specs`, `status`) VALUES (#{guid},#{name},#{hardwareSpecs},#{status})\")\n    void insert( PhysicalHostElement physicalHostElement );\n\n    @Override\n    @Insert(\"UPDATE `hydra_deploy_physical_host` SET `name` = #{name},  `hardware_specs` = #{hardwareSpecs}, `status` = #{status} WHERE `guid` = #{guid}\")\n    void update( PhysicalHostElement serviceElement );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_physical_host` WHERE `guid` = #{guid}\")\n    void remove(GUID guid);\n\n    @Select(\"SELECT `guid`, `name` as ipAddress, `hardware_specs` as hardwareSpecs, `status` FROM `hydra_deploy_physical_host` WHERE `guid` = #{guid}\")\n    GenericPhysicalHostElement getPhysicalHostElement0( GUID guid );\n\n    @Override\n    default GenericPhysicalHostElement getPhysicalHostElement( GUID guid, DeployInstrument instrument ){\n        GenericPhysicalHostElement element = this.getPhysicalHostElement0( guid );\n        element.apply( instrument );\n        return element;\n    }\n\n    
@Select(\"SELECT `guid` FROM `hydra_deploy_physical_host` WHERE `name`=#{name}\")\n    @Override\n    List<GUID> getGuidsByName(String name );\n\n    @Select(\"SELECT `guid` FROM `hydra_deploy_physical_host` WHERE `name`=#{name} AND `guid`!=#{guid}\")\n    @Override\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/QuickElementMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericQuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.entity.QuickElement;\nimport com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.source.QuickElementManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface QuickElementMapper extends QuickElementManipulator {\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_deploy_quick` (`guid`, `type_name`, `enable`,`name`) VALUES (#{guid},#{typeName},#{enable},#{name})\")\n    void insert( QuickElement quickElement );\n\n    @Override\n    @Insert(\"UPDATE `hydra_deploy_quick` SET `enable` = #{enable} , `type_name` = #{typeName} ,`name` = #{name} WHERE  `guid` = #{guid}\")\n    void update( QuickElement serviceElement );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_deploy_quick` WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `guid`, `type_name` AS typeName, `enable` AS Enable FROM `hydra_deploy_quick` WHERE `guid` = #{guid}\")\n    GenericQuickElement getQuickElement0( GUID guid );\n\n    @Override\n    default GenericQuickElement getQuickElement( GUID guid, DeployInstrument instrument ){\n        GenericQuickElement element = this.getQuickElement0( guid );\n        element.apply( instrument );\n        return element;\n    }\n\n\n    @Override\n    @Select(\"SELECT `guid`, `type_name` AS typeName, `enable` AS enable ,`name` AS 
Name FROM `hydra_deploy_quick` WHERE `name` = #{name}\")\n    List<GUID > getGuidsByName(String name );\n\n    @Override\n    @Select(\"SELECT `guid` FROM `hydra_deploy_quick` WHERE `name` = #{name} AND `guid` != #{guid}\")\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n\n    @Select( \"SELECT `guid`, `type_name` AS typeName, `enable` AS enable ,`name` AS Name FROM `hydra_deploy_quick` WHERE `name` = #{name}\")\n    List<QuickElement> fetchQuickElementByName(@Param(\"name\") String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/VirtualMachineMapper.java",
    "content": "package com.pinecone.hydra.deploy.ibatis;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.entity.GenericVirtualMachine;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.entity.VirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.source.VirtualMachineManipulator;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.GenericTaskElement;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface VirtualMachineMapper extends VirtualMachineManipulator {\n\n    @Insert(\"INSERT INTO `hydra_deploy_virtual_machine` (`guid`, `name`,  `status`,`affiliate_host_guid`) VALUES (#{guid},#{name},#{status},#{affiliateHostGuid})\")\n    void insert( VirtualMachineElement virtualMachineElement );\n\n    @Insert( \"UPDATE `hydra_deploy_virtual_machine` SET `name` = #{name},  `status` = #{status}, `affiliate_host_guid` = #{affiliateHostGuid} WHERE `guid` = #{guid}\")\n    void update( VirtualMachineElement serviceElement );\n\n    @Delete(\"DELETE FROM `hydra_deploy_virtual_machine` WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `guid`, `name` , `status`, `affiliate_host_guid` FROM `hydra_deploy_virtual_machine` WHERE `guid` = #{guid}\")\n    GenericVirtualMachineElement getDeployNode0( GUID guid );\n\n    @Override\n    default VirtualMachineElement getDeployNode( GUID guid, DeployInstrument instrument ){\n        GenericVirtualMachineElement element = 
this.getDeployNode0( guid );\n        element.apply( instrument );\n        return element;\n    }\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_deploy_virtual_machine` WHERE `name` = #{name}\" )\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_deploy_virtual_machine` WHERE `name` = #{name} AND `guid` != #{guid}\" )\n    List<GUID> getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n\n}"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/hydranium/DeployMappingDriver.java",
    "content": "package com.pinecone.hydra.deploy.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class DeployMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public DeployMappingDriver(Processum superiorProcess  ) {\n        super( superiorProcess );\n    }\n\n    // Temp , TODO\n    public DeployMappingDriver(Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, DeployMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new DeployMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/hydranium/DeployMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.deploy.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.deploy.ibatis.ContainerElementMapper;\nimport com.pinecone.hydra.deploy.ibatis.DeployNamespaceMapper;\nimport com.pinecone.hydra.deploy.ibatis.DeployNodeMapper;\nimport com.pinecone.hydra.deploy.ibatis.DeployNodeMetaMapper;\nimport com.pinecone.hydra.deploy.ibatis.DeployNodeOwnerMapper;\nimport com.pinecone.hydra.deploy.ibatis.DeployServiceInsMappingMapper;\nimport com.pinecone.hydra.deploy.ibatis.DeployTreeMapper;\nimport com.pinecone.hydra.deploy.ibatis.ClusterNodeMapper;\nimport com.pinecone.hydra.deploy.ibatis.PhysicalHostMapper;\nimport com.pinecone.hydra.deploy.ibatis.QuickElementMapper;\nimport com.pinecone.hydra.deploy.ibatis.VirtualMachineMapper;\nimport com.pinecone.hydra.deploy.kom.source.ContainerElementManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployMasterManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployNamespaceManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployNodeManipulator;\nimport com.pinecone.hydra.deploy.kom.source.DeployServiceInsMappingManipulator;\nimport com.pinecone.hydra.deploy.kom.source.PhysicalHostManipulator;\nimport com.pinecone.hydra.deploy.kom.source.QuickElementManipulator;\nimport com.pinecone.hydra.deploy.kom.source.VirtualMachineManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.deploy.kom.source.ClusterNodeManipulator;\nimport com.pinecone.hydra.deploy.kom.source.NodeMetaManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class DeployMasterManipulatorImpl implements 
DeployMasterManipulator {\n    @Resource\n    @Structure( type = DeployNodeOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    @Resource\n    @Structure(type = DeployTreeMapper.class )\n    TrieTreeManipulator trieTreeManipulator;\n\n    @Resource\n    @Structure(type = ClusterNodeMapper.class )\n    ClusterNodeManipulator jobNodeManipulator;\n\n    @Resource\n    @Structure(type = DeployNodeMetaMapper.class )\n    NodeMetaManipulator nodeMetaManipulator;\n\n    @Resource\n    @Structure(type = DeployNodeMapper.class)\n    DeployNodeManipulator deployNodeManipulator;\n\n    @Resource\n    @Structure( type = DeployNamespaceMapper.class )\n    DeployNamespaceManipulator deployNamespaceManipulator;\n\n    @Resource\n    @Structure( type = PhysicalHostMapper.class )\n    PhysicalHostManipulator physicalHostManipulator;\n\n    @Resource\n    @Structure( type = VirtualMachineMapper.class )\n    VirtualMachineManipulator virtualMachineManipulator;\n\n    @Resource\n    @Structure( type = QuickElementMapper.class )\n    QuickElementManipulator quickElementManipulator;\n\n\n    @Resource\n    @Structure( type = ContainerElementMapper.class )\n    ContainerElementManipulator containerElementManipulator;\n\n    @Resource\n    @Structure( type = DeployServiceInsMappingMapper.class )\n    DeployServiceInsMappingManipulator deployServiceInsMappingManipulator;\n\n\n    @Resource( type = DeployMasterTreeManipulatorImpl.class )\n    KOISkeletonMasterManipulator skeletonMasterManipulator;\n\n    public DeployMasterManipulatorImpl() {\n\n    }\n\n    public DeployMasterManipulatorImpl(KOIMappingDriver driver ) {\n        driver.autoConstruct( DeployMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new DeployMasterTreeManipulatorImpl( driver );\n    }\n\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n\n    @Override\n    
public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public NodeMetaManipulator getNodeMetaManipulator() {\n        return this.nodeMetaManipulator;\n    }\n\n    @Override\n    public ClusterNodeManipulator getJobNodeManipulator() {\n        return this.jobNodeManipulator;\n    }\n\n    @Override\n    public DeployNodeManipulator getDeployNodeManipulator() {\n        return this.deployNodeManipulator;\n    }\n\n    @Override\n    public DeployNamespaceManipulator getNamespaceManipulator() {\n        return this.deployNamespaceManipulator;\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public PhysicalHostManipulator getPhysicalHostManipulator() {\n        return this.physicalHostManipulator;\n    }\n\n    @Override\n    public VirtualMachineManipulator getVirtualMachineManipulator() {\n        return this.virtualMachineManipulator;\n    }\n\n    @Override\n    public QuickElementManipulator getQuickElementManipulator() {\n        return this.quickElementManipulator;\n    }\n\n    @Override\n    public ContainerElementManipulator getContainerElementManipulator() {\n        return this.containerElementManipulator;\n    }\n\n    @Override\n    public DeployServiceInsMappingManipulator getDeployServiceInsMappingManipulator() {\n        return this.deployServiceInsMappingManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/deploy/ibatis/hydranium/DeployMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.deploy.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.deploy.ibatis.DeployNodeOwnerMapper;\nimport com.pinecone.hydra.deploy.ibatis.DeployNodePathCacheMapper;\nimport com.pinecone.hydra.deploy.ibatis.DeployTreeMapper;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class DeployMasterTreeManipulatorImpl implements TreeMasterManipulator {\n\n    @Resource\n    @Structure( type = DeployNodePathCacheMapper.class )\n    TriePathCacheManipulator triePathCacheManipulator;\n\n    @Resource\n    @Structure( type = DeployNodeOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    @Resource\n    @Structure( type = DeployTreeMapper.class )\n    TrieTreeManipulator  trieTreeManipulator;\n\n    public DeployMasterTreeManipulatorImpl() {\n\n    }\n\n    public DeployMasterTreeManipulatorImpl(KOIMappingDriver driver ) {\n        driver.autoConstruct( DeployMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/GUID128TypeHandler.java",
    "content": "package com.pinecone.hydra.entity.ibatis;\n\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport org.apache.ibatis.type.BaseTypeHandler;\nimport org.apache.ibatis.type.JdbcType;\nimport org.apache.ibatis.type.MappedJdbcTypes;\nimport org.apache.ibatis.type.MappedTypes;\n\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\n@MappedTypes(UUID128.class)\n@MappedJdbcTypes(JdbcType.VARCHAR)\npublic class GUID128TypeHandler extends BaseTypeHandler<UUID128> {\n    @Override\n    public void setNonNullParameter(PreparedStatement ps, int i, UUID128 parameter, JdbcType jdbcType) throws SQLException {\n        ps.setString(i, parameter.toString());\n    }\n\n    @Override\n    public UUID128 getNullableResult(ResultSet rs, String columnName) throws SQLException {\n        String value = rs.getString(columnName);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n\n    @Override\n    public UUID128 getNullableResult(ResultSet rs, int columnIndex) throws SQLException {\n        String value = rs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n\n    @Override\n    public UUID128 getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {\n        String value = cs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/GUID72TypeHandler.java",
    "content": "package com.pinecone.hydra.entity.ibatis;\n\nimport com.pinecone.ulf.util.guid.i64.GUID72;\nimport org.apache.ibatis.type.BaseTypeHandler;\nimport org.apache.ibatis.type.JdbcType;\nimport org.apache.ibatis.type.MappedJdbcTypes;\nimport org.apache.ibatis.type.MappedTypes;\n\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\n@MappedTypes(GUID72.class)\n@MappedJdbcTypes(JdbcType.VARCHAR)\npublic class GUID72TypeHandler extends BaseTypeHandler<GUID72> {\n\n    @Override\n    public void setNonNullParameter(PreparedStatement ps, int i, GUID72 parameter, JdbcType jdbcType) throws SQLException {\n        ps.setString(i, parameter.toString());\n    }\n\n    @Override\n    public GUID72 getNullableResult(ResultSet rs, String columnName) throws SQLException {\n        String value = rs.getString(columnName);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new GUID72( value );\n    }\n\n    @Override\n    public GUID72 getNullableResult(ResultSet rs, int columnIndex) throws SQLException {\n        String value = rs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new GUID72( value );\n    }\n\n    @Override\n    public GUID72 getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {\n        String value = cs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new GUID72( value );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/GUIDTypeHandler.java",
    "content": "package com.pinecone.hydra.entity.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport org.apache.ibatis.type.BaseTypeHandler;\nimport org.apache.ibatis.type.JdbcType;\nimport org.apache.ibatis.type.MappedJdbcTypes;\nimport org.apache.ibatis.type.MappedTypes;\n\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\n@MappedTypes(GUID.class)\n@MappedJdbcTypes(JdbcType.VARCHAR)\npublic class GUIDTypeHandler extends BaseTypeHandler<GUID> {\n\n    @Override\n    public void setNonNullParameter(PreparedStatement ps, int i, GUID parameter, JdbcType jdbcType) throws SQLException {\n        ps.setString(i, parameter.toString());\n    }\n\n    @Override\n    public GUID getNullableResult(ResultSet rs, String columnName) throws SQLException {\n        String value = rs.getString(columnName);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n\n    @Override\n    public GUID getNullableResult(ResultSet rs, int columnIndex) throws SQLException {\n        String value = rs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n\n    @Override\n    public GUID getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {\n        String value = cs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/UOITypeHandler.java",
    "content": "package com.pinecone.hydra.entity.ibatis;\n\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\nimport org.apache.ibatis.type.BaseTypeHandler;\nimport org.apache.ibatis.type.JdbcType;\nimport org.apache.ibatis.type.MappedJdbcTypes;\nimport org.apache.ibatis.type.MappedTypes;\n\nimport com.pinecone.framework.util.uoi.UOI;\n\n\n@MappedTypes(UOI.class)\n@MappedJdbcTypes(JdbcType.VARCHAR)\npublic class UOITypeHandler extends BaseTypeHandler<UOI > {\n    @Override\n    public void setNonNullParameter( PreparedStatement ps, int i, UOI parameter, JdbcType jdbcType ) throws SQLException {\n        ps.setString(i, parameter.toString());\n    }\n\n    @Override\n    public UOI getNullableResult( ResultSet rs, String columnName ) throws SQLException {\n        String value = rs.getString(columnName);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UOI( value );\n    }\n\n    @Override\n    public UOI getNullableResult( ResultSet rs, int columnIndex ) throws SQLException {\n        String value = rs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UOI( value );\n    }\n\n    @Override\n    public UOI getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {\n        String value = cs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UOI( value );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/URITypeHandler.java",
    "content": "package com.pinecone.hydra.entity.ibatis;\n\nimport java.net.URI;\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\nimport org.apache.ibatis.type.BaseTypeHandler;\nimport org.apache.ibatis.type.JdbcType;\nimport org.apache.ibatis.type.MappedJdbcTypes;\nimport org.apache.ibatis.type.MappedTypes;\n\n@MappedTypes(URI.class)\n@MappedJdbcTypes(JdbcType.VARCHAR)\npublic class URITypeHandler extends BaseTypeHandler<URI> {\n\n    @Override\n    public void setNonNullParameter(PreparedStatement ps, int i, URI parameter, JdbcType jdbcType) throws SQLException {\n        ps.setString(i, parameter.toString());\n    }\n\n    @Override\n    public URI getNullableResult(ResultSet rs, String columnName) throws SQLException {\n        String value = rs.getString(columnName);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return URI.create( value );\n    }\n\n    @Override\n    public URI getNullableResult(ResultSet rs, int columnIndex) throws SQLException {\n        String value = rs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return URI.create( value );\n    }\n\n    @Override\n    public URI getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {\n        String value = cs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return URI.create( value );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/entity/ibatis/hydranium/ArchMappingDriver.java",
    "content": "package com.pinecone.hydra.entity.ibatis.hydranium;\n\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.construction.UnifyStructureInjector;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.homotype.StereotypicInjector;\nimport com.pinecone.hydra.entity.ibatis.GUID128TypeHandler;\nimport com.pinecone.hydra.entity.ibatis.GUID72TypeHandler;\nimport com.pinecone.hydra.entity.ibatis.GUIDTypeHandler;\nimport com.pinecone.hydra.entity.ibatis.UOITypeHandler;\nimport com.pinecone.hydra.entity.ibatis.URITypeHandler;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.ProxySessionMapperPool;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic abstract class ArchMappingDriver implements KOIMappingDriver {\n    protected Hydrogen mSystem;\n\n    protected Processum            mSuperiorProcess;\n\n    protected IbatisClient         mIbatisClient;\n\n    //protected SqlSession           mSqlSession;\n\n    protected List<Class<? 
> >     mMapperCandidates;\n\n    protected ResourceDispenserCenter mResourceDispenserCenter;\n\n    public ArchMappingDriver( Processum superiorProcess ) {\n        this.mSuperiorProcess                 = superiorProcess;\n        if ( this.mSuperiorProcess instanceof Hydrogen) {\n            this.mSystem                      = (Hydrogen) this.mSuperiorProcess;\n        }\n        else {\n            this.mSystem                      = (Hydrogen) superiorProcess.parentSystem();\n        }\n    }\n\n    // Temp , TODO\n    public ArchMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter, String szPackageName ) {\n        this( superiorProcess );\n\n        this.mIbatisClient = ibatisClient;\n        //this.mSqlSession   = ibatisClient.openSession( true );\n\n        //SqlSessionTemplate\n\n        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUID72TypeHandler.class );\n        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUID128TypeHandler.class );\n        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUIDTypeHandler.class );\n        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( UOITypeHandler.class );\n        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( URITypeHandler.class );\n\n        ibatisClient.addXMLObjectScope( \"mapper.kernel.task\" );\n        this.mMapperCandidates = ibatisClient.addDataAccessObjectScope( szPackageName );\n\n        for( Class<? 
> mapperClass : this.mMapperCandidates ) {\n            dispenserCenter.getInstanceDispenser().register(\n                    mapperClass,\n                    //new SoloSessionMapperPool( this.mSqlSession, mapperClass )\n                    new ProxySessionMapperPool( ibatisClient, mapperClass )\n            );\n        }\n\n        this.mResourceDispenserCenter = dispenserCenter;\n    }\n\n    @Override\n    public StereotypicInjector autoConstruct( Class<?> stereotype, Map config, Object instance ) {\n        UnifyStructureInjector injector = new UnifyStructureInjector( stereotype, this.mResourceDispenserCenter.getInstanceDispenser() );\n        try {\n            injector.inject( config, instance );\n        }\n        catch ( Exception e ){\n            throw new ProxyProvokeHandleException( e );\n        }\n        return injector;\n    }\n\n    @Override\n    public String getVersionSignature() {\n        return \"HydraniumV2.1\";\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return this.mSystem;\n    }\n\n    @Override\n    public Processum getSuperiorProcess() {\n        return this.mSuperiorProcess;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/ExternalSymbolicMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ExternalSymbolic;\nimport com.pinecone.hydra.storage.file.entity.GenericExternalSymbolic;\nimport com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\n@IbatisDataAccessObject\npublic interface ExternalSymbolicMapper extends ExternalSymbolicManipulator {\n    @Insert(\"INSERT INTO hydra_uofs_directly_external_symbolic (`guid`, `create_time`, `update_time`, `name`, `reparsed_point`) VALUES (#{guid},#{createTime},#{updateTime},#{name},#{reparsedPoint})\")\n    void insert( ExternalSymbolic externalSymbolic );\n    @Delete(\"DELETE FROM hydra_uofs_directly_external_symbolic WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `id`, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `reparsed_point` AS reparsedPoint FROM hydra_uofs_directly_external_symbolic WHERE `guid` = #{guid}\")\n    GenericExternalSymbolic getSymbolicByGuid( GUID guid );\n\n    @Select(\"SELECT `id`, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `reparsed_point` AS reparsedPoint FROM hydra_uofs_directly_external_symbolic WHERE `guid` = #{guid} AND `name` = #{nodeName}\")\n    GenericExternalSymbolic getSymbolicByNameGuid(String nodeName, GUID guid );\n\n    @Select(\"SELECT COUNT(*) FROM hydra_uofs_directly_external_symbolic WHERE `guid` = #{guid} AND `name` = #{nodeName}\")\n    boolean isSymbolicMatchedByNameGuid(String nodeName, GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.GenericFileNode;\nimport com.pinecone.hydra.storage.file.source.FileManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface FileMapper extends FileManipulator {\n    FileNode getFileNode(GUID guid, ElementNode element);\n    @Insert(\"INSERT INTO `hydra_uofs_files` (`guid`, `create_time`, `update_time`, `deleted_at`, `name`, `checksum`, `parity_check`, `physical_size`,`logic_size`,`definition_size`,`crc32_xor`,`integrity_check_enable`,`disable_cluster`) VALUES (#{guid},#{createTime},#{updateTime},#{deletedTime},#{name},#{checksum},#{parityCheck},#{physicalSize},#{logicSize},#{definitionSize},#{crc32Xor},#{integrityCheckEnable},#{disableCluster})\")\n    void insert( FileNode fileNode );\n    @Delete(\"DELETE FROM `hydra_uofs_files` WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `deleted_at` AS deletedTime, `name`, `checksum`, `parity_check` AS parityCheck, `physical_size` AS physicalSize,`logic_size` AS logicSize,`definition_size` AS definitionSize,`crc32_xor` AS crc32Xor,`integrity_check_enable` AS integrityCheckEnable,`disable_cluster` AS disableCluster FROM hydra_uofs_files WHERE `guid` = #{guid}\")\n    GenericFileNode getFileNodeByGuid(GUID guid);\n\n    @Select(\"SELECT `guid` FROM `hydra_uofs_files` WHERE 
`name` = #{name}\")\n    List<GUID > getGuidsByName(String name );\n\n    @Select(\"SELECT `guid` FROM `hydra_uofs_files` WHERE `name` = #{name} AND `guid` = #{guid}\")\n    List<GUID > getGuidsByNameID(@Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n    @Select(\"SELECT `guid` FROM hydra_uofs_files \")\n    List<GUID > dumpGuid();\n\n    @Update(\"UPDATE hydra_uofs_files SET checksum = #{checksum}, parity_check = #{parityCheck}, physical_size = #{physicalSize}, logic_size = #{logicSize}, crc32_xor = #{crc32Xor}, definition_size = #{definitionSize} WHERE guid = #{guid}\")\n    void update( FileNode fileNode );\n\n    @Update(\"UPDATE hydra_uofs_files SET name = #{newName} WHERE guid = #{guid}\")\n    void rename( GUID guid,String newName );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileMetaMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FileMeta;\nimport com.pinecone.hydra.storage.file.entity.GenericFileMeta;\nimport com.pinecone.hydra.storage.file.source.FileMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface FileMetaMapper extends FileMetaManipulator {\n    FileMeta getFileMeta(GUID guid, ElementNode element);\n    @Insert(\"INSERT INTO `hydra_uofs_files_meta` (`guid`) VALUES (#{guid})\")\n    void insert( FileMeta fileMeta );\n    @Delete(\"DELETE FROM `hydra_uofs_files_meta` WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n    @Select(\"SELECT `id` AS enumId, `guid` FROM `hydra_uofs_files_meta` WHERE guid = #{guid}\")\n    GenericFileMeta getFileMetaByGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileOwnerMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n@Mapper\n@IbatisDataAccessObject\npublic interface FileOwnerMapper extends TireOwnerManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode(@Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Insert( \"INSERT INTO `hydra_uofs_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})\" )\n    void insert( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Update( \"UPDATE `hydra_uofs_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void update( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Update( \"UPDATE `hydra_uofs_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void updateParentGuid( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Update( \"UPDATE `hydra_uofs_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void updateLinkedType( @Param(\"targetGuid\") GUID targetGuid, 
@Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Delete( \"DELETE FROM `hydra_uofs_node_tree` WHERE `guid`=#{subordinateGuid}  AND `linked_type` = 'Owned'\" )\n    void remove( @Param(\"subordinateGuid\") GUID subordinateGuid, @Param(\"ownerGuid\") GUID ownerGuid );\n\n    @Delete( \"DELETE FROM `hydra_uofs_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'\" )\n    void removeBySubordinate( GUID subordinateGuid );\n\n//    @Delete(\"DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}\")\n//    void removeByOwner(GUID ownerGuid);\n\n    @Select( \"SELECT `parent_guid` FROM `hydra_uofs_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'\" )\n    GUID getOwner( GUID subordinateGuid );\n\n    @Select( \"SELECT guid FROM hydra_uofs_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'\" )\n    List<GUID > getSubordinates( GUID guid );\n\n\n    @Update(\"UPDATE `hydra_uofs_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}\")\n    void setLinkedType( @Param(\"sourceGuid\") GUID sourceGuid, @Param(\"targetGuid\") GUID targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Select(\"SELECT `linked_type` FROM `hydra_uofs_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}\")\n    LinkedType getLinkedType( @Param(\"childGuid\") GUID childGuid,@Param(\"parentGuid\") GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FilePathCacheMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n@Mapper\n@IbatisDataAccessObject\npublic interface FilePathCacheMapper extends TriePathCacheManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )\")\n    void insert(@Param(\"guid\") GUID guid, @Param(\"path\") String path );\n\n    @Insert(\"INSERT INTO `hydra_uofs_node_cache_path` (path, long_path, guid) VALUES ( #{path},#{longPath},#{guid} )\")\n    void insertLongPath( @Param(\"guid\") GUID guid, @Param(\"path\") String path, @Param(\"longPath\") String longPath );\n\n    @Delete(\"DELETE FROM `hydra_uofs_node_cache_path` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n\n    default String getPath( GUID guid ){\n        String longPath = this.getLongPath(guid);\n        if( longPath != null ){\n            return this.getPath0( guid )+longPath;\n        }\n        return this.getPath0( guid );\n    };\n    @Select(\"SELECT `long_path` FROM `hydra_uofs_node_cache_path` WHERE `guid`=#{guid}\")\n    String getLongPath( GUID guid );\n    @Select(\"SELECT `path` FROM `hydra_uofs_node_cache_path` WHERE `guid`=#{guid}\")\n    String getPath0( GUID guid );\n    @Select(\"SELECT `guid` FROM `hydra_uofs_node_cache_path` WHERE `path`=#{path}\")\n    GUID getNode( String path );\n\n    @Select(\"SELECT `guid` FROM `hydra_uofs_node_cache_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileSystemAttributeMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FileSystemAttributes;\nimport com.pinecone.hydra.storage.file.source.FileSystemAttributeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\n\nimport java.util.List;\nimport java.util.Map;\n@Mapper\n@IbatisDataAccessObject\npublic interface FileSystemAttributeMapper extends FileSystemAttributeManipulator {\n    @Insert( \"INSERT INTO `hydra_registry_node_attributes` (`guid`, `key`, `value`) VALUES (#{guid}, #{key}, #{value})\" )\n    void insertAttribute(GUID guid, String key, String value );\n\n    List<Map<String, Object >> getAttributesByGuid(GUID guid );\n\n    void updateAttribute( GUID guid, String key, String value );\n    void remove( GUID guid );\n\n    default FileSystemAttributes getAttributes(GUID guid, ElementNode element ){\n        return null;\n    };\n\n    default void insert( FileSystemAttributes attributes) {\n        for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) {\n            this.insertAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() );\n        }\n    }\n\n    default void update( FileSystemAttributes attributes) {\n        for ( Map.Entry<String, String> entry : attributes.getAttributes().entrySet() ) {\n            this.updateAttribute( attributes.getGuid(), entry.getKey(), entry.getValue() );\n        }\n    }\n\n\n    boolean containsKey ( GUID guid, String key );\n\n    void clearAttributes( GUID guid );\n\n    void removeAttributeWithValue( GUID guid, String key, String value );\n\n    void removeAttribute( GUID guid, String key );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FileTreeMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n@Mapper\n@IbatisDataAccessObject\npublic interface FileTreeMapper extends TrieTreeManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode(@Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    default void insert ( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){\n        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );\n        ownerManipulator.insertRootNode( node.getGuid() );\n    }\n\n    @Insert(\"INSERT INTO hydra_uofs_nodes (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})\")\n    void insertTreeNode( @Param(\"guid\") GUID guid, @Param(\"type\") UOI type, @Param(\"baseDataGuid\") GUID baseDataGuid, @Param(\"nodeMetaGuid\") GUID nodeMetaGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_uofs_nodes WHERE guid=#{guid}\")\n    GUIDImperialTrieNode 
getNodeExtendsFromMeta(GUID guid );\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_uofs_nodes WHERE guid=#{guid}\")\n    boolean contains( GUID key );\n\n\n    @Override\n    default GUIDImperialTrieNode getNode(GUID guid ) {\n        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );\n        List<GUID > parent = this.fetchParentGuids( guid );\n        node.setParentGUID( parent );\n        return node;\n    }\n\n    @Select(\"SELECT id, guid, parent_guid, linked_type FROM hydra_uofs_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    GUIDImperialTrieNode getTreeNodeOnly(@Param(\"guid\") GUID guid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT count( * ) FROM hydra_uofs_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    long countNode( GUID guid, GUID parentGuid );\n\n\n\n    @Override\n    default void purge( GUID guid ) {\n        this.removeNodeMeta( guid );\n        this.removeTreeNode( guid );\n        this.removeOwnedTreeNode( guid );\n    }\n\n    @Delete(\"DELETE FROM `hydra_uofs_nodes` WHERE `guid`=#{guid}\")\n    void removeNodeMeta( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_uofs_node_tree` WHERE `guid` = #{guid}\")\n    void removeTreeNode( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_uofs_node_tree` WHERE `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeByParentGuid( @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_uofs_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeYoke( @Param(\"guid\") GUID guid, @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_uofs_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\")\n    void removeTreeNodeWithLinkedType( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n\n    @Delete(\"DELETE FROM `hydra_uofs_node_tree` WHERE `guid`=#{chileGuid} 
AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance( @Param(\"chileGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_uofs_node_tree` WHERE `parent_guid`=#{guid}\")\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_uofs_node_tree` WHERE `parent_guid` = #{parentGuid}\")\n    List<GUID > fetchChildrenGuids( @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_uofs_node_tree` WHERE `guid`=#{guid}\")\n    List<GUID > fetchParentGuids( GUID guid );\n\n    @Update(\"UPDATE `hydra_uofs_nodes` SET `type` = #{type} WHERE guid=#{guid}\")\n    void updateType( UOI type , GUID guid );\n\n    @Select( \"SELECT guid FROM hydra_uofs_node_tree WHERE parent_guid IS NULL \" )\n    List<GUID > fetchRoot();\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_uofs_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}\" )\n    boolean isRoot( GUID guid );\n\n\n\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_uofs_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\" )\n    long queryLinkedCount( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_uofs_node_tree WHERE `guid` = #{guid}\" )\n    long queryAllLinkedCount( @Param(\"guid\") GUID guid );\n\n\n    @Override\n    @Insert(\n            \"INSERT INTO `hydra_uofs_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) \" +\n                    \"VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})\"\n    )\n    void newLinkTag(\n            @Param(\"originalGuid\") GUID originalGuid, @Param(\"dirGuid\") GUID dirGuid,\n            @Param(\"tagName\") String tagName, @Param(\"tagGuid\") GUID tagGuid, @Param(\"linkedType\") LinkedType linkedType\n    
);\n\n    @Override\n    @Update( \"UPDATE hydra_uofs_node_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}\" )\n    void updateLinkTagName( @Param(\"tagGuid\") GUID tagGuid, @Param(\"tagName\") String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_uofs_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}\" )\n    GUID getOriginalGuid( @Param(\"tagName\") String tagName, @Param(\"dirGuid\") GUID dirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_uofs_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    GUID getOriginalGuidByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_uofs_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}\" )\n    TreeReparseLinkNode getReparseLinkNode(@Param(\"tagName\") String tagName, @Param(\"parentDirGuid\") GUID parentDirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_uofs_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_uofs_node_tree WHERE `tag_name` = #{tagName}\" )\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_uofs_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL\" )\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM `hydra_uofs_node_tree` WHERE `tag_guid` = #{guid}\" )\n    boolean isTagGuid(GUID guid);\n\n    @Override\n    @Delete( 
\"DELETE FROM `hydra_uofs_node_tree` WHERE `tag_guid` = #{guid}\" )\n    void removeReparseLink( GUID guid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_uofs_node_tree` WHERE `tag_guid` = #{tagGuid}\" )\n    GUID getOriginalGuidByTagGuid(GUID tagGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FolderMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.entity.GenericFolder;\nimport com.pinecone.hydra.storage.file.source.FolderManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface FolderMapper extends FolderManipulator {\n    Folder getFolder(GUID guid, ElementNode element);\n    @Insert(\"INSERT INTO `hydra_uofs_folders` (`guid`, `create_time`, `update_time`, `name`) VALUES (#{guid},#{createTime},#{updateTime},#{name})\")\n    void insert( Folder folder );\n    @Delete(\"DELETE FROM `hydra_uofs_folders` WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n    @Update(\"UPDATE `hydra_uofs_folders` SET update_time = #{updateTime}, name = #{name} WHERE guid = #{guid}\")\n    void update( Folder folder );\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name` FROM `hydra_uofs_folders` WHERE `guid` = #{guid}\")\n    GenericFolder getFolderByGuid(GUID guid);\n    @Select(\"SELECT `guid` FROM `hydra_uofs_folders` WHERE `name` = #{name}\")\n    List<GUID > getGuidsByName(String name );\n    @Select(\"SELECT `guid` FROM `hydra_uofs_folders` WHERE `name` = #{name} AND `guid` = #{guid}\")\n    List<GUID > getGuidsByNameID(@Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n    @Select(\"SELECT `guid` FROM hydra_uofs_folders\")\n    List<GUID > dumpGuid();\n    @Select(\"SELECT COUNT('id') FROM hydra_uofs_folders WHERE guid 
= #{guid}\")\n    boolean isFolder(GUID guid);\n\n    @Update(\"UPDATE hydra_uofs_folders SET name = #{newName} WHERE guid = #{fileGuid}\")\n    void rename( GUID fileGuid, String newName );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FolderMetaMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FolderMeta;\nimport com.pinecone.hydra.storage.file.entity.GenericFolderMeta;\nimport com.pinecone.hydra.storage.file.source.FolderMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n@Mapper\n@IbatisDataAccessObject\npublic interface FolderMetaMapper extends FolderMetaManipulator {\n    FolderMeta getFolderMeta(GUID guid, ElementNode element);\n    @Insert(\"INSERT INTO `hydra_uofs_folder_meta` (`guid`) VALUES (#{guid})\")\n    void insert( FolderMeta folderMeta );\n    @Delete(\"DELETE FROM `hydra_uofs_folder_meta` WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n    @Select(\"SELECT `id` AS enumId, `guid` FROM `hydra_uofs_folder_meta` WHERE `guid` = #{guid}\")\n    GenericFolderMeta getFolderMetaByGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/FolderVolumeMappingMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.source.FolderVolumeMappingManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface FolderVolumeMappingMapper extends FolderVolumeMappingManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_file_volume_mapping` (`folder_guid`, `volume_guid`) VALUES (#{folderGuid}, #{volumeGuid})\")\n    void insert(@Param(\"folderGuid\") GUID folderGuid, @Param(\"volumeGuid\") GUID volumeGuid );\n\n    @Delete(\"DELETE FROM `hydra_uofs_file_volume_mapping` WHERE `volume_guid` = #{volumeGuid} AND `folder_guid` = #{folderGuid}\")\n    void remove( @Param(\"folderGuid\") GUID folderGuid, @Param(\"volumeGuid\") GUID volumeGuid );\n\n    @Select(\"SELECT `volume_guid` FROM `hydra_uofs_file_volume_mapping` WHERE `folder_guid` = #{folderGuid}\")\n    GUID getVolumeGuid( GUID folderGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/LocalClusterMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.GenericLocalCluster;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.file.source.LocalClusterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface LocalClusterMapper extends LocalClusterManipulator {\n    LocalCluster getLocalCluster(GUID guid, ElementNode element);\n    @Insert(\"INSERT INTO `hydra_uofs_local_cluster_fat` (`file_guid`, `seg_guid`, `seg_id`, `create_time`, `update_time`, `source_name`, `crc32`, `size`) VALUES (#{fileGuid},#{segGuid},#{segId},#{createTime},#{updateTime},#{sourceName},#{crc32},#{size})\")\n    void insert( LocalCluster localCluster );\n    @Delete(\"DELETE FROM `hydra_uofs_local_cluster_fat` WHERE `seg_guid` = #{guid}\")\n    void remove( GUID guid );\n    @Delete(\"DELETE FROM `hydra_uofs_local_cluster_fat` WHERE `file_guid` = #{fileGuid}\")\n    void removeClustersByFile( GUID fileGuid );\n    default GenericLocalCluster getLocalClusterByGuid(GUID guid){\n        GenericLocalCluster localCluster = this.getLocalClusterByGuid0(guid);\n        if ( localCluster == null ) return null;\n        localCluster.setLocalClusterManipulator( this );\n        return localCluster;\n    }\n    @Select(\"SELECT `id` AS enumId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `seg_id` AS segId, `create_time` AS createTime, `update_time` AS updateTime, `source_name` AS sourceName, `crc32`, `size` FROM `hydra_uofs_local_cluster_fat` 
WHERE `seg_guid` = #{guid}\")\n    GenericLocalCluster getLocalClusterByGuid0(GUID guid);\n    @Select(\"SELECT `id`, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `seg_id` AS segId, `create_time` AS createTime, `update_time` AS updateTime, `source_name` AS sourceName, `crc32`, `size` FROM `hydra_uofs_local_cluster_fat` WHERE `file_guid` = #{guid}\")\n    List<GenericLocalCluster> getLocalClusterByFileGuid0(GUID guid );\n\n    @Select(\"SELECT `id`, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `seg_id` AS segId, `create_time` AS createTime, `update_time` AS updateTime, `source_name` AS sourceName, `crc32`, `size` FROM `hydra_uofs_local_cluster_fat` WHERE `file_guid` = #{fileGuid} AND `seg_id` = #{segId}\")\n    GenericLocalCluster getClusterByFileWithId0(GUID fileGuid, long segId );\n\n    @Update(\"UPDATE `hydra_uofs_local_cluster_fat` SET `size` = #{size} WHERE `file_guid` = #{fileGuid} AND `seg_id` = #{segId}\")\n    void update( LocalCluster localCluster );\n\n    @Delete(\"DELETE FROM `hydra_uofs_local_cluster_fat` WHERE file_guid = #{fileGuid} AND seg_id = #{segId}\")\n    void removeClusterByFileWithId( GUID fileGuid, long segId );\n\n    default GenericLocalCluster getClusterByFileWithId(GUID fileGuid, long segId ){\n        GenericLocalCluster frame = this.getClusterByFileWithId0(fileGuid, segId);\n        if( frame == null ){\n            return null;\n        }\n        frame.setLocalClusterManipulator( this );\n        return frame;\n    }\n    default List<LocalCluster> getLocalClusterByFileGuid(GUID guid){\n        List<LocalCluster> localClusters = new ArrayList<>();\n        List<GenericLocalCluster> frames = this.getLocalClusterByFileGuid0(guid);\n        for (LocalCluster frame : frames){\n            frame.setLocalClusterManipulator(this);\n            localClusters.add( frame );\n        }\n        return localClusters;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/RemoteClusterMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.GenericRemoteCluster;\nimport com.pinecone.hydra.storage.file.entity.RemoteCluster;\nimport com.pinecone.hydra.storage.file.source.RemoteClusterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface RemoteClusterMapper extends RemoteClusterManipulator {\n    RemoteCluster getRemoteCluster(GUID guid, ElementNode element);\n    @Insert(\"INSERT INTO `hydra_uofs_files_cluster_mapping` (`file_guid`, `seg_guid`, `device_guid`, `seg_id`, `crc32`, `size`) VALUES (#{fileGuid},#{segGuid},#{deviceGuid},#{segId},#{crc32},#{size})\")\n    void insert( RemoteCluster remoteCluster );\n    @Delete(\"DELETE FROM `hydra_uofs_files_cluster_mapping` WHERE `seg_guid` = #{guid}\")\n    void remove( GUID guid );\n    @Delete(\"DELETE FROM `hydra_uofs_files_cluster_mapping` WHERE file_guid = #{fileGuid}\")\n    void removeClustersByFile( GUID fileGuid );\n    @Select(\"SELECT `id` AS enumID, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` FROM `hydra_uofs_files_cluster_mapping` WHERE `seg_guid` = #{guid}\")\n    RemoteCluster fetchRemoteClustersByFileGuid(GUID guid);\n    @Select(\"SELECT `id`, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{guid}\")\n    List<GenericRemoteCluster> fetchRemoteClustersByFileGuid0( GUID guid );\n\n    
@Override\n    default List<RemoteCluster> fetchRemoteClusterByFileGuid( GUID guid ){\n        List<RemoteCluster> remoteClusters = new ArrayList<>();\n        List<GenericRemoteCluster> frames = this.fetchRemoteClustersByFileGuid0(guid);\n        for (RemoteCluster frame : frames){\n            frame.setRemoteClusterManipulator(this);\n            remoteClusters.add(frame);\n        }\n        return remoteClusters;\n    };\n\n    @Select(\"SELECT `id`, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` \" +\n            \"FROM `hydra_uofs_files_cluster_mapping` \" +\n            \"WHERE `file_guid` = #{guid} \" +\n            \"ORDER BY `seg_id`, `id` ASC \" +\n            \"LIMIT #{offset}, #{pageSize}\")\n    List<GenericRemoteCluster> fetchRemoteClusterByFileGuidPart0(\n            @Param(\"guid\") GUID guid,\n            @Param(\"offset\") long offset,\n            @Param(\"pageSize\") int pageSize);\n\n    @Override\n    default List<RemoteCluster > fetchRemoteClusterByFileGuid( GUID guid, long offset, int pageSize ) {\n        List<RemoteCluster> remoteClusters = new ArrayList<>();\n        List<GenericRemoteCluster> frames = this.fetchRemoteClusterByFileGuidPart0( guid, offset, pageSize );\n        for ( RemoteCluster frame : frames ){\n            frame.setRemoteClusterManipulator( this );\n            remoteClusters.add(frame);\n        }\n        return remoteClusters;\n    };\n\n    @Select(\"SELECT COUNT(*) FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{guid}\")\n    long countRemoteClustersByFileGuid( @Param(\"guid\") GUID guid );\n\n\n\n    @Select(\"SELECT `id` AS emunId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{guid} ORDER BY `seg_id` DESC LIMIT 1\")\n    RemoteCluster getLastCluster(GUID guid );\n\n    @Delete(\"DELETE FROM 
`hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{fileGuid} AND `seg_id` = #{segId}\")\n    void removeClusterByFileWithId(GUID fileGuid, long segId );\n\n    @Select(\"SELECT COUNT(*) FROM `hydra_uofs_files_cluster_mapping` WHERE file_guid = #{fileGuid}\")\n    long countFileClusters( @Param(\"fileGuid\") GUID fileGuid );\n\n    default RemoteCluster getClusterByFileWithId( GUID fileGuid, long segId ){\n        GenericRemoteCluster cluster = this.getClusterByFileWithId0(fileGuid, segId);\n        if( cluster == null ) {\n            return null;\n        }\n\n        cluster.setRemoteClusterManipulator( this );\n        return cluster;\n    }\n\n    @Select(\"SELECT `id` AS emunId, `file_guid` AS fileGuid, `seg_guid` AS segGuid, `device_guid` AS deviceGuid, `seg_id` AS segId, `crc32`, `size` FROM `hydra_uofs_files_cluster_mapping` WHERE `file_guid` = #{fileGuid} AND `seg_id` = #{segId}\")\n    GenericRemoteCluster getClusterByFileWithId0( GUID fileGuid, long segId );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/SymbolicMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.Symbolic;\nimport com.pinecone.hydra.storage.file.source.SymbolicManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Mapper;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface SymbolicMapper extends SymbolicManipulator {\n    //Symbolic getSymbolic(GUID guid, ElementNode element);\n    void insert( Symbolic symbolic );\n    void remove( GUID guid );\n    Symbolic getSymbolicByGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/SymbolicMetaMapper.java",
    "content": "package com.pinecone.hydra.file.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.entity.SymbolicMeta;\nimport com.pinecone.hydra.storage.file.source.SymbolicMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Mapper;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface SymbolicMetaMapper extends SymbolicMetaManipulator {\n    //SymbolicMeta getSymbolicMeta(GUID guid, ElementNode element);\n    void insert( SymbolicMeta symbolicMeta );\n    void remove( GUID guid );\n    SymbolicMeta getSymbolicMetaByGuid(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/hydranium/FileMappingDriver.java",
    "content": "package com.pinecone.hydra.file.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class FileMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public FileMappingDriver( Processum superiorProcess  ) {\n        super( superiorProcess );\n    }\n\n    // Temp , TODO\n    public FileMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, FileMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new FileMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/hydranium/FileMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.file.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.file.ibatis.ExternalSymbolicMapper;\nimport com.pinecone.hydra.file.ibatis.FileMapper;\nimport com.pinecone.hydra.file.ibatis.FileMetaMapper;\nimport com.pinecone.hydra.file.ibatis.FileSystemAttributeMapper;\nimport com.pinecone.hydra.file.ibatis.FolderMapper;\nimport com.pinecone.hydra.file.ibatis.FolderMetaMapper;\nimport com.pinecone.hydra.file.ibatis.FolderVolumeMappingMapper;\nimport com.pinecone.hydra.file.ibatis.LocalClusterMapper;\nimport com.pinecone.hydra.file.ibatis.RemoteClusterMapper;\nimport com.pinecone.hydra.file.ibatis.SymbolicMapper;\nimport com.pinecone.hydra.file.ibatis.SymbolicMetaMapper;\n\nimport com.pinecone.hydra.storage.file.source.ExternalSymbolicManipulator;\nimport com.pinecone.hydra.storage.file.source.FileManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMasterManipulator;\nimport com.pinecone.hydra.storage.file.source.FileMetaManipulator;\nimport com.pinecone.hydra.storage.file.source.FileSystemAttributeManipulator;\nimport com.pinecone.hydra.storage.file.source.FolderManipulator;\nimport com.pinecone.hydra.storage.file.source.FolderMetaManipulator;\nimport com.pinecone.hydra.storage.file.source.FolderVolumeMappingManipulator;\nimport com.pinecone.hydra.storage.file.source.LocalClusterManipulator;\nimport com.pinecone.hydra.storage.file.source.RemoteClusterManipulator;\nimport com.pinecone.hydra.storage.file.source.SymbolicManipulator;\nimport com.pinecone.hydra.storage.file.source.SymbolicMetaManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class FileMasterManipulatorImpl implements FileMasterManipulator {\n    @Resource\n    @Structure( 
type = FileSystemAttributeMapper.class )\n    FileSystemAttributeManipulator fileSystemAttributeManipulator;\n\n    @Resource\n    @Structure( type = FileMapper.class )\n    FileManipulator fileManipulator;\n\n    @Resource\n    @Structure( type = FileMetaMapper.class )\n    FileMetaManipulator fileMetaManipulator;\n\n    @Resource\n    @Structure( type = FolderMapper.class )\n    FolderManipulator folderManipulator;\n\n    @Resource\n    @Structure( type = FolderMetaMapper.class )\n    FolderMetaManipulator folderMetaManipulator;\n\n    @Resource\n    @Structure( type = LocalClusterMapper.class )\n    LocalClusterManipulator localClusterManipulator;\n\n    @Resource\n    @Structure( type = RemoteClusterMapper.class )\n    RemoteClusterManipulator remoteClusterManipulator;\n\n    @Resource\n    @Structure( type = SymbolicMapper.class )\n    SymbolicManipulator symbolicManipulator;\n\n    @Resource\n    @Structure( type = SymbolicMetaMapper.class )\n    SymbolicMetaManipulator symbolicMetaManipulator;\n\n    @Resource\n    @Structure( type = ExternalSymbolicMapper.class )\n    ExternalSymbolicManipulator externalSymbolicManipulator;\n\n    @Resource( type = FileMasterTreeManipulatorImpl.class )\n    KOISkeletonMasterManipulator skeletonMasterManipulator;\n\n    @Structure( type = FolderVolumeMappingMapper.class)\n    FolderVolumeMappingMapper folderVolumeRelationMapper;\n\n    public FileMasterManipulatorImpl() {\n\n    }\n\n    public FileMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( FileMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new FileMasterTreeManipulatorImpl( driver );\n    }\n\n    @Override\n    public FileSystemAttributeManipulator getAttributeManipulator() {\n        return this.fileSystemAttributeManipulator;\n    }\n\n    @Override\n    public FileManipulator getFileManipulator() {\n        return this.fileManipulator;\n    }\n\n    @Override\n    public FileMetaManipulator 
getFileMetaManipulator() {\n        return this.fileMetaManipulator;\n    }\n\n    @Override\n    public FolderManipulator getFolderManipulator() {\n        return this.folderManipulator;\n    }\n\n    @Override\n    public FolderMetaManipulator getFolderMetaManipulator() {\n        return this.folderMetaManipulator;\n    }\n\n    @Override\n    public LocalClusterManipulator getLocalClusterManipulator() {\n        return this.localClusterManipulator;\n    }\n\n    @Override\n    public RemoteClusterManipulator getRemoteClusterManipulator() {\n        return this.remoteClusterManipulator;\n    }\n\n    @Override\n    public SymbolicManipulator getSymbolicManipulator() {\n        return this.symbolicManipulator;\n    }\n\n    @Override\n    public SymbolicMetaManipulator getSymbolicMetaManipulator() {\n        return this.symbolicMetaManipulator;\n    }\n\n    @Override\n    public ExternalSymbolicManipulator getExternalSymbolicManipulator() {\n        return this.externalSymbolicManipulator;\n    }\n\n    @Override\n    public FolderVolumeMappingManipulator getFolderVolumeRelationManipulator() {\n        return this.folderVolumeRelationMapper;\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/file/ibatis/hydranium/FileMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.file.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.file.ibatis.FileOwnerMapper;\nimport com.pinecone.hydra.file.ibatis.FilePathCacheMapper;\nimport com.pinecone.hydra.file.ibatis.FileTreeMapper;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n@Component\npublic class FileMasterTreeManipulatorImpl implements TreeMasterManipulator {\n    @Resource\n    @Structure( type = FilePathCacheMapper.class )\n    TriePathCacheManipulator triePathCacheManipulator;\n\n    @Resource\n    @Structure( type = FileOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    @Resource\n    @Structure( type = FileTreeMapper.class )\n    TrieTreeManipulator trieTreeManipulator;\n\n    public FileMasterTreeManipulatorImpl() {\n\n    }\n\n    public FileMasterTreeManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( FileMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerCachePathMapper.java",
    "content": "package com.pinecone.hydra.layer.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerPathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@IbatisDataAccessObject\npublic interface LayerCachePathMapper extends LayerPathCacheManipulator {\n    @Insert(\"INSERT INTO `hydra_atlas_layer_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )\")\n    void insert(@Param(\"guid\") GUID guid, @Param(\"path\") String path );\n\n    @Delete(\"DELETE FROM `hydra_atlas_layer_node_cache_path` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `path` FROM `hydra_atlas_layer_node_cache_path` WHERE `guid`=#{guid}\")\n    String getPath( GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_atlas_layer_node_cache_path` WHERE `guid`=#{guid}\")\n    GUID getNode( String path );\n\n    @Select(\"SELECT `guid` FROM `hydra_atlas_layer_node_cache_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerHandleMapper.java",
    "content": "package com.pinecone.hydra.layer.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface LayerHandleMapper extends LayerHandleManipulator {\n    @Override\n    @Insert(\"INSERT INTO `hydra_atlas_layer_handle` (`layer_guid`, `handle_guid`, `type`) VALUES (#{layerGuid},#{handleGuid},'source')\")\n    void insertSourceNode(GUID layerGuid, GUID handleGuid);\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_atlas_layer_handle` (`layer_guid`, `handle_guid`, `type`) VALUES (#{layerGuid},#{handleGuid},'sink')\")\n    void insertSinkNode(GUID layerGuid, GUID handleGuid);\n\n    @Override\n    @Insert({\n            \"<script>\",\n            \"INSERT INTO `hydra_atlas_layer_handle` (`layer_guid`, `handle_guid`, `type`) VALUES \",\n            \"<foreach collection='handleGuids' item='handleGuid' separator=','>\",\n            \"(#{layerGuid}, #{handleGuid}, 'source')\",\n            \"</foreach>\",\n            \"</script>\"\n    })\n    void batchInsertSourceNodes(@Param(\"layerGuid\") GUID layerGuid, @Param(\"handleGuids\") List<GUID> handleGuids);\n\n    @Override\n    @Insert({\n            \"<script>\",\n            \"INSERT INTO `hydra_atlas_layer_handle` (`layer_guid`, `handle_guid`, `type`) VALUES \",\n            \"<foreach collection='handleGuids' item='handleGuid' separator=','>\",\n            \"(#{layerGuid}, #{handleGuid}, 'sink')\",\n            \"</foreach>\",\n            \"</script>\"\n    })\n    void batchInsertSinkNodes(@Param(\"layerGuid\") GUID layerGuid, @Param(\"handleGuids\") List<GUID> handleGuids);\n\n    @Override\n    @Select(\"SELECT `handle_guid` FROM `hydra_atlas_layer_handle` 
WHERE `layer_guid` = #{layerGuid} AND `type` = 'source'\")\n    List<GUID> fetchSourceNodes( GUID layerGuid );\n\n    @Override\n    @Select(\"SELECT `handle_guid` FROM `hydra_atlas_layer_handle` WHERE `layer_guid` = #{layerGuid} AND `type` = 'sink'\")\n    List<GUID> fetchSinkNodes( GUID layerGuid );\n\n    @Override\n    @Select(\"SELECT COUNT(id) FROM `hydra_atlas_layer_handle` WHERE `layer_guid` = #{layerGuid} AND `type` = 'source'\")\n    long countSourceNode(GUID layerGuid);\n\n    @Override\n    @Select(\"SELECT halh.handle_guid \" +\n            \"FROM hydra_atlas_layer_handle halh \" +\n            \"JOIN hydra_atlas_vgraph_task_mapping vatm ON halh.handle_guid = vatm.vgraph_node_guid \" +\n            \"JOIN hydra_task_task_node httn ON vatm.task_guid = httn.guid \" +\n            \"WHERE halh.layer_guid = #{layerGuid} \" +\n            \"AND halh.type = 'source' \" +\n            \"AND NOT EXISTS (\" +\n            \"    SELECT id FROM hydra_atlas_vgraph_adjacent hava WHERE hava.guid = halh.handle_guid\" +\n            \") \" +\n            \"ORDER BY httn.priority \" +\n            \"LIMIT #{limit} OFFSET #{offset}\")\n    List<GUID> fetchSourceGuidsByTaskPriority(GUID layerGuid, long offset, long limit);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerMapper.java",
    "content": "package com.pinecone.hydra.layer.ibatis;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.layer.AtlasLayer;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerGraphHandle;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.pinecone.slime.meta.TableIndex64Meta;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface LayerMapper extends LayerManipulator {\n\n    @Override\n    @Insert(\n        \"INSERT INTO `hydra_atlas_layer_layers` \" +\n        \"(`layer_guid`, `layer_name`, `update_time`, `create_time`) \" +\n        \"VALUES (#{mGuid}, #{parentGuid}, #{mszName}, #{mUpdateTime}, #{mCreateTime})\"\n    )\n    void insertLayer( LayerGraphHandle layer );\n\n    @Override\n    @Insert(\n        \"<script>\" +\n        \"INSERT INTO `hydra_atlas_layer_layers` \" +\n        \"(`layer_guid`, `layer_name`, `update_time`, `create_time`) VALUES \" +\n        \"<foreach collection='list' item='item' separator=','>\" +\n        \"(#{item.mGuid}, #{item.parentGuid}, #{item.mszName}, #{item.mUpdateTime}, #{item.mCreateTime})\" +\n        \"</foreach>\" +\n        \"</script>\"\n    )\n    void batchInsertLayer( @Param( \"list\" ) List<LayerGraphHandle> list );\n\n    @Override\n    @Delete(\n        \"DELETE FROM `hydra_atlas_layer_layers` \" +\n        \"WHERE `layer_guid` = #{guid}\"\n    )\n    void remove( GUID guid );\n\n\n\n    @Select(\n        \"SELECT \" +\n        \"l.`id` AS id, \" +\n        \"l.`layer_guid` AS guid, \" +\n        \"t.`parent_guid` AS parentGuid, \" +\n        \"l.`layer_name` AS name, \" +\n        
\"l.`update_time` AS updateTime, \" +\n        \"l.`create_time` AS createTime \" +\n        \"FROM `hydra_atlas_layer_layers` l \" +\n        \"LEFT JOIN `hydra_atlas_layer_tree` t ON t.`guid` = l.`layer_guid` \" +\n        \"WHERE l.`layer_guid` = #{guid}\"\n    )\n    AtlasLayer queryLayer0( GUID guid );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default Layer queryLayer( GUID guid ) {\n        return this.queryLayer0( guid );\n    }\n\n\n    @Override\n    @Select(\n        \"SELECT `layer_guid` AS mGuid \" +\n        \"FROM `hydra_atlas_layer_layers` \" +\n        \"WHERE `layer_name` = #{name}\"\n    )\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    @Select(\n        \"SELECT `layer_guid` AS mGuid \" +\n        \"FROM `hydra_atlas_layer_layers` \" +\n        \"WHERE `layer_name` = #{name} \" +\n        \"AND `layer_guid` = #{guid}\"\n    )\n    List<GUID> getGuidsByNameID( String name, GUID guid );\n\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<Layer> fetchLayer( List<GUID> guids ) {\n        return ( List ) this.fetchLayer0( guids );\n    }\n\n    @Select(\n        \"<script>\" +\n        \"SELECT \" +\n        \"l.`id` AS id, \" +\n        \"l.`layer_guid` AS guid, \" +\n        \"t.`parent_guid` AS parentGuid, \" +\n        \"l.`layer_name` AS name, \" +\n        \"l.`update_time` AS updateTime, \" +\n        \"l.`create_time` AS createTime \" +\n        \"FROM `hydra_atlas_layer_layers` l \" +\n        \"LEFT JOIN `hydra_atlas_layer_tree` t ON t.`guid` = l.`layer_guid` \" +\n        \"<where>\" +\n        \"<if test='guids != null and guids.size() > 0'>\" +\n        \"l.`layer_guid` IN \" +\n        \"<foreach item='guid' collection='guids' open='(' separator=',' close=')'>\" +\n        \"#{guid}\" +\n        \"</foreach>\" +\n        \"</if>\" +\n        \"</where>\" +\n        \"</script>\"\n    )\n    List<AtlasLayer> fetchLayer0( List<GUID> guids );\n\n\n\n\n\n\n\n\n\n    @Select(\n 
       \"<script>\" +\n        \"SELECT \" +\n        \"l.`id` AS id, \" +\n        \"l.`layer_guid` AS guid, \" +\n        \"t.`parent_guid` AS parentGuid, \" +\n        \"l.`layer_name` AS name, \" +\n        \"l.`update_time` AS updateTime, \" +\n        \"l.`create_time` AS createTime \" +\n        \"FROM `hydra_atlas_layer_layers` l \" +\n        \"LEFT JOIN `hydra_atlas_layer_tree` t ON t.`guid` = l.`layer_guid` \" +\n        \"<where>\" +\n        \"<if test=\\\"anyNode == false\\\">\" +\n        \"t.`parent_guid` = #{parentGuid} \" +\n        \"</if>\" +\n        \"</where>\" +\n        \"ORDER BY l.`id` ASC \" +\n        \"LIMIT #{limit} OFFSET #{offset}\" +\n        \"</script>\"\n    )\n    List<AtlasLayer> fetchLayerPage0(\n        @Param( \"offset\" ) long offset,\n        @Param( \"limit\" ) long limit,\n        @Param( \"anyNode\" ) boolean anyNode,\n        @Param( \"parentGuid\" ) @Nullable GUID parentGuid\n    );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<Layer> fetchLayerPage(\n        long offset,\n        long limit,\n        boolean anyNode,\n        @Nullable GUID parentGuid\n    ) {\n        return ( List ) this.fetchLayerPage0( offset, limit, anyNode, parentGuid );\n    }\n\n\n    @Select(\n        \"<script>\" +\n        \"SELECT \" +\n        \"l.`id` AS id, \" +\n        \"l.`layer_guid` AS guid, \" +\n        \"t.`parent_guid` AS parentGuid, \" +\n        \"l.`layer_name` AS name, \" +\n        \"l.`update_time` AS updateTime, \" +\n        \"l.`create_time` AS createTime \" +\n        \"FROM `hydra_atlas_layer_layers` l \" +\n        \"LEFT JOIN `hydra_atlas_layer_tree` t ON t.`guid` = l.`layer_guid` \" +\n        \"<where>\" +\n        \"l.`id` &gt;= #{idStart} AND l.`id` &lt;= #{idEnd} \" +\n        \"<if test=\\\"anyNode == false\\\">\" +\n        \"AND t.`parent_guid` = #{parentGuid} \" +\n        \"</if>\" +\n        \"</where>\" +\n        \"ORDER BY l.`id` ASC\" +\n        \"</script>\"\n    )\n   
 List<AtlasLayer> fetchLayerPageById0(\n        @Param( \"idStart\" ) long idStart,\n        @Param( \"idEnd\" ) long idEnd,\n        @Param( \"anyNode\" ) boolean anyNode,\n        @Param( \"parentGuid\" ) @Nullable GUID parentGuid\n    );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<Layer> fetchLayerPageById(\n        long idStart,\n        long idEnd,\n        boolean anyNode,\n        @Nullable GUID parentGuid\n    ) {\n        return ( List ) this.fetchLayerPageById0( idStart, idEnd, anyNode, parentGuid );\n    }\n\n    @Override\n    @Select(\n        \"<script>\" +\n        \"SELECT \" +\n        \"COALESCE( MIN(l.`id`), 0 ) AS minId, \" +\n        \"COALESCE( MAX(l.`id`), 0 ) AS maxId \" +\n        \"FROM `hydra_atlas_layer_layers` l \" +\n        \"LEFT JOIN `hydra_atlas_layer_tree` t ON t.`guid` = l.`layer_guid` \" +\n        \"<where>\" +\n        \"<if test=\\\"anyNode == false\\\">\" +\n        \"t.`parent_guid` = #{parentGuid} \" +\n        \"</if>\" +\n        \"</where>\" +\n        \"</script>\"\n    )\n    TableIndex64Meta selectLayerIndexMeta(\n        @Param( \"anyNode\" ) boolean anyNode,\n        @Param( \"parentGuid\" ) @Nullable GUID parentGuid\n    );\n\n    @Override\n    @Select(\n        \"<script>\" +\n        \"SELECT COUNT( * ) \" +\n        \"FROM `hydra_atlas_layer_layers` l \" +\n        \"LEFT JOIN `hydra_atlas_layer_tree` t ON t.`guid` = l.`layer_guid` \" +\n        \"<where>\" +\n        \"<if test=\\\"anyNode == false\\\">\" +\n        \"t.`parent_guid` = #{parentGuid} \" +\n        \"</if>\" +\n        \"</where>\" +\n        \"</script>\"\n    )\n    long countLayer(\n        @Param( \"anyNode\" ) boolean anyNode,\n        @Param( \"parentGuid\" ) @Nullable GUID parentGuid\n    );\n\n}"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerOwnerMapper.java",
    "content": "package com.pinecone.hydra.layer.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface LayerOwnerMapper extends LayerOwnerManipulator {\n    @Insert(\"INSERT INTO `hydra_atlas_layer_tree` (`guid`) VALUES ( #{guid} )\")\n    void insertRootNode(@Param(\"guid\") GUID guid );\n\n    @Insert( \"INSERT INTO `hydra_atlas_layer_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})\" )\n    void insert( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Update( \"UPDATE `hydra_atlas_layer_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void update( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Update( \"UPDATE `hydra_atlas_layer_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void updateParentGuid( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Update( \"UPDATE `hydra_atlas_layer_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void updateLinkedType( @Param(\"targetGuid\") GUID targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Delete( \"DELETE FROM `hydra_atlas_layer_tree` WHERE 
`guid`=#{subordinateGuid} AND `parent_guid`=#{ownerGuid} AND `linked_type` = 'Owned'\" )\n    void remove( @Param(\"subordinateGuid\") GUID subordinateGuid, @Param(\"ownerGuid\") GUID ownerGuid );\n\n    @Delete( \"DELETE FROM `hydra_atlas_layer_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'\" )\n    void removeBySubordinate( GUID subordinateGuid );\n\n//    @Delete(\"DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}\")\n//    void removeByOwner(GUID ownerGuid);\n\n    @Select( \"SELECT `parent_guid` FROM `hydra_atlas_layer_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'\" )\n    GUID getOwner( GUID subordinateGuid );\n\n    @Select( \"SELECT guid FROM hydra_atlas_layer_tree where parent_guid=#{guid} AND linked_type = 'Owned'\" )\n    List<GUID > getSubordinates(GUID guid );\n\n\n    @Update(\"UPDATE `hydra_atlas_layer_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}\")\n    void setLinkedType( @Param(\"sourceGuid\") GUID sourceGuid, @Param(\"targetGuid\") GUID targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Select(\"SELECT `linked_type` FROM `hydra_atlas_layer_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}\")\n    LinkedType getLinkedType( @Param(\"childGuid\") GUID childGuid,@Param(\"parentGuid\") GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/LayerTreeMapper.java",
    "content": "package com.pinecone.hydra.layer.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerTreeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface LayerTreeMapper extends LayerTreeManipulator {\n    @Insert(\"INSERT INTO hydra_atlas_layer_tree_nodes (`guid`) VALUES ( #{guid} )\")\n    void insertRootNode(@Param(\"guid\")  GUID guid);\n\n    @Override\n    default void insert ( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){\n        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );\n        ownerManipulator.insertRootNode( node.getGuid() );\n    }\n\n    @Insert(\"INSERT INTO `hydra_atlas_layer_tree_nodes` (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})\")\n    void insertTreeNode( @Param(\"guid\") GUID guid, @Param(\"type\") UOI type, @Param(\"baseDataGuid\") GUID baseDataGuid, @Param(\"nodeMetaGuid\") GUID nodeMetaGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_atlas_layer_tree_nodes WHERE guid=#{guid}\")\n    GUIDImperialTrieNode getNodeExtendsFromMeta(GUID guid );\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_atlas_layer_tree_nodes WHERE 
guid=#{guid}\")\n    boolean contains( GUID key );\n\n    @Override\n    default GUIDImperialTrieNode getNode(GUID guid ) {\n        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );\n        if( node == null ){\n            return node;\n        }\n        List<GUID > parent = this.fetchParentGuids( guid );\n        node.setParentGUID( parent );\n        return node;\n    }\n\n    @Select(\"SELECT id, guid, parent_guid FROM hydra_atlas_layer_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    GUIDImperialTrieNode getTreeNodeOnly(@Param(\"guid\") GUID guid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT count( * ) FROM hydra_atlas_layer_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    long countNode( GUID guid, GUID parentGuid );\n\n\n    @Override\n    default void purge( GUID guid ) {\n        this.removeNodeMeta( guid );\n        this.removeTreeNode( guid );\n    }\n\n    @Delete(\"DELETE FROM `hydra_atlas_layer_tree_nodes` WHERE `guid`=#{guid}\")\n    void removeNodeMeta( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_atlas_layer_tree` WHERE `guid` = #{guid}\")\n    void removeTreeNode( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_atlas_layer_tree` WHERE `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeByParentGuid( @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_atlas_layer_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeYoke( @Param(\"guid\") GUID guid, @Param(\"parent_guid\") GUID parentGuid );\n\n\n    @Delete(\"DELETE FROM `hydra_atlas_layer_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance( @Param(\"chileGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_atlas_layer_tree` WHERE `parent_guid`=#{guid}\")\n    
List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_atlas_layer_tree` WHERE `parent_guid` = #{parentGuid}\")\n    List<GUID > fetchChildrenGuids( @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_atlas_layer_tree` WHERE `guid`=#{guid}\")\n    List<GUID > fetchParentGuids( GUID guid );\n\n    @Update(\"UPDATE `hydra_atlas_layer_tree_nodes` SET `type` = #{type} WHERE guid=#{guid}\")\n    void updateType( UOI type , GUID guid );\n\n    @Select( \"SELECT guid FROM hydra_atlas_layer_tree WHERE parent_guid IS NULL \" )\n    List<GUID > fetchRoot(@Param(\"tableName\") String tableName);\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_atlas_layer_tree WHERE `parent_guid` IS NULL AND guid = #{guid}\" )\n    boolean isRoot( GUID guid );\n\n    @Update(\"UPDATE hydra_atlas_layer_tree SET parent_guid = #{parentGuid} WHERE guid = #{childGuid}\")\n    void addChild( @Param(\"childGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n\n\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_atlas_layer_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\" )\n    long queryLinkedCount( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_atlas_layer_tree WHERE `guid` = #{guid}\" )\n    long queryAllLinkedCount( @Param(\"guid\") GUID guid );\n\n\n    @Override\n    @Insert(\n            \"INSERT INTO `hydra_atlas_layer_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) \" +\n                    \"VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})\"\n    )\n    void newLinkTag(\n            @Param(\"originalGuid\") GUID originalGuid, @Param(\"dirGuid\") GUID dirGuid,\n            @Param(\"tagName\") String tagName, @Param(\"tagGuid\") GUID tagGuid, @Param(\"linkedType\") LinkedType linkedType\n    );\n\n    
@Override\n    @Update( \"UPDATE hydra_atlas_layer_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}\" )\n    void updateLinkTagName( @Param(\"tagGuid\") GUID tagGuid, @Param(\"tagName\") String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_atlas_layer_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}\" )\n    GUID getOriginalGuid( @Param(\"tagName\") String tagName, @Param(\"dirGuid\") GUID dirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_atlas_layer_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    GUID getOriginalGuidByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_atlas_layer_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}\" )\n    TreeReparseLinkNode getReparseLinkNode(@Param(\"tagName\") String tagName, @Param(\"parentDirGuid\") GUID parentDirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_atlas_layer_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_atlas_layer_tree WHERE `tag_name` = #{tagName}\" )\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_atlas_layer_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL\" )\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM `hydra_atlas_layer_tree` WHERE `tag_guid` = #{guid}\" )\n    boolean isTagGuid(GUID guid);\n\n    @Override\n    
@Delete( \"DELETE FROM `hydra_atlas_layer_tree` WHERE `tag_guid` = #{guid}\" )\n    void removeReparseLink( GUID guid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_atlas_layer_tree` WHERE `tag_guid` = #{tagGuid}\" )\n    GUID getOriginalGuidByTagGuid(GUID tagGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/NamespaceMapper.java",
    "content": "package com.pinecone.hydra.layer.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerNamespace;\nimport com.pinecone.hydra.unit.vgraph.layer.source.NamespaceManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface NamespaceMapper extends NamespaceManipulator {\n    @Insert(\"INSERT INTO `hydra_atlas_layer_namespace` (`guid`, `name`, `update_time`, `create_time`) VALUES (#{guid},#{name},#{updateTime},#{createTime})\")\n    void insert( LayerNamespace layerNamespace );\n\n    @Delete(\"DELETE FROM `hydra_atlas_layer_namespace` WHERE guid = #{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `id` AS enumId, `guid`, `name`, `update_time` AS updateTime, `create_time` AS createTime FROM `hydra_atlas_layer_namespace` WHERE `guid` = #{guid}\")\n    LayerNamespace query( GUID guid );\n\n    @Select( \"SELECT `guid` FROM `hydra_atlas_layer_namespace` WHERE `name` = #{name}\" )\n    List<GUID > getGuidsByName(String name );\n\n    @Select( \"SELECT `guid` FROM `hydra_atlas_layer_namespace` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID > getGuidsByNameID(@Param(\"name\") String name, @Param(\"guid\") GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/hydranium/LayerMappingDriver.java",
    "content": "package com.pinecone.hydra.layer.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class LayerMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public LayerMappingDriver(Processum superiorProcess ) {\n        super(superiorProcess);\n    }\n\n    public LayerMappingDriver(Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, LayerMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new LayerMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/hydranium/LayerMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.layer.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.layer.ibatis.LayerHandleMapper;\nimport com.pinecone.hydra.layer.ibatis.LayerMapper;\nimport com.pinecone.hydra.layer.ibatis.NamespaceMapper;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerHandleManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.LayerMasterManipulator;\nimport com.pinecone.hydra.unit.vgraph.layer.source.NamespaceManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class LayerMasterManipulatorImpl implements LayerMasterManipulator {\n    @Resource\n    @Structure( type = LayerMasterTreeManipulatorImpl.class )\n    KOISkeletonMasterManipulator skeletonMasterManipulator;\n\n    @Resource\n    @Structure( type = NamespaceMapper.class )\n    NamespaceManipulator namespaceManipulator;\n\n    @Resource\n    @Structure( type = LayerMapper.class )\n    LayerManipulator layerManipulator;\n\n    @Resource\n    @Structure( type = LayerHandleMapper.class )\n    LayerHandleManipulator layerHandleManipulator;\n\n    public LayerMasterManipulatorImpl() {\n\n    }\n\n    public LayerMasterManipulatorImpl(KOIMappingDriver driver ) {\n        driver.autoConstruct( LayerMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new LayerMasterTreeManipulatorImpl( driver );\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public LayerManipulator getLayerManipulator() {\n        return this.layerManipulator;\n    }\n\n    @Override\n    public 
NamespaceManipulator getNamespaceManipulator() {\n        return this.namespaceManipulator;\n    }\n\n    @Override\n    public LayerHandleManipulator getLayerHandleManipulator() {\n        return this.layerHandleManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/layer/ibatis/hydranium/LayerMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.layer.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.layer.ibatis.LayerCachePathMapper;\nimport com.pinecone.hydra.layer.ibatis.LayerOwnerMapper;\nimport com.pinecone.hydra.layer.ibatis.LayerTreeMapper;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class LayerMasterTreeManipulatorImpl implements TreeMasterManipulator {\n    @Resource\n    @Structure( type = LayerTreeMapper.class )\n    TrieTreeManipulator trieTreeManipulator;\n\n    @Resource\n    @Structure( type = LayerCachePathMapper.class )\n    TriePathCacheManipulator triePathCacheManipulator;\n\n    @Resource\n    @Structure( type = LayerOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    public LayerMasterTreeManipulatorImpl() {\n\n    }\n\n    public LayerMasterTreeManipulatorImpl(KOIMappingDriver driver ) {\n        driver.autoConstruct( LayerMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/policy/ibatis/PolicyFileMappingMapper.java",
    "content": "package com.pinecone.hydra.policy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.policy.source.PolicyFileMappingManipulator;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\npublic interface PolicyFileMappingMapper extends PolicyFileMappingManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_file_policy_mapping` (`file_path`, `policy_guid`) VALUES (#{filePath}, #{policyGuid})\")\n    void insert(@Param(\"policyGuid\" ) GUID policyGuid, @Param(\"filePath\") String filePath);\n\n    @Delete(\"DELETE FROM hydra_uofs_file_policy_mapping WHERE policy_guid = #{policyGuid} AND file_path = #{filePath}\")\n    void remove(@Param(\"policyGuid\") GUID policyGuid, @Param(\"filePath\") String filePath);\n\n    @Select(\"SELECT policy_guid FROM hydra_uofs_file_policy_mapping WHERE file_path = #{filePath}\")\n    List<GUID> queryPolicy(@Param(\"filePath\") String filePath );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/policy/ibatis/PolicyMapper.java",
    "content": "package com.pinecone.hydra.policy.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.policy.entity.Policy;\nimport com.pinecone.hydra.storage.policy.source.PolicyManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\n@IbatisDataAccessObject\npublic interface PolicyMapper extends PolicyManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_policy` (`policy_name`, `policy_guid`, `policy_desc`) VALUES (#{policyName}, #{policyGuid}, #{policyDesc})\")\n    void insert(Policy policy);\n    @Delete(\"DELETE FROM `hydra_uofs_policy` WHERE `policy_guid` = #{policyGuid}\")\n    void remove(GUID policyGuid);\n    @Select(\"SELECT `id`, `policy_name` AS policyName, `policy_guid` AS policyGuid, `policy_desc` AS policyDesc FROM hydra_uofs_policy WHERE `policy_guid` = #{policyGuid}\")\n    Policy queryPolicy( GUID policyGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/policy/ibatis/hydranium/PolicyMappingDriver.java",
    "content": "package com.pinecone.hydra.policy.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class PolicyMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public PolicyMappingDriver( Processum superiorProcess  ) {\n        super( superiorProcess );\n    }\n\n    // Temp , TODO\n    public PolicyMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, PolicyMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new PolicyMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/policy/ibatis/hydranium/PolicyMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.policy.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.policy.ibatis.PolicyFileMappingMapper;\nimport com.pinecone.hydra.policy.ibatis.PolicyMapper;\nimport com.pinecone.hydra.storage.policy.source.PolicyFileMappingManipulator;\nimport com.pinecone.hydra.storage.policy.source.PolicyManipulator;\nimport com.pinecone.hydra.storage.policy.source.PolicyMasterManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\npublic class PolicyMasterManipulatorImpl implements PolicyMasterManipulator {\n    @Resource\n    @Structure( type = PolicyMapper.class )\n    PolicyManipulator policyMapping;\n\n    @Resource\n    @Structure( type = PolicyFileMappingMapper.class )\n    PolicyFileMappingMapper policyFileMappingMapper;\n\n    public PolicyMasterManipulatorImpl() {\n\n    }\n\n    public PolicyMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( PolicyMasterManipulatorImpl.class, Map.of(), this );\n    }\n    @Override\n    public PolicyManipulator getPolicyManipulator() {\n        return this.policyMapping;\n    }\n\n    @Override\n    public PolicyFileMappingManipulator getPolicyFileMappingManipulator() {\n        return this.policyFileMappingMapper;\n    }\n\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/AtlasExecuteQueueMapper.java",
    "content": "package com.pinecone.hydra.queue.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.iqueue.DPQueueManipulator;\nimport com.pinecone.hydra.unit.iqueue.QueueMeta;\nimport com.pinecone.hydra.unit.iqueue.entity.GenericQueueElement;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface AtlasExecuteQueueMapper extends DPQueueManipulator {\n\n    @Override\n    @Insert({\n            \"<script>\",\n            \"INSERT INTO ${meta.QueueTable}\",\n            \"(object_guid, priority, linked_priority, bias, ${field})\",\n            \"SELECT \",\n            \"#{element.objectGuid}, #{element.priority},\",\n            \"(SELECT COUNT(*) + 1 FROM ${meta.QueueTable}\",\n            \" WHERE priority = #{element.priority} AND ${field} = #{segmentName}),\",\n            \"#{element.bias}, #{segmentName}\",\n            \"</script>\"\n    })\n    void pushBack(@Param(\"element\") QueueElement queueElement,\n                  @Param(\"field\") String sharedSegmentField,\n                  @Param(\"segmentName\") String sharedSegmentName,\n                  @Param(\"meta\") QueueMeta meta);\n\n    @Override\n    @Insert({\n            \"<script>\",\n            \"INSERT INTO ${meta.QueueTable}\",\n            \"(object_guid, priority, linked_priority, bias, ${field})\",\n            \"SELECT \",\n            \"#{element.objectGuid}, #{element.priority},\",\n            \"1, #{element.bias}, #{segmentName}\",\n            \"FROM (SELECT `id` AS mnEnumId, `object_guid` AS 
mObjectGuid, `priority`, `linked_priority` AS mnLinkedPriority, `bias` FROM ${meta.QueueTable}) AS tmp\",\n            \"WHERE priority = #{element.priority} AND ${field} = #{segmentName}\",\n            \"</script>\"\n    })\n    void pushFront(@Param(\"element\") QueueElement queueElement, @Param(\"field\") String sharedSegmentField,\n                   @Param(\"segmentName\") String sharedSegmentName, @Param(\"meta\") QueueMeta meta);\n\n    @Override\n    @Update({\n            \"<script>\",\n            \"UPDATE ${meta.QueueTable}\",\n            \"SET linked_priority = linked_priority + 1\",\n            \"WHERE priority = #{element.priority}\",\n            \"AND ${field} = #{segmentName}\",\n            \"AND object_guid != #{element.objectGuid}\",\n            \"</script>\"\n    })\n    void incrementLinkedPriorities(@Param(\"element\") QueueElement queueElement, @Param(\"field\") String sharedSegmentField,\n                                   @Param(\"segmentName\") String sharedSegmentName, @Param(\"meta\") QueueMeta meta);\n\n    @Override\n    @Select({\n            \"<script>\",\n            \"SELECT `id` AS mnEnumId, `object_guid` AS mObjectGuid, `priority`, `linked_priority` AS mnLinkedPriority, `bias`\",\n            \"FROM ${meta.QueueTable}\",\n            \"WHERE id = #{currentPos} AND ${field} = #{segmentName}\",\n            \"</script>\"\n    })\n    GenericQueueElement popFront (\n            @Param(\"currentPos\") long currentPos, @Param(\"field\") String sharedSegmentField,\n            @Param(\"segmentName\") String sharedSegmentName, @Param(\"meta\") QueueMeta meta\n    );\n\n    @Override\n    default List<QueueElement> batchPopFront(long currentPos, String sharedSegmentField, String sharedSegmentName, QueueMeta meta, long limit, long offset) {\n        List<GenericQueueElement> queueElements = this.batchPopFront0(currentPos, sharedSegmentField, sharedSegmentName, meta, limit, offset);\n        ArrayList<QueueElement> arrayList = new 
ArrayList<>(queueElements);\n        long i = 0;\n        for( QueueElement element : arrayList ) {\n            Long indexPriority = this.getIndexPriority(currentPos + i, sharedSegmentField, sharedSegmentName, meta);\n            element.setIndexPriority( indexPriority );\n            i++;\n        }\n        return arrayList;\n    }\n\n    @Select({\n            \"<script>\",\n            \"SELECT `id` AS mnEnumId, `object_guid` AS mObjectGuid, `priority`, `linked_priority` AS mnLinkedPriority, `bias`\",\n            \"FROM ${meta.QueueTable}\",\n            \"WHERE ${sharedSegmentField} = #{sharedSegmentName}\",\n            \"ORDER BY id ASC\",\n            \"LIMIT #{limit} OFFSET #{offset}\",\n            \"</script>\"\n    })\n    List<GenericQueueElement> batchPopFront0(\n            @Param(\"currentPos\") long currentPos,\n            @Param(\"sharedSegmentField\") String sharedSegmentField,\n            @Param(\"sharedSegmentName\") String sharedSegmentName,\n            @Param(\"meta\") QueueMeta meta,\n            @Param(\"limit\") long limit,\n            @Param(\"offset\") long offset\n    );\n\n    @Override\n    default List<QueueElement> batchPopBack(String sharedSegmentField, String sharedSegmentName, QueueMeta meta, long limit, long offset) {\n        List<GenericQueueElement> queueElements = this.batchPopBack0(sharedSegmentField, sharedSegmentName, meta, limit, offset);\n        return new ArrayList<>(queueElements);\n    }\n\n    @Select({\n            \"<script>\",\n            \"SELECT `id` AS mnEnumId, `object_guid` AS mObjectGuid, `priority`, `linked_priority` AS mnLinkedPriority, `bias`\",\n            \"FROM ${meta.QueueTable}\",\n            \"WHERE ${sharedSegmentField} = #{sharedSegmentName}\",\n            \"ORDER BY priority DESC, linked_priority DESC\",\n            \"LIMIT #{limit} OFFSET #{offset}\",\n            \"</script>\"\n    })\n    List<GenericQueueElement> batchPopBack0(\n            @Param(\"sharedSegmentField\") String 
sharedSegmentField,\n            @Param(\"sharedSegmentName\") String sharedSegmentName,\n            @Param(\"meta\") QueueMeta meta,\n            @Param(\"limit\") long limit,\n            @Param(\"offset\") long offset\n    );\n\n    @Override\n    @Select({\n            \"<script>\",\n            \"SELECT `id` AS mnEnumId, `object_guid` AS mObjectGuid, `priority`, `linked_priority` AS mnLinkedPriority, `bias`\",\n            \"FROM ${meta.QueueTable}\",\n            \"WHERE ${field} = #{segmentName}\",\n            \"ORDER BY priority DESC, linked_priority DESC\",\n            \"LIMIT 1\",\n            \"</script>\"\n    })\n    QueueElement popBack(\n            @Param(\"field\") String sharedSegmentField,\n            @Param(\"segmentName\") String sharedSegmentName,\n            @Param(\"meta\") QueueMeta meta\n    );\n\n    @Override\n    @Select({\n            \"<script>\",\n            \"SELECT COUNT(id) FROM ${meta.QueueTable}\",\n            \"WHERE ${field} = #{segmentName}\",\n            \"</script>\"\n    })\n    long queryQueueSize( @Param(\"field\") String sharedSegmentField, @Param(\"segmentName\") String sharedSegmentName,\n               @Param(\"meta\") QueueMeta meta );\n\n    @Delete(\"DELETE FROM ${meta.QueueTable} WHERE id = #{currentPos} AND ${field} = #{segmentName}\")\n    QueueElement remove( @Param(\"currentPos\") long currentPos, @Param(\"field\") String sharedSegmentField,\n                         @Param(\"segmentName\") String sharedSegmentName, @Param(\"meta\") QueueMeta meta );\n\n    @Override\n    @Select(\"SELECT `id` AS enumId, `object_guid` AS objectGuid, `priority`, \" +\n            \"`linked_priority` AS linkedPriority, `bias` \" +\n            \"FROM ${meta.QueueTable} WHERE id = #{id} AND ${field} = #{segmentName}\")\n    GenericQueueElement query( @Param(\"id\") long enumId, @Param(\"field\") String sharedSegmentField,\n                        @Param(\"segmentName\") String sharedSegmentName, @Param(\"meta\") 
QueueMeta meta );\n\n    @Override\n    default List<QueueElement> fetchElementByPriority(long priority, String sharedSegmentField,\n                                              String sharedSegmentName, QueueMeta meta,\n                                              long limit, long offset) {\n        return new ArrayList<>(this.fetchElementByPriority0( priority, sharedSegmentField, sharedSegmentName,meta,limit,offset ));\n    }\n\n    @Select({\n            \"<script>\",\n            \"SELECT `id` AS enumId, `object_guid` AS objectGuid, `priority`, \",\n            \"`linked_priority` AS linkedPriority, `bias` \",\n            \"FROM ${meta.QueueTable} \",\n            \"WHERE priority = #{priority} \",\n            \"AND ${sharedSegmentField} = #{sharedSegmentName} \",\n            \"ORDER BY linked_priority DESC, id ASC \",\n            \"LIMIT #{limit} OFFSET #{offset}\",\n            \"</script>\"\n    })\n    List<GenericQueueElement> fetchElementByPriority0(\n            @Param(\"priority\") long priority,\n            @Param(\"sharedSegmentField\") String sharedSegmentField,\n            @Param(\"sharedSegmentName\") String sharedSegmentName,\n            @Param(\"meta\") QueueMeta meta,\n            @Param(\"limit\") long limit,\n            @Param(\"offset\") long offset\n    );\n\n    @Override\n    default List<QueueElement> fetchElement(String sharedSegmentField, String sharedSegmentName,\n                                    QueueMeta meta, long limit,long offset) {\n        List<GenericQueueElement> elements = this.fetchElement0(sharedSegmentField, sharedSegmentName, meta, limit, offset);\n        return new ArrayList<>(elements);\n    }\n\n    @Select({\n            \"<script>\",\n            \"SELECT `id` AS enumId, `object_guid` AS objectGuid, `priority`, \",\n            \"`linked_priority` AS linkedPriority, `bias` \",\n            \"FROM ${meta.QueueTable} \",\n            \"WHERE \",\n            \"${sharedSegmentField} = #{sharedSegmentName} 
\",\n            \"ORDER BY linked_priority DESC, id ASC \",\n            \"LIMIT #{limit} OFFSET #{offset}\",\n            \"</script>\"\n    })\n    List<GenericQueueElement> fetchElement0(@Param(\"sharedSegmentField\") String sharedSegmentField,\n                                    @Param(\"sharedSegmentName\") String sharedSegmentName,\n                                    @Param(\"meta\") QueueMeta meta,\n                                    @Param(\"limit\") long limit,\n                                    @Param(\"offset\") long offset);\n    @Override\n    @Select({\n            \"<script>\",\n            \"SELECT  `object_guid` AS objectGuid \",\n            \"FROM ${meta.QueueTable} \",\n            \"WHERE \",\n            \"${sharedSegmentField} = #{sharedSegmentName} \",\n            \"ORDER BY linked_priority DESC, id ASC \",\n            \"LIMIT #{limit} OFFSET #{offset}\",\n            \"</script>\"\n    })\n    List<GUID> fetchElementGuid(@Param(\"sharedSegmentField\") String sharedSegmentField,\n                                @Param(\"sharedSegmentName\") String sharedSegmentName,\n                                @Param(\"meta\") QueueMeta meta,\n                                @Param(\"limit\") long limit,\n                                @Param(\"offset\") long offset);\n\n    @Override\n    @Select({\n            \"<script>\",\n            \"SELECT `id` AS enumId, `object_guid` AS objectGuid, `priority`,\" + \"`linked_priority` AS linkedPriority, `bias` FROM ${meta.QueueTable}\",\n            \"WHERE ${field} = #{segmentName}\",\n            \"ORDER BY priority ASC, linked_priority ASC\",\n            \"LIMIT 1 OFFSET #{index}\",\n            \"</script>\"\n    })\n    GenericQueueElement getByIndex(\n            @Param(\"index\") long index,\n            @Param(\"field\") String sharedSegmentField,\n            @Param(\"segmentName\") String sharedSegmentName,\n            @Param(\"meta\") QueueMeta meta\n    );\n\n    @Override\n    
@Select({\n            \"<script>\",\n            \"SELECT id FROM ${meta.QueueTable}\",\n            \"WHERE ${sharedSegmentField} = #{sharedSegmentName}\",\n            \"AND id > #{currentPos}\",\n            \"ORDER BY priority ASC, linked_priority ASC\",\n            \"LIMIT 1\",\n            \"</script>\"\n    })\n    Long nextPos(@Param(\"currentPos\") long currentPos,\n                 @Param(\"sharedSegmentField\") String sharedSegmentField,\n                 @Param(\"sharedSegmentName\") String sharedSegmentName,\n                 @Param(\"meta\") QueueMeta meta);\n\n    @Override\n    @Select({\n            \"<script>\",\n            \"SELECT COUNT(id) AS rank_value\",\n            \"FROM ${meta.QueueTable}\",\n            \"WHERE ${sharedSegmentField} = #{sharedSegmentName}\",\n            \"AND (priority &lt; (SELECT priority FROM ${meta.QueueTable} WHERE id = #{currentPos})\",\n            \"     OR (priority = (SELECT priority FROM ${meta.QueueTable} WHERE id = #{currentPos})\",\n            \"         AND linked_priority &lt; (SELECT linked_priority FROM ${meta.QueueTable} WHERE id = #{currentPos})))\",\n            \"</script>\"\n    })\n    Long getIndexPriority(@Param(\"currentPos\") long currentPos,\n                          @Param(\"sharedSegmentField\") String sharedSegmentField,\n                          @Param(\"sharedSegmentName\") String sharedSegmentName,\n                          @Param(\"meta\") QueueMeta meta);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/AtlasStratumQueueMapper.java",
    "content": "package com.pinecone.hydra.queue.ibatis;\n\nimport com.pinecone.hydra.unit.iqueue.DPStratumQueueManipulator;\nimport com.pinecone.hydra.unit.iqueue.QueueMeta;\nimport com.pinecone.hydra.unit.iqueue.entity.GenericStratumQueueElement;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface AtlasStratumQueueMapper extends DPStratumQueueManipulator {\n    @Override\n    @Insert({\n            \"<script>\",\n            \"INSERT INTO ${meta.QueueTable}\",\n            \"(object_guid, priority, linked_priority, bias, ${field}, stratum)\",\n            \"SELECT \",\n            \"#{element.objectGuid}, #{element.priority},\",\n            \"(SELECT COUNT(*) + 1 FROM ${meta.QueueTable}\",\n            \" WHERE priority = #{element.priority} AND ${field} = #{segmentName}),\",\n            \"#{element.bias}, #{segmentName}, #{element.stratum}\",\n            \"WHERE NOT EXISTS (\",\n            \"  SELECT 1 FROM ${meta.QueueTable}\",\n            \"  WHERE object_guid = #{element.objectGuid}\",\n            \"  AND ${field} = #{segmentName}\",\n            \")\",\n            \"</script>\"\n    })\n    void pushBack(@Param(\"element\") QueueStratumElement queueElement,\n                  @Param(\"field\") String sharedSegmentField,\n                  @Param(\"segmentName\") String sharedSegmentName,\n                  @Param(\"meta\") QueueMeta meta);\n\n    @Override\n    @Select({\n            \"<script>\",\n            \"SELECT `id` AS mnEnumId, `object_guid` AS mObjectGuid, `priority`, `linked_priority` AS mnLinkedPriority, `bias`, 
`stratum`\",\n            \"FROM ${meta.QueueTable}\",\n            \"WHERE ${field} = #{segmentName}\",\n            \"ORDER BY id ASC\",\n            \"LIMIT 1\",\n            \"</script>\"\n    })\n    GenericStratumQueueElement popFront( @Param(\"field\") String sharedSegmentField, @Param(\"segmentName\") String sharedSegmentName, @Param(\"meta\") QueueMeta queueMeta );\n\n    @Override\n    @Delete({\n            \"<script>\",\n            \"DELETE FROM ${meta.QueueTable}\",\n            \"WHERE ${field} = #{segmentName}\",\n            \"ORDER BY id ASC\",\n            \"LIMIT 1\",\n            \"</script>\"\n    })\n    void removeFront(@Param(\"field\") String sharedSegmentField, @Param(\"segmentName\") String sharedSegmentName, @Param(\"meta\") QueueMeta queueMeta);\n\n    @Override\n    @Select({\n            \"<script>\",\n            \"SELECT COUNT(`id`)\",\n            \"FROM ${meta.QueueTable}\",\n            \"WHERE ${field} = #{segmentName}\",\n            \"</script>\"\n    })\n    long isEmpty(@Param(\"field\") String sharedSegmentField, @Param(\"segmentName\") String sharedSegmentName, @Param(\"meta\") QueueMeta queueMeta);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/QueueExistMapper.java",
    "content": "package com.pinecone.hydra.queue.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.iqueue.QueueExistManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface QueueExistMapper extends QueueExistManipulator {\n    @Override\n    @Insert(\"INSERT INTO `hydra_global_queue_exist` (`layer_guid`, `is_exist`) VALUES (#{layerGuid},1)\")\n    void insertQueueExist(GUID layerGuid );\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_global_queue_exist` (`layer_guid`, `is_exist`) VALUES (#{layerGuid},0)\")\n    void insertQueueNotExist(GUID layerGuid );\n\n    @Override\n    @Update(\"UPDATE `hydra_global_queue_exist` SET `is_exist` = 1 WHERE `layer_guid` = #{layerGuid}\")\n    void setQueueExist(GUID layerGuid);\n\n    @Override\n    @Update(\"UPDATE `hydra_global_queue_exist` SET `is_exist` = 0 WHERE `layer_guid` = #{layerGuid}\")\n    void setQueueNotExist(GUID layerGuid);\n\n    @Override\n    default boolean isExist( GUID layerGuid ){\n        Integer isExist = this.isExist0(layerGuid);\n        if( isExist == null ) {\n            this.insertQueueNotExist( layerGuid );\n            return false;\n        }\n        return isExist == 1;\n    }\n\n    @Select(\"SELECT `is_exist` FROM `hydra_global_queue_exist` WHERE `layer_guid` = #{layerGuid}\")\n    Integer isExist0( GUID layerGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/hydranium/QueueMappingDriver.java",
    "content": "package com.pinecone.hydra.queue.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.registry.ibatis.hydranium.RegistryMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class QueueMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public QueueMappingDriver(Processum superiorProcess) {\n        super(superiorProcess);\n    }\n\n    public QueueMappingDriver(Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, QueueMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n        this.mKOIMasterManipulator = new QueueMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/queue/ibatis/hydranium/QueueMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.queue.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.queue.ibatis.AtlasExecuteQueueMapper;\nimport com.pinecone.hydra.queue.ibatis.AtlasStratumQueueMapper;\nimport com.pinecone.hydra.queue.ibatis.QueueExistMapper;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.unit.iqueue.DPQueueManipulator;\nimport com.pinecone.hydra.unit.iqueue.DPStratumQueueManipulator;\nimport com.pinecone.hydra.unit.iqueue.QueueExistManipulator;\nimport com.pinecone.hydra.unit.iqueue.QueueMasterManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class QueueMasterManipulatorImpl implements QueueMasterManipulator {\n    @Resource\n    @Structure( type = AtlasExecuteQueueMapper.class )\n    protected DPQueueManipulator            mDPQueueManipulator;\n\n    @Resource\n    @Structure( type = AtlasStratumQueueMapper.class )\n    protected DPStratumQueueManipulator     mDPStratumQueueManipulator;\n\n    @Resource\n    @Structure( type = QueueExistMapper.class )\n    protected QueueExistManipulator         mQueueExistManipulator;\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return null;\n    }\n\n    public QueueMasterManipulatorImpl() {}\n\n    public QueueMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( QueueMasterManipulatorImpl.class, Map.of(), this );\n    }\n\n    @Override\n    public DPQueueManipulator getDPQueueManipulator() {\n        return this.mDPQueueManipulator;\n    }\n\n    @Override\n    public DPStratumQueueManipulator getDPStratumQueueManipulator() {\n        return this.mDPStratumQueueManipulator;\n    }\n\n    @Override\n    public QueueExistManipulator getQueueExistManipulator() {\n  
      return this.mQueueExistManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryAttributesMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.ElementNode;\nimport com.pinecone.hydra.registry.entity.GenericAttributes;\nimport com.pinecone.hydra.registry.entity.Attributes;\nimport com.pinecone.hydra.registry.source.RegistryAttributesManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface RegistryAttributesMapper extends RegistryAttributesManipulator {\n    @Override\n    @Delete( \"DELETE FROM `hydra_registry_node_attributes` WHERE `guid`=#{guid}\" )\n    void remove ( GUID guid );\n\n    @Override\n    @Insert( \"INSERT INTO `hydra_registry_node_attributes` (`guid`, `key`, `value`) VALUES (#{guid}, #{key}, #{value})\" )\n    void insertAttribute( @Param(\"guid\") GUID guid, @Param(\"key\") String key, @Param(\"value\") String value );\n\n    @Override\n    @Select( \"SELECT `id` AS `enumId`, `guid`, `key`, `value` FROM `hydra_registry_node_attributes` WHERE `guid`=#{guid}\" )\n    List<Map<String, Object > > getAttributesByGuid( GUID guid );\n\n    @Override\n    @Update( \"UPDATE `hydra_registry_node_attributes` SET `value`=#{value} WHERE `guid`=#{guid} AND `key`=#{key}\")\n    void updateAttribute( @Param(\"guid\") GUID guid, @Param(\"key\") String key, @Param(\"value\") String value );\n\n    @Override\n    default Attributes getAttributes( GUID guid, ElementNode element ) {\n        List<Map<String, Object > > raws = this.getAttributesByGuid( guid );\n        Attributes attributes = new GenericAttributes( guid, element, this );\n        if ( 
raws.isEmpty() ) {\n            return attributes;\n        }\n\n        for ( Map<String, Object > raw : raws ) {\n            attributes.setAttribute( (String) raw.get( \"key\" ), (String) raw.get( \"value\" ) );\n        }\n\n        return attributes;\n    }\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM `hydra_registry_node_attributes` WHERE `guid` = #{guid} AND `key` = #{key}\" )\n    boolean containsKey( @Param(\"guid\") GUID guid, @Param(\"key\") String key );\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_registry_node_attributes` WHERE `guid` = #{guid}\" )\n    void clearAttributes( @Param(\"guid\") GUID guid );\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_registry_node_attributes` WHERE `guid` = #{guid} AND `key` = #{key} AND `value` = #{value}\" )\n    void removeAttributeWithValue( @Param(\"guid\") GUID guid, @Param(\"key\") String key, @Param(\"value\") String value );\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_registry_node_attributes` WHERE `guid` = #{guid} AND `key` = #{key}\" )\n    void removeAttribute( @Param(\"guid\") GUID guid, @Param(\"key\") String key );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryConfigNodeMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.registry.entity.ConfigNode;\nimport com.pinecone.hydra.registry.entity.GenericProperties;\nimport com.pinecone.hydra.registry.entity.GenericTextFile;\nimport com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.time.LocalDateTime;\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface RegistryConfigNodeMapper extends RegistryConfigNodeManipulator {\n    @Insert(\"INSERT INTO `hydra_registry_config_node` (`guid`, `data_affinity_guid`, `create_time`, `update_time`,`name`) VALUES (#{guid},#{dataAffinityGuid},#{createTime},#{updateTime},#{name})\")\n    void insert( ConfigNode configNode );\n\n    @Delete(\"DELETE FROM `hydra_registry_config_node` WHERE `guid`=#{guid}\")\n    void remove( @Param(\"guid\") GUID guid );\n\n    @Override\n    @Select( \"SELECT COUNT(`id`) FROM `hydra_registry_config_node` WHERE guid = #{guid}\" )\n    boolean isConfigNode( GUID guid );\n\n    @Select(\"SELECT `type` FROM `hydra_registry_nodes` WHERE `guid`=#{guid}\")\n    UOI getUOIByGUID( GUID guid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `data_affinity_guid` AS dataAffinityGuid, `create_time` AS createTime, `update_time` updateTime, `name` FROM `hydra_registry_config_node` WHERE `guid` = #{guid}\")\n    GenericProperties getPropertiesNode( GUID guid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `data_affinity_guid` AS dataAffinityGuid, `create_time` AS createTime, `update_time` updateTime, `name` FROM 
`hydra_registry_config_node` WHERE `guid`=#{guid}\")\n    GenericTextFile getTextConfigNode(GUID guid );\n\n    @Override\n    default ConfigNode getConfigNode (GUID guid ) {\n        String objectName = this.getUOIByGUID(guid).getObjectName();\n        if ( objectName.equals( GenericTextFile.class.getName()) ){\n            return this.getTextConfigNode(guid);\n        }\n        else if ( objectName.equals(GenericProperties.class.getName()) ){\n            return this.getPropertiesNode(guid);\n        }\n        return null;\n    }\n\n    @Override\n    default void update( ConfigNode configNode ) {\n        if (configNode.getUpdateTime() != null){\n            this.updateUpdateTime(configNode.getUpdateTime(),configNode.getGuid());\n        }\n        if (configNode.getName() != null){\n            //updateName(configNode.getName(),configNode.getGuid());\n        }\n    }\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_registry_config_node` WHERE `name` = #{name}\" )\n    List<GUID > getGuidsByName( String name );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_registry_config_node` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n    @Update( \"UPDATE `hydra_registry_config_node` SET `update_time` = #{updateTime} WHERE `guid` = #{guid}\" )\n    void updateUpdateTime(@Param(\"updateTime\") LocalDateTime updateTime,@Param(\"guid\") GUID guid);\n\n    @Select( \"SELECT `guid` FROM `hydra_registry_config_node`\" )\n    List<GUID > dumpGuid();\n\n    @Update( \"UPDATE `hydra_registry_config_node` SET `name` = #{name} WHERE `guid` = #{guid}\" )\n    void updateName( @Param(\"guid\") GUID guid ,@Param(\"name\") String name );\n\n    @Select( \"SELECT `data_affinity_guid` FROM `hydra_registry_config_node` WHERE `guid` = #{guid}\" )\n    GUID getDataAffinityGuid ( GUID guid );\n\n    @Update( \"UPDATE `hydra_registry_config_node` SET `data_affinity_guid` 
= #{affinityGuid} WHERE `guid` = #{guid}\" )\n    void setDataAffinityGuid( @Param(\"guid\") GUID guid, @Param(\"affinityGuid\") GUID affinityGuid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNSNodeMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.GenericNamespace;\nimport com.pinecone.hydra.registry.entity.Namespace;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface RegistryNSNodeMapper extends RegistryNSNodeManipulator {\n    @Override\n    @Insert(\"INSERT INTO `hydra_registry_namespace` (`guid`, `create_time`, `name`, `update_time`) VALUES (#{guid},#{createTime},#{name},#{updateTime})\")\n    void insert(Namespace namespace);\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_registry_namespace` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n    @Override\n    @Select( \"SELECT COUNT(`id`) FROM `hydra_registry_namespace` WHERE guid = #{guid}\" )\n    boolean isNamespaceNode( GUID guid );\n\n    @Override\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `create_time` AS createTime, `name`, `update_time` AS updateTime FROM `hydra_registry_namespace` WHERE guid=#{guid}\")\n    GenericNamespace getNamespaceWithMeta( GUID guid );\n\n    @Override\n    @Update(\"UPDATE `hydra_registry_namespace` SET `create_time`=#{createTime},`name`=#{name},`update_time`=#{updateTime} WHERE `guid`=#{guid}\")\n    void update(Namespace namespace);\n\n    @Override\n    @Select(\"SELECT `guid` FROM `hydra_registry_namespace` WHERE `name`=#{name}\")\n    List<GUID > getGuidsByName(String name);\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_registry_namespace` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    
List<GUID > getGuidsByNameID( @Param( \"name\" ) String name, @Param( \"guid\" ) GUID guid );\n\n    @Override\n    @Select(\"SELECT `guid` FROM `hydra_registry_namespace`\")\n    List<GUID > dumpGuid();\n\n    @Override\n    @Update( \"UPDATE `hydra_registry_namespace` SET `name` = #{name} WHERE `guid` = #{guid}\" )\n    void updateName( @Param(\"guid\") GUID guid ,@Param(\"name\") String name );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNSNodeMetaMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.GenericNamespaceMeta;\nimport com.pinecone.hydra.registry.entity.NamespaceMeta;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface RegistryNSNodeMetaMapper extends RegistryNSNodeMetaManipulator {\n    @Insert(\"INSERT INTO `hydra_registry_ns_node_meta` (`guid`) VALUES (#{guid})\")\n    void insert( NamespaceMeta namespaceMeta);\n\n    @Delete(\"DELETE FROM `hydra_registry_ns_node_meta` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid` FROM `hydra_registry_ns_node_meta` WHERE guid=#{guid}\")\n    GenericNamespaceMeta getNamespaceNodeMeta(GUID guid);\n\n    void update( NamespaceMeta namespaceMeta);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNodeMetaMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.ConfigNodeMeta;\nimport com.pinecone.hydra.registry.entity.GenericConfigNodeMeta;\nimport com.pinecone.hydra.registry.source.RegistryNodeMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface RegistryNodeMetaMapper extends RegistryNodeMetaManipulator {\n    @Insert(\"INSERT INTO `hydra_registry_conf_node_meta` (`guid`) VALUES (#{guid})\")\n    void insert(ConfigNodeMeta configNodeMeta);\n    @Delete(\"DELETE FROM `hydra_registry_conf_node_meta` WHERE `guid`=#{guid}\")\n    void remove(GUID guid);\n    @Select(\"SELECT `id` AS `enumId`, `guid` FROM `hydra_registry_conf_node_meta` WHERE `guid`=#{guid}\")\n    GenericConfigNodeMeta getConfigNodeMeta(GUID guid);\n\n    void update(ConfigNodeMeta configNodeMeta);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNodeOwnerMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface RegistryNodeOwnerMapper extends TireOwnerManipulator {\n    @Insert(\"INSERT INTO `hydra_registry_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode( @Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Insert( \"INSERT INTO `hydra_registry_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})\" )\n    void insert( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Update( \"UPDATE `hydra_registry_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void update( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Update( \"UPDATE `hydra_registry_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void updateParentGuid( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Update( \"UPDATE `hydra_registry_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void updateLinkedType( @Param(\"targetGuid\") GUID targetGuid, @Param(\"linkedType\") LinkedType 
linkedType );\n\n\n\n    @Delete( \"DELETE FROM `hydra_registry_node_tree` WHERE `guid`=#{subordinateGuid}  AND `linked_type` = 'Owned'\" )\n    void remove( @Param(\"subordinateGuid\") GUID subordinateGuid, @Param(\"ownerGuid\") GUID ownerGuid );\n\n    @Delete( \"DELETE FROM `hydra_registry_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'\" )\n    void removeBySubordinate( GUID subordinateGuid );\n\n//    @Delete(\"DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}\")\n//    void removeByOwner(GUID ownerGuid);\n\n    @Select( \"SELECT `parent_guid` FROM `hydra_registry_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'\" )\n    GUID getOwner( GUID subordinateGuid );\n\n    @Select( \"SELECT guid FROM hydra_registry_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'\" )\n    List<GUID > getSubordinates( GUID guid );\n\n\n    @Update(\"UPDATE `hydra_registry_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}\")\n    void setLinkedType( @Param(\"sourceGuid\") GUID sourceGuid, @Param(\"targetGuid\") GUID targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Select(\"SELECT `linked_type` FROM `hydra_registry_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}\")\n    LinkedType getLinkedType( @Param(\"childGuid\") GUID childGuid,@Param(\"parentGuid\") GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryNodePathCacheMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@IbatisDataAccessObject\npublic interface RegistryNodePathCacheMapper extends TriePathCacheManipulator {\n    @Insert(\"INSERT INTO `hydra_registry_node_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )\")\n    void insert( @Param(\"guid\") GUID guid, @Param(\"path\") String path );\n\n    @Delete(\"DELETE FROM `hydra_registry_node_path` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `path` FROM `hydra_registry_node_path` WHERE `guid`=#{guid}\")\n    String getPath( GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_registry_node_path` WHERE `guid`=#{guid}\")\n    GUID getNode( String path );\n\n    @Select(\"SELECT `guid` FROM `hydra_registry_node_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryPropertiesMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.GenericProperty;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.entity.Property;\nimport com.pinecone.hydra.registry.source.RegistryPropertiesManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Result;\nimport org.apache.ibatis.annotations.Results;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.sql.Timestamp;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface RegistryPropertiesMapper extends RegistryPropertiesManipulator {\n    @Insert(\"INSERT INTO hydra_registry_conf_node_properties (`guid`, `key`, `type`, `create_time`, `update_time`, `value`) VALUES (#{guid},#{key},#{type},#{createTime},#{updateTime},#{rawValue})\")\n    void insert( Property property );\n\n    @Delete(\"DELETE FROM `hydra_registry_conf_node_properties` WHERE `guid`=#{guid} AND `key`=#{key}\")\n    void remove( GUID guid, String key );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `key`, `type`, `create_time` AS createTime, `update_time` AS updateTime, `value` AS rawValue FROM hydra_registry_conf_node_properties WHERE `guid`=#{guid}\")\n    @Results({\n            @Result(column = \"enumId\", property = \"enumId\", javaType = Long.class)\n    })\n    List<Map > getProperties0( GUID guid );\n\n    @Override\n    default List<Property > getProperties( GUID guid, Properties parent ) {\n        List<Map >    raws = this.getProperties0( guid );\n        List<Property > ps = new 
ArrayList<>( raws.size() );\n\n        for( Map raw : raws ) {\n            Property property = new GenericProperty( parent );\n            property.setEnumId( ( (Number) raw.get( \"enumId\" ) ).longValue() );\n            property.setGuid  ( GUIDs.GUID128( (String) raw.get( \"guid\" ) )  );\n            property.setType  ( (String) raw.get( \"type\" )  );\n            property.setKey   ( (String) raw.get( \"key\" )   );\n\n            property.setCreateTime ( ( (Timestamp) raw.get(\"createTime\") ).toLocalDateTime() );\n            property.setUpdateTime ( ( (Timestamp) raw.get(\"updateTime\") ).toLocalDateTime() );\n            property.setRawValue   ( raw.get( \"rawValue\" )   );\n\n            ps.add( property );\n        }\n        return ps;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    default List<Property > getProperties( GUID guid ) {\n        return (List) this.getProperties0( guid );\n    }\n\n    @Update( \"UPDATE `hydra_registry_conf_node_properties` SET `key`=#{key}, `type`=#{type}, update_time=#{updateTime}, value=#{rawValue} WHERE `guid`=#{guid} AND `key`=#{key}\" )\n    void update( Property property );\n\n    @Delete(\"DELETE FROM `hydra_registry_conf_node_properties` WHERE `guid` = #{guid}\")\n    void removeAll( GUID guid );\n\n    @Insert( \"INSERT INTO `hydra_registry_conf_node_properties` (`guid`, `key`, `type`, `create_time`, `update_time`, `value`) SELECT\\n\" +\n            \"\\t#{destinationGuid},\\n\" +\n            \"\\t`key`,\\n\" +\n            \"\\t`type`,\\n\" +\n            \"\\t`create_time`,\\n\" +\n            \"\\t`update_time`,\\n\" +\n            \"\\t`value` \\n\" +\n            \"FROM\\n\" +\n            \"\\t`hydra_registry_conf_node_properties` AS src \\n\" +\n            \"WHERE\\n\" +\n            \"\\t`guid` = #{sourceGuid} \\n\" +\n            \"\\tAND NOT EXISTS ( \\n\" +\n            \"\\tSELECT `guid` FROM `hydra_registry_conf_node_properties` AS dest WHERE dest.`guid` = #{destinationGuid} AND dest.`key` = 
src.`key` \\n\" +\n            \"\\t)\" )\n    void copyPropertiesTo( @Param(\"sourceGuid\") GUID sourceGuid, @Param(\"destinationGuid\") GUID destinationGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryTextFileMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.registry.entity.GenericTextValue;\nimport com.pinecone.hydra.registry.entity.TextValue;\nimport com.pinecone.hydra.registry.source.RegistryTextFileManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface RegistryTextFileMapper extends RegistryTextFileManipulator {\n    @Insert(\"INSERT INTO `hydra_registry_conf_node_text_value` (`guid`, `value`, `create_time`, `update_time`, `type`) VALUES (#{guid},#{value},#{createTime},#{updateTime},#{type})\")\n    void insert(TextValue textValue);\n\n    @Delete(\"DELETE FROM `hydra_registry_conf_node_text_value` WHERE `guid`=#{guid}\")\n    void remove(GUID guid);\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `value`, `create_time` AS createTime, `update_time` AS updateTime, `type` FROM `hydra_registry_conf_node_text_value` WHERE guid=#{guid}\")\n    GenericTextValue getTextValue(GUID guid);\n\n    @Update(\"UPDATE `hydra_registry_conf_node_text_value` SET `value`=#{value}, `update_time`=#{updateTime}, `type`=#{type} WHERE guid=#{guid}\")\n    void update(TextValue textValue);\n\n    @Insert(\"INSERT INTO `hydra_registry_conf_node_text_value` (`guid`, `type`, `create_time`, `update_time`, `value`) SELECT #{destinationGuid}, `type`, `create_time`, `update_time`, `value` \" +\n            \"FROM `hydra_registry_conf_node_text_value` WHERE `guid` = #{sourceGuid} AND `guid` != #{destinationGuid}\")\n    void copyTextValueTo( @Param(\"sourceGuid\") GUID sourceGuid, @Param(\"destinationGuid\") GUID destinationGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/RegistryTreeMapper.java",
    "content": "package com.pinecone.hydra.registry.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface RegistryTreeMapper extends TrieTreeManipulator {\n    @Insert(\"INSERT INTO `hydra_registry_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode( @Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    default void insert ( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){\n        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );\n        ownerManipulator.insertRootNode( node.getGuid() );\n    }\n\n    @Insert(\"INSERT INTO hydra_registry_nodes (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})\")\n    void insertTreeNode( @Param(\"guid\") GUID guid, @Param(\"type\") UOI type, @Param(\"baseDataGuid\") GUID baseDataGuid, @Param(\"nodeMetaGuid\") GUID nodeMetaGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_registry_nodes WHERE guid=#{guid}\")\n    GUIDImperialTrieNode getNodeExtendsFromMeta(GUID guid );\n\n    @Override\n  
  default GUIDImperialTrieNode getNode( GUID guid ) {\n        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );\n        if ( node == null ) {\n            return null;\n        }\n        List<GUID > parent = this.fetchParentGuids( guid );\n        node.setParentGUID( parent );\n        return node;\n    }\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_registry_nodes WHERE guid=#{guid}\")\n    boolean contains( GUID key );\n\n    @Select(\"SELECT id, guid, parent_guid, linked_type FROM hydra_registry_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    GUIDImperialTrieNode getTreeNodeOnly(@Param(\"guid\") GUID guid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT count( * ) FROM hydra_registry_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    long countNode( GUID guid, GUID parentGuid );\n\n\n\n    @Override\n    default void purge( GUID guid ) {\n        this.removeNodeMeta( guid );\n        this.removeTreeNode( guid );\n        this.removeOwnedTreeNode( guid );\n    }\n\n    @Delete(\"DELETE FROM `hydra_registry_nodes` WHERE `guid`=#{guid}\")\n    void removeNodeMeta( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_registry_node_tree` WHERE `guid` = #{guid}\")\n    void removeTreeNode( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_registry_node_tree` WHERE `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeByParentGuid( @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_registry_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeYoke( @Param(\"guid\") GUID guid, @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_registry_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\")\n    void removeTreeNodeWithLinkedType( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n\n    @Delete(\"DELETE FROM 
`hydra_registry_node_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance( @Param(\"chileGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_registry_node_tree` WHERE `parent_guid`=#{guid}\")\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_registry_node_tree` WHERE `parent_guid` = #{parentGuid}\")\n    List<GUID > fetchChildrenGuids( @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_registry_node_tree` WHERE `guid`=#{guid}\")\n    List<GUID > fetchParentGuids( GUID guid );\n\n    @Update(\"UPDATE `hydra_registry_nodes` SET `type` = #{type} WHERE guid=#{guid}\")\n    void updateType( UOI type , GUID guid );\n\n    @Select( \"SELECT guid FROM hydra_registry_node_tree WHERE parent_guid IS NULL \" )\n    List<GUID > fetchRoot();\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_registry_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}\" )\n    boolean isRoot( GUID guid );\n\n\n\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_registry_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\" )\n    long queryLinkedCount( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_registry_node_tree WHERE `guid` = #{guid}\" )\n    long queryAllLinkedCount( @Param(\"guid\") GUID guid );\n\n\n    @Override\n    @Insert(\n            \"INSERT INTO `hydra_registry_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) \" +\n            \"VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})\"\n    )\n    void newLinkTag(\n            @Param(\"originalGuid\") GUID originalGuid, @Param(\"dirGuid\") GUID dirGuid,\n            @Param(\"tagName\") String tagName, 
@Param(\"tagGuid\") GUID tagGuid, @Param(\"linkedType\") LinkedType linkedType\n    );\n\n    @Override\n    @Update( \"UPDATE hydra_registry_node_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}\" )\n    void updateLinkTagName( @Param(\"tagGuid\") GUID tagGuid, @Param(\"tagName\") String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_registry_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}\" )\n    GUID getOriginalGuid( @Param(\"tagName\") String tagName, @Param(\"dirGuid\") GUID dirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_registry_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    GUID getOriginalGuidByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_registry_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}\" )\n    TreeReparseLinkNode getReparseLinkNode( @Param(\"tagName\") String tagName, @Param(\"parentDirGuid\") GUID parentDirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_registry_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_registry_node_tree WHERE `tag_name` = #{tagName}\" )\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_registry_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL\" )\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM 
`hydra_registry_node_tree` WHERE `tag_guid` = #{guid}\" )\n    boolean isTagGuid(GUID guid);\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_registry_node_tree` WHERE `tag_guid` = #{guid}\" )\n    void removeReparseLink( GUID guid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_registry_node_tree` WHERE `tag_guid` = #{tagGuid}\" )\n    GUID getOriginalGuidByTagGuid(GUID tagGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/hydranium/RegistryMappingDriver.java",
    "content": "package com.pinecone.hydra.registry.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class RegistryMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public RegistryMappingDriver( Processum superiorProcess  ) {\n        super( superiorProcess );\n    }\n\n    // Temp , TODO\n    public RegistryMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, RegistryMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new RegistryMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/hydranium/RegistryMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.registry.ibatis.hydranium;\n\nimport java.util.Map;\nimport javax.annotation.Resource;\n\nimport org.springframework.stereotype.Component;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.registry.ibatis.RegistryAttributesMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryNSNodeMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryNSNodeMetaMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryConfigNodeMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryNodeMetaMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryPropertiesMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryTextFileMapper;\nimport com.pinecone.hydra.registry.source.RegistryMasterManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeMetaManipulator;\nimport com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNodeMetaManipulator;\nimport com.pinecone.hydra.registry.source.RegistryAttributesManipulator;\nimport com.pinecone.hydra.registry.source.RegistryPropertiesManipulator;\nimport com.pinecone.hydra.registry.source.RegistryTextFileManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\n\n@Component\npublic class RegistryMasterManipulatorImpl implements RegistryMasterManipulator {\n    @Resource\n    @Structure( type = RegistryConfigNodeMapper.class )\n    RegistryConfigNodeManipulator configNodeManipulator;\n\n    @Resource\n    @Structure( type = RegistryNSNodeMapper.class )\n    RegistryNSNodeManipulator namespaceNodeManipulator;\n\n    @Resource\n    @Structure( type = RegistryPropertiesMapper.class )\n    RegistryPropertiesManipulator registryPropertiesManipulator;\n\n    @Resource\n    @Structure( type = 
RegistryTextFileMapper.class )\n    RegistryTextFileManipulator registryTextFileManipulator;\n\n    @Resource\n    @Structure( type = RegistryNodeMetaMapper.class )\n    RegistryNodeMetaManipulator configNodeMetaManipulator;\n\n    @Resource\n    @Structure( type = RegistryNSNodeMetaMapper.class )\n    RegistryNSNodeMetaManipulator namespaceNodeMetaManipulator;\n\n    @Resource\n    @Structure( type = RegistryAttributesMapper.class )\n    RegistryAttributesManipulator registryAttributesManipulator;\n\n    @Resource( type = RegistryMasterTreeManipulatorImpl.class )\n    KOISkeletonMasterManipulator skeletonMasterManipulator;\n\n    public RegistryMasterManipulatorImpl() {\n\n    }\n\n    public RegistryMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( RegistryMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new RegistryMasterTreeManipulatorImpl( driver );\n    }\n\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public RegistryConfigNodeManipulator getConfigNodeManipulator() {\n        return this.configNodeManipulator;\n    }\n\n    @Override\n    public RegistryNSNodeManipulator getNSNodeManipulator() {\n        return this.namespaceNodeManipulator;\n    }\n\n    @Override\n    public RegistryPropertiesManipulator getPropertiesManipulator() {\n        return this.registryPropertiesManipulator;\n    }\n\n    @Override\n    public RegistryTextFileManipulator getTextFileManipulator() {\n        return this.registryTextFileManipulator;\n    }\n\n    @Override\n    public RegistryNodeMetaManipulator getNodeMetaManipulator() {\n        return this.configNodeMetaManipulator;\n    }\n\n    @Override\n    public RegistryNSNodeMetaManipulator getNSNodeMetaManipulator() {\n        return this.namespaceNodeMetaManipulator;\n    }\n\n    @Override\n    public RegistryAttributesManipulator 
getAttributesManipulator() {\n        return this.registryAttributesManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/registry/ibatis/hydranium/RegistryMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.registry.ibatis.hydranium;\n\nimport java.util.Map;\nimport javax.annotation.Resource;\n\nimport org.springframework.stereotype.Component;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.registry.ibatis.RegistryNodeOwnerMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryNodePathCacheMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryTreeMapper;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\n\n@Component\npublic class RegistryMasterTreeManipulatorImpl implements TreeMasterManipulator {\n\n    @Resource\n    @Structure( type = RegistryNodePathCacheMapper.class )\n    TriePathCacheManipulator triePathCacheManipulator;\n\n    @Resource\n    @Structure( type = RegistryNodeOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    @Resource\n    @Structure( type = RegistryTreeMapper.class )\n    TrieTreeManipulator  trieTreeManipulator;\n\n    public RegistryMasterTreeManipulatorImpl() {\n\n    }\n\n    public RegistryMasterTreeManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( RegistryMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioCommonDataMapper.java",
    "content": "package com.pinecone.hydra.scenario.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.scenario.entity.GenericScenarioCommonData;\nimport com.pinecone.hydra.scenario.entity.ScenarioCommonData;\nimport com.pinecone.hydra.scenario.source.ScenarioCommonDataManipulator;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n@Mapper\n@IbatisDataAccessObject\npublic interface ScenarioCommonDataMapper extends ScenarioCommonDataManipulator {\n    @Insert(\"INSERT INTO `hydra_scenario_commom_data` (`guid`, `create_time`, `update_time`) VALUES (#{guid},#{createTime},#{updateTime})\")\n    void insert(ScenarioCommonData scenarioCommonData);\n\n    @Delete(\"DELETE FROM `hydra_scenario_commom_data` WHERE `guid`=#{guid}\")\n    void remove(GUID guid);\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `create_time`, `update_time` FROM `hydra_scenario_commom_data` WHERE `guid`=#{guid}\")\n    GenericScenarioCommonData getScenarioCommonData(GUID guid);\n\n    void update(ScenarioCommonData scenarioCommonData);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioNamespaceNodeMapper.java",
    "content": "package com.pinecone.hydra.scenario.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.scenario.entity.GenericNamespaceNode;\nimport com.pinecone.hydra.scenario.entity.NamespaceNode;\nimport com.pinecone.hydra.scenario.source.NamespaceNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n@Mapper\n@IbatisDataAccessObject\npublic interface ScenarioNamespaceNodeMapper extends NamespaceNodeManipulator {\n    @Insert(\"INSERT INTO `hydra_scenario_namespace_node` (`guid`, `name`) VALUES (#{guid},#{name})\")\n    void insert(NamespaceNode namespaceNode);\n\n    @Delete(\"DELETE FROM `hydra_scenario_namespace_node` WHERE `guid`=#{guid}\")\n    void remove(GUID guid);\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_scenario_namespace_node` WHERE guid=#{guid}\")\n    GenericNamespaceNode getNamespaceNode(GUID guid);\n\n    void update(NamespaceNode namespaceNode);\n\n    @Select(\"SELECT guid FROM hydra_scenario_namespace_node where name=#{name}\")\n    List<GUID> getGuidsByName(String name);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioNamespaceNodeMetaMapper.java",
    "content": "package com.pinecone.hydra.scenario.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.scenario.entity.GenericNamespaceNodeMeta;\nimport com.pinecone.hydra.scenario.entity.NamespaceNodeMeta;\nimport com.pinecone.hydra.scenario.source.NamespaceNodeMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n@Mapper\n@IbatisDataAccessObject\npublic interface ScenarioNamespaceNodeMetaMapper extends NamespaceNodeMetaManipulator {\n    @Insert(\"INSERT INTO `hydra_scenario_namespace_node_meta` (`guid`) VALUES (#{guid})\")\n    void insert(NamespaceNodeMeta namespaceNodeMeta);\n\n    @Delete(\"DELETE FROM `hydra_scenario_namespace_node_meta` WHERE `guid`=#{guid}\")\n    void remove(GUID guid);\n\n    @Select(\"SELECT `id` AS `enumId`, `guid` FROM `hydra_scenario_namespace_node_meta` WHERE `guid`=#{guid}\")\n    GenericNamespaceNodeMeta getNamespaceNodeMeta(GUID guid);\n\n    void update(NamespaceNodeMeta namespaceNodeMeta);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioNodeOwnerMapper.java",
    "content": "package com.pinecone.hydra.scenario.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Mapper;\n\nimport java.util.List;\n@Mapper\n@IbatisDataAccessObject\npublic interface ScenarioNodeOwnerMapper extends TireOwnerManipulator {\n\n    void insert(GUID subordinateGuid, GUID ownerGuid);\n\n    void remove(GUID subordinateGuid,GUID ownerGuid);\n\n    void removeBySubordinate(GUID subordinateGuid);\n\n    void removeByOwner(GUID OwnerGuid);\n\n    GUID getOwner(GUID subordinateGuid);\n\n    List<GUID> getSubordinates(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioNodePathCacheMapper.java",
    "content": "package com.pinecone.hydra.scenario.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ScenarioNodePathCacheMapper extends TriePathCacheManipulator {\n    @Insert(\"INSERT INTO hydra_scenario_node_path (path, guid) VALUES (#{path},#{guid})\")\n    void insert(@Param(\"guid\") GUID guid, @Param(\"path\") String path);\n    @Delete(\"DELETE FROM hydra_scenario_node_path WHERE guid=#{guid}\")\n    void remove(GUID guid);\n    @Select(\"SELECT path FROM hydra_scenario_node_path WHERE guid=#{guid}\")\n    String getPath(GUID guid);\n    @Select(\"SELECT guid FROM hydra_scenario_node_path WHERE path=#{path}\")\n    GUID getNode(String path);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/ScenarioTreeMapper.java",
    "content": "package com.pinecone.hydra.scenario.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ScenarioTreeMapper extends TrieTreeManipulator {\n    @Insert(\"INSERT INTO hydra_scenario_node_map (guid, type, base_data_guid, node_meta_guid) VALUES (#{guid},#{type},#{baseDataGUID},#{nodeMetadataGUID})\")\n    void insert (GUIDImperialTrieNode distributedConfTreeNode);\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_scenario_node_map WHERE guid=#{guid}\")\n    boolean contains( GUID key );\n\n    default GUIDImperialTrieNode getNode(GUID guid){\n        GUIDImperialTrieNode metaNode = this.getMetaNode(guid);\n        List<GUID> parentNodes = this.fetchParentGuids(guid);\n        if (parentNodes != null){\n            metaNode.setParentGUID(parentNodes);\n        }else {\n            metaNode.setParentGUID(new ArrayList<GUID>());\n        }\n        return metaNode;\n    }\n\n    @Select(\"SELECT id, guid, type, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_scenario_node_map WHERE guid=#{guid}\")\n    GUIDImperialTrieNode getMetaNode(GUID guid);\n\n    default void remove(GUID guid){\n        removeMeta(guid);\n        removeParentNode(guid);\n    }\n\n    @Delete(\"DELETE FROM hydra_scenario_node_map WHERE guid=#{guid}\")\n    void removeMeta(GUID guid);\n\n    @Delete(\"DELETE FROM hydra_scenario_node_tree WHERE guid=#{guid}\")\n    void removeParentNode(GUID guid);\n\n    @Delete(\"DELETE 
FROM `hydra_scenario_node_tree` WHERE `guid`=#{childGuid} AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance(@Param(\"childGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid);\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_scenario_node_tree` WHERE `guid`=#{guid}\")\n    List<GUID> fetchParentGuids(GUID guid);\n\n    @Select(\"SELECT `path` FROM `hydra_scenario_node_path` WHERE `guid`=#{guid}\")\n    String getPath(GUID guid);\n\n    void updatePath( GUID guid, String path);\n\n    @Select(\"SELECT `guid` FROM `hydra_scenario_node_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n\n    void insertOwnedNode(GUID nodeGUID,GUID parentGUID);\n    @Select(\"SELECT guid FROM hydra_scenario_node_tree WHERE parent_guid=#{guid}\")\n\n    List<GUIDImperialTrieNode> getChild(GUID guid);\n    @Delete(\"DELETE FROM `hydra_scenario_node_path` WHERE `guid`=#{guid}\")\n    void removePath(GUID guid);\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/hydranium/ScenarioMappingDriver.java",
    "content": "package com.pinecone.hydra.scenario.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class ScenarioMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n    public ScenarioMappingDriver( Processum superiorProcess ) {\n        super( superiorProcess );\n    }\n    public ScenarioMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, ScenarioMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new ScenarioMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/hydranium/ScenarioMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.scenario.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioCommonDataMapper;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioNamespaceNodeMapper;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioNamespaceNodeMetaMapper;\nimport com.pinecone.hydra.scenario.source.NamespaceNodeManipulator;\nimport com.pinecone.hydra.scenario.source.NamespaceNodeMetaManipulator;\nimport com.pinecone.hydra.scenario.source.ScenarioCommonDataManipulator;\nimport com.pinecone.hydra.scenario.source.ScenarioMasterManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n@Component\npublic class ScenarioMasterManipulatorImpl implements ScenarioMasterManipulator {\n    @Resource\n    @Structure(type = ScenarioNamespaceNodeMapper.class)\n    NamespaceNodeManipulator namespaceNodeManipulator;\n\n    @Resource\n    @Structure(type = ScenarioNamespaceNodeMetaMapper.class)\n    NamespaceNodeMetaManipulator namespaceNodeMetaManipulator;\n\n    @Resource\n    @Structure(type = ScenarioCommonDataMapper.class)\n    ScenarioCommonDataManipulator scenarioCommonDataManipulator;\n\n    @Resource\n    @Structure(type = ScenarioMasterTreeManipulatorImpl.class)\n    KOISkeletonMasterManipulator skeletonMasterManipulator;\n    public ScenarioMasterManipulatorImpl() {\n\n    }\n\n    public ScenarioMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( ScenarioMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new ScenarioMasterTreeManipulatorImpl( driver );\n    }\n    @Override\n    public NamespaceNodeManipulator getNamespaceNodeManipulator() {\n        return this.namespaceNodeManipulator;\n    }\n\n    @Override\n  
  public NamespaceNodeMetaManipulator getNSNodeMetaManipulator() {\n        return this.namespaceNodeMetaManipulator;\n    }\n\n    @Override\n    public ScenarioCommonDataManipulator getScenarioCommonDataManipulator() {\n        return this.scenarioCommonDataManipulator;\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/scenario/ibatis/hydranium/ScenarioMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.scenario.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioNodeOwnerMapper;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioNodePathCacheMapper;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioTreeMapper;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n@Component\npublic class ScenarioMasterTreeManipulatorImpl implements TreeMasterManipulator {\n\n    @Resource\n    @Structure( type = ScenarioNodePathCacheMapper.class )\n    TriePathCacheManipulator triePathCacheManipulator;\n\n    @Resource\n    @Structure( type = ScenarioNodeOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    @Resource\n    @Structure( type = ScenarioTreeMapper.class )\n    TrieTreeManipulator  trieTreeManipulator;\n\n    public ScenarioMasterTreeManipulatorImpl() {\n\n    }\n\n    public ScenarioMasterTreeManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( ScenarioMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/AppNodeMetaMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.GenericApplicationElement;\nimport com.pinecone.hydra.service.kom.source.ApplicationMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.time.LocalDateTime;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface AppNodeMetaMapper extends ApplicationMetaManipulator {\n    @Insert( \"INSERT INTO `hydra_service_app_node_meta` (`guid`, `name`, `path`, `type`, `alias`, resource_type, deployment_method, create_time, update_time) VALUES (#{metaGuid},#{name},#{path},#{type},#{alias},#{resourceType},#{deploymentMethod},#{createTime},#{updateTime})\" )\n    void insert( ApplicationElement applicationElement );\n\n    @Delete( \"DELETE FROM `hydra_service_app_node_meta` WHERE `guid`=#{guid}\" )\n    void remove( @Param(\"guid\") GUID guid );\n\n    @Select( \"SELECT `id` AS `enumId`, `guid`, `name`, `path`, `type`, `alias`, `resource_type` AS resourceType, `deployment_method` AS deploymentMethod, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_service_app_node_meta` WHERE `guid`=#{guid}\" )\n    GenericApplicationElement getApplicationElement( @Param(\"guid\") GUID guid );\n\n    default GenericApplicationElement getApplicationElement( GUID guid, ServiceInstrument serviceInstrument){\n        GenericApplicationElement element = this.getApplicationElement( guid );\n        element.apply(serviceInstrument);\n        return element;\n    }\n    @Update(\"UPDATE 
`hydra_service_app_node_meta` SET `name` = #{name}, `path` = #{path}, `type` = #{type}, `alias` = #{alias}, `resource_type` = #{resourceType}, `deployment_method` = #{deploymentMethod}, `update_time` = #{updateTime} WHERE `guid` = #{guid}\")\n     void update( ApplicationElement applicationElement );\n\n    @Update(\"UPDATE hydra_service_app_node_meta SET name = #{name} WHERE guid = #{guid}\")\n    void updateName( String name, GUID guid );\n    @Update(\"UPDATE hydra_service_app_node_meta SET path = #{path} WHERE guid = #{guid}\")\n    void updatePath( String path, GUID guid );\n    @Update(\"UPDATE hydra_service_app_node_meta SET type = #{type} WHERE guid = #{guid}\")\n    void updateType( String type, GUID guid );\n    @Update(\"UPDATE hydra_service_app_node_meta SET alias = #{alias} WHERE guid = #{guid}\")\n    void updateAlias( String alias, GUID guid );\n    @Update(\"UPDATE hydra_service_app_node_meta SET resource_type = #{resourceType} WHERE guid = #{guid}\")\n    void updateResourceType( String resourceType, GUID guid );\n    @Update(\"UPDATE hydra_service_app_node_meta SET  deployment_method= #{deploymentMethod} WHERE guid = #{guid}\")\n    void updateDeploymentMethod( String deploymentMethod, GUID guid );\n    @Update(\"UPDATE hydra_service_app_node_meta SET update_time = #{updateTime} WHERE guid = #{guid}\")\n    void updateUpdateTime(LocalDateTime updateTime, GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ApplicationNodeMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.GenericApplicationElement;\nimport com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ApplicationNodeMapper extends ApplicationNodeManipulator {\n    @Insert(\"INSERT INTO  `hydra_service_application_node` (`guid`, `name`) VALUES (#{guid},#{name})\")\n    void insert( ApplicationElement applicationElement );\n\n    @Delete(\"DELETE FROM `hydra_service_application_node` WHERE `guid`=#{guid}\")\n    void remove( @Param(\"guid\")GUID guid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_service_application_node` WHERE `guid`=#{guid}\")\n    GenericApplicationElement getApplicationNode(@Param(\"guid\")GUID guid);\n\n    @Update(\"UPDATE `hydra_service_application_node` SET name = #{name} WHERE guid = #{guid}\")\n    void update( ApplicationElement applicationElement );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_service_application_node` WHERE `name` = #{name}\" )\n    List<GUID > getGuidsByName( String name );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_service_application_node` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/NamespaceRulesMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.GenericNamespaceRules;\nimport com.pinecone.hydra.service.kom.source.NamespaceRulesManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface NamespaceRulesMapper extends NamespaceRulesManipulator {\n    @Insert(\"INSERT INTO `hydra_service_namespace_rules` (`guid`, `scope`, `name`, `description`) VALUES (#{guid},#{scope},#{name},#{description})\")\n    void insert(GenericNamespaceRules classificationRules);\n    @Delete(\"DELETE FROM `hydra_service_namespace_rules` WHERE `guid`=#{guid}\")\n    void remove(@Param(\"guid\")GUID guid);\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `scope`, `name`, `description` FROM `hydra_service_namespace_rules` WHERE `guid`=#{guid}\")\n    GenericNamespaceRules getNamespaceRules(@Param(\"guid\")GUID guid);\n    @Update(\"UPDATE `hydra_service_namespace_rules` SET `scope` = #{scope}, `name` = #{name}, `description` = #{description} WHERE `guid` = #{guid}\")\n    void update(GenericNamespaceRules classificationRules);\n    @Update(\"UPDATE `hydra_service_namespace_rules` SET `scope` = #{scope} WHERE `guid` = #{guid}\")\n    void updateScope( @Param(\"scope\") String scope, GUID guid );\n    @Update(\"UPDATE `hydra_service_namespace_rules` SET `name` = #{name} WHERE `guid` = #{guid}\")\n    void updateName( @Param(\"name\") String name, GUID guid );\n    @Update(\"UPDATE `hydra_service_namespace_rules` SET `description` = #{description} WHERE `guid` = #{guid}\")\n    void updateDescription( @Param(\"description\") String description, GUID 
guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceInstanceMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;\nimport com.pinecone.hydra.service.kom.source.ServiceInstanceManipulator;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceInstanceEntity;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ServiceInstanceMapper extends ServiceInstanceManipulator {\n    @Override\n    @Insert(\"INSERT INTO `hydra_service_instances` \" +\n            \"(`service_guid`, `guid`, `status`, `latest_start_time`, `latest_end_time`, `error_cause`, `run_count`, `deploy_guid`, `ip`) VALUES \" +\n            \"(#{serviceGuid}, #{guid}, #{status}, #{latestStartTime}, #{latestEndTime}, #{errorCause}, #{runCount}, #{deployGuid}, #{ip})\")\n    void initServiceInstance(ServiceInstanceEntry element);\n\n    @Override\n    @Select(\"SELECT `id`, `service_guid`, `guid`, `status`, `latest_start_time`, `latest_end_time`, `error_cause`, `run_count`, `deploy_guid`, `ip`\" +\n            \" FROM `hydra_service_instances` WHERE guid = #{instanceId}\")\n    GenericServiceInstanceEntity queryServiceInstance( GUID instanceId );\n\n    @Override\n    @Update(\"UPDATE `hydra_service_instances` SET status = #{status}, run_count = #{runCount} WHERE guid = #{guid}\")\n    void updateServiceInstance(ServiceInstanceEntry element);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceMetaMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceElement;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.kom.source.ServiceMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.time.LocalDateTime;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ServiceMetaMapper extends ServiceMetaManipulator {\n    @Insert(\"INSERT INTO `hydra_service_serv_node_meta` (`guid`, `name`, `path`, `type`, `alias`, resource_type, service_type, create_time, update_time) VALUES (#{metaGuid},#{name},#{path},#{type},#{alias},#{resourceType},#{serviceType},#{createTime},#{updateTime})\")\n    void insert( ServiceElement serviceElement );\n\n    @Delete(\"DELETE FROM `hydra_service_serv_node_meta` WHERE `guid`=#{guid}\")\n    void remove( @Param(\"guid\") GUID guid );\n\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name`, `path`, `type`, `alias`, `resource_type` AS resourceType, `service_type` AS serviceType, `create_time` AS createTime, `update_time` AS updateTime FROM `hydra_service_serv_node_meta` WHERE `guid`=#{guid}\")\n    GenericServiceElement getServiceMeta( @Param(\"guid\") GUID guid );\n    @Update(\"UPDATE `hydra_service_serv_node_meta` SET `name` =#{name}, `path` = #{path}, `type` = #{type}, `alias` = #{alias}, `resource_type` = #{resourceType}, `service_type` = #{serviceType}, `update_time` = #{updateTime} WHERE `guid` = #{guid}\")\n    void update( ServiceElement serviceElement );\n    @Update(\"UPDATE `hydra_service_serv_node_meta` SET `name` = #{name} WHERE `guid` = #{guid}\")\n    
void updateName( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n    @Update(\"UPDATE `hydra_service_serv_node_meta` SET `path` = #{path} WHERE `guid` = #{guid}\")\n    void updatePath( @Param(\"path\") String path, @Param(\"guid\") GUID guid );\n    @Update(\"UPDATE `hydra_service_serv_node_meta` SET `alias` = #{alias} WHERE `guid` = #{guid}\")\n    void updateAlias( @Param(\"alias\") String alias, @Param(\"guid\") GUID guid );\n    @Update(\"UPDATE `hydra_service_serv_node_meta` SET `resource_type` = #{resourceType} WHERE `guid` = #{guid}\")\n    void updateResourceType( @Param(\"resourceType\") String resourceType, @Param(\"guid\") GUID guid );\n    @Update(\"UPDATE `hydra_service_serv_node_meta` SET `service_type` = #{serviceType} WHERE `guid` = #{guid}\")\n    void updateServiceType( @Param(\"serviceType\") String serviceType, @Param(\"guid\") GUID guid );\n    @Update(\"UPDATE `hydra_service_serv_node_meta` SET `update_time` = #{updateTime} WHERE `guid` = #{guid}\")\n    void updateUpdateTime( @Param(\"updateTime\") LocalDateTime updateTime, @Param(\"guid\") GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceNamespaceMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ServiceNamespaceMapper extends ServiceNamespaceManipulator {\n    @Insert(\"INSERT INTO `hydra_service_namespace_node` (`guid`, `name`, `rules_guid`) VALUES (#{guid},#{name},#{rulesGUID})\")\n    void insert( Namespace ns );\n\n    @Delete(\"DELETE FROM `hydra_service_namespace_node` WHERE `guid`=#{guid}\")\n    void remove( @Param(\"guid\") GUID GUID );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name`, `rules_guid` AS rulesGUID FROM `hydra_service_namespace_node` WHERE `guid`=#{guid}\")\n    GenericNamespace getNamespace( @Param(\"guid\") GUID guid );\n    @Update(\"UPDATE `hydra_service_namespace_node` SET `name` = #{name} WHERE `guid` = #{guid}\")\n    void update( Namespace ns );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name`, `rules_guid` AS rulesGUID FROM `hydra_service_namespace_node` WHERE name=#{name}\")\n    List<GenericNamespace > fetchNamespaceNodeByName0( @Param(\"name\") String name );\n\n    @SuppressWarnings( \"unchecked\" )\n    default List<Namespace > fetchNamespaceNodeByName( String name ){\n        return (List) this.fetchNamespaceNodeByName0( name );\n    }\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_service_namespace_node` WHERE `name` = #{name}\" )\n    List<GUID > getGuidsByName(String 
name);\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_service_namespace_node` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceNodeMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceElement;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.kom.source.ServiceNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ServiceNodeMapper extends ServiceNodeManipulator {\n\n    @Insert(\"INSERT INTO `hydra_service_service_node` (`guid`, `name`) VALUES (#{guid},#{name})\")\n    void insert( GenericServiceElement serviceNode );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_service_service_node` WHERE `guid`=#{guid}\")\n    void remove( @Param(\"guid\")GUID guid );\n\n    @Override\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_service_service_node` WHERE `guid`=#{guid}\")\n    GenericServiceElement getServiceNode( @Param(\"guid\") GUID guid );\n\n    @Update(\"UPDATE `hydra_service_service_node` SET `name` = #{name} WHERE `guid` = #{guid}\")\n    void update( GenericServiceElement serviceNode );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid` , `name` FROM `hydra_service_service_node` WHERE name=#{name}\")\n    List<GenericServiceElement> fetchServiceNodeByName0( @Param(\"name\") String name );\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default List<ServiceElement> fetchServiceNodeByName( String name ) {\n        return (List) this.fetchServiceNodeByName0( name );\n    }\n\n\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_service_service_node` WHERE `name` = #{name}\" )\n    
List<GUID> getGuidsByName( String name );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_service_service_node` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID> getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default List<ServiceElement> fetchAllService(){\n        return (List) this.fetchAllService0();\n    }\n\n    @Select(\"SELECT `id`, `guid`, `name` FROM `hydra_service_service_node` \")\n    List<GenericServiceElement> fetchAllService0();\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceNodeMetaMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.ServiceFamilyNode;\nimport com.pinecone.hydra.service.kom.entity.GenericCommonMeta;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.service.kom.source.NodeMetaManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ServiceNodeMetaMapper extends NodeMetaManipulator {\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_service_node_meta` (`guid`, `scenario`, primary_impl_lang, extra_information, `level`, `description`) VALUES (#{guid}, #{scenario}, #{primaryImplLang}, #{extraInformation}, #{level}, #{description})\")\n    void insert( ServiceFamilyNode node );\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_service_node_meta` (`guid`, `scenario`, primary_impl_lang, extra_information, `level`, `description`) VALUES (#{metaGuid}, #{scenario}, #{primaryImplLang}, #{extraInformation}, #{level}, #{description})\")\n    void insertNS( Namespace node );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_service_node_meta` WHERE `guid`=#{guid}\")\n    void remove( @Param(\"guid\")GUID guid );\n\n    @Override\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `scenario`, `primary_impl_lang` AS primaryImplLang, `extra_information` AS extraInformation, `level`, `description` FROM `hydra_service_node_meta` WHERE `guid`=#{guid}\")\n    GenericCommonMeta getNodeCommonMeta( @Param(\"guid\") GUID guid );\n\n    @Override\n    @Update(\"UPDATE `hydra_service_node_meta` SET `scenario` = #{scenario}, `primary_impl_lang` = #{primaryImplLang}, 
`extra_information` = #{extraInformation}, `level` = #{level}, `description` = #{description} WHERE `guid` = #{guid}\")\n    void update( ServiceFamilyNode node );\n\n    @Update(\"UPDATE `hydra_service_node_meta` SET `scenario` = #{scenario} WHERE `guid` = #{guid}\")\n    void updateScenario( @Param(\"scenario\") String scenario, @Param(\"guid\") GUID guid );\n\n    @Update(\"UPDATE `hydra_service_node_meta` SET `primary_impl_lang` = #{primaryImplLang} WHERE `guid` = #{guid}\")\n    void updatePrimaryImplLang( @Param(\"primaryImplLang\") String primaryImplLang, @Param(\"guid\") GUID guid );\n\n    @Update(\"UPDATE `hydra_service_node_meta` SET `extra_information` = #{extraInformation} WHERE `guid` = #{guid}\")\n    void updateExtraInformation( @Param(\"extraInformation\") String extraInformation, @Param(\"guid\") GUID guid );\n\n    @Update(\"UPDATE `hydra_service_node_meta` SET `level` = #{level} WHERE `guid` = #{guid}\")\n    void updateLevel( @Param(\"level\") String level, @Param(\"guid\") GUID guid );\n\n    @Update(\"UPDATE `hydra_service_node_meta` SET `description` = #{description} WHERE `guid` = #{guid}\")\n    void updateDescription( @Param(\"description\") String description, @Param(\"guid\") GUID guid );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceNodeOwnerMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n@Mapper\n@IbatisDataAccessObject\npublic interface ServiceNodeOwnerMapper extends TireOwnerManipulator {\n    @Insert(\"INSERT INTO `hydra_service_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode( @Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Insert( \"INSERT INTO `hydra_service_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})\" )\n    void insert( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Update( \"UPDATE `hydra_service_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void update( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Update( \"UPDATE `hydra_service_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void updateParentGuid( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Update( \"UPDATE `hydra_service_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void updateLinkedType( @Param(\"targetGuid\") GUID 
targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Delete( \"DELETE FROM `hydra_service_node_tree` WHERE `guid`=#{subordinateGuid} AND `parent_guid`=#{ownerGuid} AND `linked_type` = 'Owned'\" )\n    void remove( @Param(\"subordinateGuid\") GUID subordinateGuid, @Param(\"ownerGuid\") GUID ownerGuid );\n\n    @Delete( \"DELETE FROM `hydra_service_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'\" )\n    void removeBySubordinate( GUID subordinateGuid );\n\n//    @Delete(\"DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}\")\n//    void removeByOwner(GUID ownerGuid);\n\n    @Select( \"SELECT `parent_guid` FROM `hydra_service_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'\" )\n    GUID getOwner( GUID subordinateGuid );\n\n    @Select( \"SELECT guid FROM hydra_service_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'\" )\n    List<GUID > getSubordinates( GUID guid );\n\n\n    @Update(\"UPDATE `hydra_service_node_tree` SET `linked_type` = #{linkedType} WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}\")\n    void setLinkedType( @Param(\"sourceGuid\") GUID sourceGuid, @Param(\"targetGuid\") GUID targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Select(\"SELECT `linked_type` FROM `hydra_service_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}\")\n    LinkedType getLinkedType( @Param(\"childGuid\") GUID childGuid,@Param(\"parentGuid\") GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServicePathCacheMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n@Mapper\n@IbatisDataAccessObject\npublic interface ServicePathCacheMapper extends TriePathCacheManipulator {\n    @Insert(\"INSERT INTO `hydra_service_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )\")\n    void insert(@Param(\"guid\") GUID guid, @Param(\"path\") String path );\n\n    @Insert(\"INSERT INTO `hydra_service_node_cache_path` (path, long_path, guid) VALUES ( #{path},#{longPath},#{guid} )\")\n    void insertLongPath( @Param(\"guid\") GUID guid, @Param(\"path\") String path, @Param(\"longPath\") String longPath );\n\n    @Delete(\"DELETE FROM `hydra_service_node_cache_path` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n\n    default String getPath( GUID guid ){\n        String longPath = this.getLongPath(guid);\n        if ( longPath != null ){\n            return this.getPath0( guid )+this.getLongPath( guid );\n        }\n        return this.getPath0( guid );\n    };\n    @Select(\"SELECT `long_path` FROM `hydra_service_node_cache_path` WHERE `guid`=#{guid}\")\n    String getLongPath( GUID guid );\n    @Select(\"SELECT `path` FROM `hydra_service_node_cache_path` WHERE `guid`=#{guid}\")\n    String getPath0( GUID guid );\n    @Select(\"SELECT `guid` FROM `hydra_service_node_cache_path` WHERE `guid`=#{guid}\")\n    GUID getNode( String path );\n\n    @Select(\"SELECT `guid` FROM `hydra_service_node_cache_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/ServiceTreeMapper.java",
    "content": "package com.pinecone.hydra.service.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface ServiceTreeMapper extends TrieTreeManipulator {\n    @Insert(\"INSERT INTO `hydra_service_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode(@Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    default void insert (TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){\n        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );\n        ownerManipulator.insertRootNode( node.getGuid() );\n    }\n\n    @Insert(\"INSERT INTO hydra_service_nodes (`guid`, `type`,`base_data_guid`,`node_metadata_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})\")\n    void insertTreeNode( @Param(\"guid\") GUID guid, @Param(\"type\") UOI type, @Param(\"baseDataGuid\") GUID baseDataGuid, @Param(\"nodeMetaGuid\") GUID nodeMetaGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_metadata_guid AS nodeMetadataGUID FROM hydra_service_nodes WHERE guid=#{guid}\")\n    GUIDImperialTrieNode 
getNodeExtendsFromMeta(GUID guid );\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_service_nodes WHERE guid=#{guid}\")\n    boolean contains( GUID key );\n\n    @Override\n    default GUIDImperialTrieNode getNode(GUID guid ) {\n        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );\n        if( node == null ) {\n            return null;\n        }\n        List<GUID > parent = this.fetchParentGuids( guid );\n        node.setParentGUID( parent );\n        return node;\n    }\n\n    @Select(\"SELECT id, guid, parent_guid, linked_type FROM hydra_service_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    GUIDImperialTrieNode getTreeNodeOnly(@Param(\"guid\") GUID guid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT count( * ) FROM hydra_service_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    long countNode( GUID guid, GUID parentGuid );\n\n\n\n    @Override\n    default void purge( GUID guid ) {\n        this.removeNodeMeta( guid );\n        this.removeTreeNode( guid );\n        this.removeOwnedTreeNode( guid );\n    }\n\n    @Delete(\"DELETE FROM `hydra_service_nodes` WHERE `guid`=#{guid}\")\n    void removeNodeMeta( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_service_node_tree` WHERE `guid` = #{guid}\")\n    void removeTreeNode( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_service_node_tree` WHERE `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeByParentGuid( @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_service_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeYoke( @Param(\"guid\") GUID guid, @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_service_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\")\n    void removeTreeNodeWithLinkedType( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType 
linkedType );\n\n\n\n\n    @Delete(\"DELETE FROM `hydra_service_node_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance( @Param(\"chileGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_service_node_tree` WHERE `parent_guid`=#{guid}\")\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_service_node_tree` WHERE `parent_guid` = #{parentGuid}\")\n    List<GUID > fetchChildrenGuids( @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_service_node_tree` WHERE `guid`=#{guid}\")\n    List<GUID > fetchParentGuids( GUID guid );\n\n    @Update(\"UPDATE `hydra_service_nodes` SET `type` = #{type} WHERE guid=#{guid}\")\n    void updateType( UOI type , GUID guid );\n\n    @Select( \"SELECT guid FROM hydra_service_node_tree WHERE parent_guid IS NULL \" )\n    List<GUID > fetchRoot();\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_service_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}\" )\n    boolean isRoot( GUID guid );\n\n\n\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_service_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\" )\n    long queryLinkedCount( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_service_node_tree WHERE `guid` = #{guid}\" )\n    long queryAllLinkedCount( @Param(\"guid\") GUID guid );\n\n\n    @Override\n    @Insert(\n            \"INSERT INTO `hydra_service_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) \" +\n                    \"VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})\"\n    )\n    void newLinkTag(\n            @Param(\"originalGuid\") GUID originalGuid, @Param(\"dirGuid\") GUID dirGuid,\n          
  @Param(\"tagName\") String tagName, @Param(\"tagGuid\") GUID tagGuid, @Param(\"linkedType\") LinkedType linkedType\n    );\n\n    @Override\n    @Update( \"UPDATE hydra_service_node_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}\" )\n    void updateLinkTagName( @Param(\"tagGuid\") GUID tagGuid, @Param(\"tagName\") String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_service_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}\" )\n    GUID getOriginalGuid( @Param(\"tagName\") String tagName, @Param(\"dirGuid\") GUID dirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_service_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    GUID getOriginalGuidByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_service_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}\" )\n    TreeReparseLinkNode getReparseLinkNode(@Param(\"tagName\") String tagName, @Param(\"parentDirGuid\") GUID parentDirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_service_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_service_node_tree WHERE `tag_name` = #{tagName}\" )\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_service_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL\" )\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    @Override\n    @Select( 
\"SELECT COUNT(*) FROM `hydra_service_node_tree` WHERE `tag_guid` = #{guid}\" )\n    boolean isTagGuid(GUID guid);\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_service_node_tree` WHERE `tag_guid` = #{guid}\" )\n    void removeReparseLink( GUID guid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_service_node_tree` WHERE `tag_guid` = #{tagGuid}\" )\n    GUID getOriginalGuidByTagGuid(GUID tagGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/hydranium/ServiceMappingDriver.java",
    "content": "package com.pinecone.hydra.service.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class ServiceMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n    public ServiceMappingDriver( Processum superiorProcess ) {\n        super(superiorProcess);\n    }\n    public ServiceMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, ServiceMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new ServiceMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/hydranium/ServiceMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.service.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.service.ibatis.AppNodeMetaMapper;\nimport com.pinecone.hydra.service.ibatis.ApplicationNodeMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceInstanceMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNamespaceMapper;\nimport com.pinecone.hydra.service.ibatis.NamespaceRulesMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNodeMetaMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceMetaMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNodeMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceTreeMapper;\nimport com.pinecone.hydra.service.kom.source.ApplicationMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceInstanceManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator;\nimport com.pinecone.hydra.service.kom.source.NamespaceRulesManipulator;\nimport com.pinecone.hydra.service.kom.source.NodeMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceNodeManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class ServiceMasterManipulatorImpl implements ServiceMasterManipulator {\n\n    @Resource\n    @Structure(type = ServiceMasterTreeManipulatorImpl.class )\n    
KOISkeletonMasterManipulator skeletonMasterManipulator;\n\n    @Resource\n    @Structure(type = ServiceTreeMapper.class )\n    TrieTreeManipulator             trieTreeManipulator;\n\n    @Resource\n    @Structure(type = ServiceNodeMetaMapper.class )\n    NodeMetaManipulator nodeMetaManipulator;\n\n    @Resource\n    @Structure(type = ApplicationNodeMapper.class )\n    ApplicationNodeManipulator      applicationNodeManipulator;\n\n    @Resource\n    @Structure( type = AppNodeMetaMapper.class )\n    ApplicationMetaManipulator      applicationMetaManipulator;\n\n    @Resource\n    @Structure( type = ServiceNodeMapper.class )\n    ServiceNodeManipulator          serviceNodeManipulator;\n\n    @Resource\n    @Structure( type = ServiceMetaMapper.class )\n    ServiceMetaManipulator          serviceMetaManipulator;\n\n    @Resource\n    @Structure( type = ServiceNamespaceMapper.class )\n    ServiceNamespaceManipulator serviceNamespaceManipulator;\n\n    @Resource\n    @Structure( type = ServiceInstanceMapper.class )\n    ServiceInstanceManipulator serviceInstanceManipulator;\n\n    @Resource\n    @Structure( type = NamespaceRulesMapper.class )\n    NamespaceRulesManipulator namespaceRulesManipulator;\n\n    @Resource\n    @Structure( type = ServiceNodeOwnerMapper.class )\n    TireOwnerManipulator            tireOwnerManipulator;\n\n    public ServiceMasterManipulatorImpl() {\n\n    }\n\n    public ServiceMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( ServiceMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new ServiceMasterTreeManipulatorImpl( driver );\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public NodeMetaManipulator getNodeMetaManipulator() {\n        return this.nodeMetaManipulator;\n    }\n\n    @Override\n    public ApplicationNodeManipulator getApplicationNodeManipulator() {\n      
  return this.applicationNodeManipulator;\n    }\n\n    @Override\n    public ApplicationMetaManipulator getApplicationElementManipulator() {\n        return this.applicationMetaManipulator;\n    }\n\n    @Override\n    public ServiceNodeManipulator getServiceNodeManipulator() {\n        return this.serviceNodeManipulator;\n    }\n\n    @Override\n    public ServiceMetaManipulator getServiceMetaManipulator() {\n        return this.serviceMetaManipulator;\n    }\n\n    @Override\n    public ServiceNamespaceManipulator getNamespaceManipulator() {\n        return this.serviceNamespaceManipulator;\n    }\n\n    @Override\n    public NamespaceRulesManipulator getNamespaceRulesManipulator() {\n        return this.namespaceRulesManipulator;\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public ServiceInstanceManipulator getServiceInstanceManipulator() {\n        return this.serviceInstanceManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/service/ibatis/hydranium/ServiceMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.service.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper;\nimport com.pinecone.hydra.service.ibatis.ServicePathCacheMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceTreeMapper;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n@Component\npublic class ServiceMasterTreeManipulatorImpl implements TreeMasterManipulator {\n    @Resource\n    @Structure( type = ServicePathCacheMapper.class )\n    TriePathCacheManipulator triePathCacheManipulator;\n\n    @Resource\n    @Structure( type = ServiceNodeOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    @Resource\n    @Structure( type = ServiceTreeMapper.class )\n    TrieTreeManipulator  trieTreeManipulator;\n\n    public ServiceMasterTreeManipulatorImpl() {\n\n    }\n\n    public ServiceMasterTreeManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( ServiceMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/AppNodeMapper.java",
    "content": "package com.pinecone.hydra.task.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.AppElement;\nimport com.pinecone.hydra.task.kom.entity.GenericAppElement;\nimport com.pinecone.hydra.task.kom.source.AppNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface AppNodeMapper extends AppNodeManipulator {\n\n    @Override\n    @Insert(\"INSERT INTO `hydra_task_app_node` \" +\n            \"(`guid`, `name`, `type`, `create_time`, `update_time`) \" +\n            \"VALUES (#{guid}, #{name}, #{type}, #{createTime}, #{updateTime})\")\n    void insert( AppElement appElement);\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_task_app_node` WHERE `guid` = #{guid}\")\n    void remove(@Param(\"guid\") GUID guid);\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name`, `type`, \" +\n            \"`create_time` AS `createTime`, `update_time` AS `updateTime` \" +\n            \"FROM `hydra_task_app_node` WHERE `guid` = #{guid}\")\n    GenericAppElement getAppElement(@Param(\"guid\") GUID guid);\n\n    @Override\n    default AppElement getAppElement(GUID guid, TaskInstrument instrument ) {\n        GenericAppElement element = this.getAppElement( guid );\n        element.apply( instrument );\n\n        return element;\n    }\n\n    @Override\n    @Update(\"UPDATE `hydra_task_app_node` SET \" +\n            \"`name` = #{name}, \" +\n            \"`type` = #{type}, \" +\n            \"`create_time` = #{createTime}, \" +\n            \"`update_time` = #{updateTime} \" +\n            \"WHERE 
`guid` = #{guid}\")\n    void update( AppElement appElement);\n\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_task_app_node` WHERE `name` = #{name}\" )\n    List<GUID > getGuidsByName( String name );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_task_app_node` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/InstanceNodeMapper.java",
    "content": "package com.pinecone.hydra.task.ibatis;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.CollectionUtils;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.instance.GenericInstanceEntry;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.pinecone.slime.meta.TableIndex64Meta;\n\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\n\nimport java.time.LocalDateTime;\nimport java.util.Collection;\nimport java.util.List;\n\n\n@Mapper\n@IbatisDataAccessObject\npublic interface InstanceNodeMapper extends InstanceNodeManipulator {\n\n    @Override\n    void insert( InstanceEntry instance );\n\n    void update( InstanceEntry instance );\n\n    GenericInstanceEntry queryByGuid0( GUID guid );\n\n    @Override\n    default InstanceEntry queryByGuid( GUID guid, TaskInstrument instrument ) {\n        GenericInstanceEntry entry = this.queryByGuid0( guid );\n        if ( entry == null ) {\n            return null;\n        }\n        entry.apply( instrument );\n        return entry;\n    }\n\n\n\n    int countInstance();\n\n    long countInstanceByName( String name );\n\n    List<GenericInstanceEntry> fetchInstances0( @Param(\"offset\") long offset, @Param(\"pageSize\") long pageSize );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<InstanceEntry> fetchInstances( TaskInstrument instrument, long offset, long pageSize ) {\n        List<GenericInstanceEntry> list = this.fetchInstances0( offset, pageSize );\n        for ( GenericInstanceEntry entry : list ) {\n            entry.apply( instrument );\n        }\n        return (List) list;\n    }\n\n    
List<GenericInstanceEntry> queryByTaskGuid0( @Param(\"taskGuid\") GUID taskGuid, @Param(\"offset\") long offset, @Param(\"pageSize\") long pageSize );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<InstanceEntry> queryByTaskGuid( TaskInstrument instrument, GUID taskGuid, long offset, long pageSize ) {\n        List<GenericInstanceEntry> list = this.queryByTaskGuid0( taskGuid, offset, pageSize );\n        for ( GenericInstanceEntry entry : list ) {\n            entry.apply( instrument );\n        }\n        return (List) list;\n    }\n\n    long countInstanceByTaskGuid( GUID taskGuid );\n\n    GenericInstanceEntry findLastExecuted0( @Param(\"taskGuid\") GUID taskGuid, @Param(\"bizTime\") String bizTime );\n\n    @Override\n    default InstanceEntry findLastExecuted( GUID taskGuid, TaskInstrument instrument, String bizTime ) {\n        GenericInstanceEntry entry = this.findLastExecuted0( taskGuid, bizTime );\n        if ( entry == null ) {\n            return null;\n        }\n        entry.apply( instrument );\n        return entry;\n    }\n\n\n\n\n\n    @Override\n    TableIndex64Meta selectSchedulableIdRange(\n            @Param(\"runStatuses\") Collection<TaskInstanceStatus> runStatuses, @Param(\"targetTime\") LocalDateTime targetTime,\n            @Param( \"actuallyPriority\" ) @Nullable Short actuallyPriority\n    );\n\n    List<GenericInstanceEntry> fetchSchedulableInstances0(\n            @Param( \"idMin\" ) long idMin, @Param( \"idMax\" ) long idMax,\n            @Param(\"runStatuses\") Collection<TaskInstanceStatus> runStatuses, @Param( \"targetTime\" ) LocalDateTime targetTime,\n            @Param( \"actuallyPriority\" ) @Nullable Short actuallyPriority\n    );\n\n    @Override\n    default List<InstanceEntry> fetchSchedulableInstances(\n            TaskInstrument instrument,\n            long idMin, long idMax, Collection<TaskInstanceStatus> runStatuses, LocalDateTime targetTime, @Nullable Short actuallyPriority\n    ) {\n      
  List<GenericInstanceEntry> list = this.fetchSchedulableInstances0( idMin, idMax, runStatuses, targetTime, actuallyPriority );\n        for ( GenericInstanceEntry entry : list ) {\n            entry.apply( instrument );\n        }\n        return CollectionUtils.genericConvert( list );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskNamespaceMapper.java",
    "content": "package com.pinecone.hydra.task.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.entity.GenericNamespace;\nimport com.pinecone.hydra.task.kom.entity.Namespace;\nimport com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface TaskNamespaceMapper extends TaskNamespaceManipulator {\n    @Insert(\"INSERT INTO `hydra_task_namespace_node` (`guid`, `name`) VALUES (#{guid},#{name})\")\n    void insert( Namespace ns );\n\n    @Delete(\"DELETE FROM `hydra_task_namespace_node` WHERE `guid`=#{guid}\")\n    void remove( @Param(\"guid\") GUID GUID );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_task_namespace_node` WHERE `guid`=#{guid}\")\n    GenericNamespace getNamespace( @Param(\"guid\") GUID guid );\n\n    @Update(\"UPDATE `hydra_task_namespace_node` SET `name` = #{name} WHERE `guid` = #{guid}\")\n    void update( Namespace ns );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `name` FROM `hydra_task_namespace_node` WHERE name=#{name}\")\n    List<GenericNamespace > fetchNamespaceNodeByName0( @Param(\"name\") String name );\n\n    @SuppressWarnings( \"unchecked\" )\n    default List<Namespace > fetchNamespaceNodeByName( String name ){\n        return (List) this.fetchNamespaceNodeByName0( name );\n    }\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_task_namespace_node` WHERE `name` = #{name}\" )\n    List<GUID > getGuidsByName(String name);\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_task_namespace_node` WHERE `name` = #{name} AND `guid` = 
#{guid}\" )\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskNodeMapper.java",
    "content": "package com.pinecone.hydra.task.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.GenericTaskElement;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.source.TaskNodeManipulator;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.pinecone.slime.meta.TableIndex64Meta;\n\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.time.LocalDateTime;\nimport java.util.Collection;\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface TaskNodeMapper extends TaskNodeManipulator {\n\n    @Override\n    void insert( TaskElement taskElement );\n\n    @Override\n    void remove( @Param(\"guid\") GUID guid );\n\n    GenericTaskElement getTaskNode0( @Param(\"guid\") GUID guid );\n\n    @Override\n    default TaskElement getTaskNode( GUID guid, TaskInstrument instrument ) {\n        GenericTaskElement taskElement = this.getTaskNode0( guid );\n        taskElement.apply( instrument );\n        return taskElement;\n    }\n\n    @Override\n    void update( TaskElement taskElement );\n\n    List<GenericTaskElement> fetchTaskNodeByName0( @Param(\"name\") String name );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<TaskElement> fetchTaskNodeByName( String name ) {\n        List<GenericTaskElement> list = this.fetchTaskNodeByName0( name );\n        return (List) list;\n    }\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_task_task_node` WHERE `name` = #{name}\" )\n    List<GUID> getGuidsByName( String name );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_task_task_node` WHERE `name` = #{name} AND `guid` = #{guid}\" )\n    List<GUID> getGuidsByNameID( @Param(\"name\") String 
name, @Param(\"guid\") GUID guid );\n\n\n\n\n\n    @Override\n    TableIndex64Meta selectSchedulableIdRange(\n            @Param( \"cycles\" ) Collection<TaskScheduleCycle> cycles,\n            @Param( \"targetTime\" ) LocalDateTime targetTime\n    );\n\n    List<GenericTaskElement> fetchSchedulableTasksInRange0(\n            @Param( \"idMin\"  ) long idMin,\n            @Param( \"idMax\"  ) long idMax,\n            @Param( \"cycles\" ) Collection<TaskScheduleCycle> cycles,\n            @Param( \"targetTime\" ) LocalDateTime targetTime\n    );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<TaskElement> fetchSchedulableTasksInRange( long idMin, long idMax, Collection<TaskScheduleCycle> cycles, LocalDateTime targetTime ) {\n        List<GenericTaskElement> list = this.fetchSchedulableTasksInRange0( idMin, idMax, cycles, targetTime );\n        return (List) list;\n    }\n\n\n\n    @Override\n    List<TaskElement> listPage(int offset, int pageSize);\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskNodeOwnerMapper.java",
    "content": "package com.pinecone.hydra.task.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface TaskNodeOwnerMapper extends TireOwnerManipulator {\n    @Insert(\"INSERT INTO `hydra_task_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode(@Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Insert( \"INSERT INTO `hydra_task_node_tree` (`guid`, `parent_guid`,`linked_type`) VALUES (#{targetGuid}, #{parentGuid}, #{linkedType})\" )\n    void insert( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Update( \"UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void update( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Update( \"UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void updateParentGuid( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Update( \"UPDATE `hydra_task_node_tree` SET `guid` = #{targetGuid}, `linked_type` = #{linkedType} WHERE `guid` = #{targetGuid}\" )\n    void updateLinkedType( @Param(\"targetGuid\") GUID targetGuid, 
@Param(\"linkedType\") LinkedType linkedType );\n\n\n\n    @Delete( \"DELETE FROM `hydra_task_node_tree` WHERE `guid`=#{subordinateGuid}  AND `linked_type` = 'Owned'\" )\n    void remove( @Param(\"subordinateGuid\") GUID subordinateGuid, @Param(\"ownerGuid\") GUID ownerGuid );\n\n    @Delete( \"DELETE FROM `hydra_task_node_tree` WHERE `guid`=#{subordinateGuid} AND `linked_type` = 'Owned'\" )\n    void removeBySubordinate( GUID subordinateGuid );\n\n//    @Delete(\"DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}\")\n//    void removeByOwner(GUID ownerGuid);\n\n    @Select( \"SELECT `parent_guid` FROM `hydra_task_node_tree` WHERE `guid`=#{subordinateGuid} AND linked_type = 'Owned'\" )\n    GUID getOwner( GUID subordinateGuid );\n\n    @Select( \"SELECT guid FROM hydra_task_node_tree where parent_guid=#{guid} AND linked_type = 'Owned'\" )\n    List<GUID > getSubordinates( GUID guid );\n\n\n    @Update(\"UPDATE `hydra_task_node_tree` SET `linked_type` = '#{linkedType}' WHERE `guid` = #{sourceGuid} AND `parent_guid` = #{targetGuid}\")\n    void setLinkedType( @Param(\"sourceGuid\") GUID sourceGuid, @Param(\"targetGuid\") GUID targetGuid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Select(\"SELECT `linked_type` FROM `hydra_task_node_tree` WHERE `guid` = #{childGuid} AND `parent_guid` =#{parentGuid}\")\n    LinkedType getLinkedType( @Param(\"childGuid\") GUID childGuid,@Param(\"parentGuid\") GUID parentGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskPathCacheMapper.java",
    "content": "package com.pinecone.hydra.task.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface TaskPathCacheMapper extends TriePathCacheManipulator {\n    @Insert(\"INSERT INTO `hydra_task_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )\")\n    void insert(@Param(\"guid\") GUID guid, @Param(\"path\") String path );\n\n    @Insert(\"INSERT INTO `hydra_task_node_cache_path` (path, long_path, guid) VALUES ( #{path},#{longPath},#{guid} )\")\n    void insertLongPath( @Param(\"guid\") GUID guid, @Param(\"path\") String path, @Param(\"longPath\") String longPath );\n\n    @Delete(\"DELETE FROM `hydra_task_node_cache_path` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n\n    default String getPath( GUID guid ){\n        String longPath = this.getLongPath(guid);\n        if ( longPath != null ){\n            return this.getPath0( guid ) + this.getLongPath( guid );\n        }\n        return this.getPath0( guid );\n    }\n\n    @Select(\"SELECT `long_path` FROM `hydra_task_node_cache_path` WHERE `guid`=#{guid}\")\n    String getLongPath( GUID guid );\n\n    @Select(\"SELECT `path` FROM `hydra_task_node_cache_path` WHERE `guid`=#{guid}\")\n    String getPath0( GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_task_node_cache_path` WHERE `guid`=#{guid}\")\n    GUID getNode( String path );\n\n    @Select(\"SELECT `guid` FROM `hydra_task_node_cache_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/TaskTreeMapper.java",
    "content": "package com.pinecone.hydra.task.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.LinkedType;\nimport com.pinecone.hydra.unit.imperium.entity.TreeReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface TaskTreeMapper extends TrieTreeManipulator {\n    @Insert(\"INSERT INTO `hydra_task_node_tree` (`guid`, `linked_type`) VALUES ( #{guid}, #{linkedType} )\")\n    void insertRootNode(@Param(\"guid\")  GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    default void insert (TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){\n        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );\n        ownerManipulator.insertRootNode( node.getGuid() );\n    }\n\n    @Insert(\"INSERT INTO hydra_task_nodes (`guid`, `type`,`base_data_guid`,`node_metadata_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})\")\n    void insertTreeNode(@Param(\"guid\") GUID guid, @Param(\"type\") UOI type, @Param(\"baseDataGuid\") GUID baseDataGuid, @Param(\"nodeMetaGuid\") GUID nodeMetaGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_metadata_guid AS nodeMetadataGUID FROM hydra_task_nodes WHERE guid=#{guid}\")\n    GUIDImperialTrieNode 
getNodeExtendsFromMeta(GUID guid );\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_task_nodes WHERE guid=#{guid}\")\n    boolean contains( GUID key );\n\n    @Override\n    default GUIDImperialTrieNode getNode(GUID guid ) {\n        GUIDImperialTrieNode node = this.getNodeExtendsFromMeta( guid );\n        if( node == null ) {\n            return null;\n        }\n        List<GUID > parent = this.fetchParentGuids( guid );\n        node.setParentGUID( parent );\n        return node;\n    }\n\n    @Select(\"SELECT id, guid, parent_guid, linked_type FROM hydra_task_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    GUIDImperialTrieNode getTreeNodeOnly(@Param(\"guid\") GUID guid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT count( * ) FROM hydra_task_node_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    long countNode( GUID guid, GUID parentGuid );\n\n\n\n    @Override\n    default void purge( GUID guid ) {\n        this.removeNodeMeta( guid );\n        this.removeTreeNode( guid );\n        this.removeOwnedTreeNode( guid );\n    }\n\n    @Delete(\"DELETE FROM `hydra_task_nodes` WHERE `guid`=#{guid}\")\n    void removeNodeMeta( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_task_node_tree` WHERE `guid` = #{guid}\")\n    void removeTreeNode( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_task_node_tree` WHERE `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeByParentGuid( @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_task_node_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeYoke( @Param(\"guid\") GUID guid, @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_task_node_tree` WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\")\n    void removeTreeNodeWithLinkedType( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n\n\n\n    
@Delete(\"DELETE FROM `hydra_task_node_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance( @Param(\"chileGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_task_node_tree` WHERE `parent_guid`=#{guid}\")\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_task_node_tree` WHERE `parent_guid` = #{parentGuid}\")\n    List<GUID > fetchChildrenGuids( @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_task_node_tree` WHERE `guid`=#{guid}\")\n    List<GUID > fetchParentGuids( GUID guid );\n\n    @Update(\"UPDATE `hydra_task_nodes` SET `type` = #{type} WHERE guid=#{guid}\")\n    void updateType( UOI type , GUID guid );\n\n    @Select( \"SELECT guid FROM hydra_task_node_tree WHERE parent_guid IS NULL \" )\n    List<GUID > fetchRoot();\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_task_node_tree WHERE `parent_guid` IS NULL AND guid = #{guid}\" )\n    boolean isRoot( GUID guid );\n\n\n\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_task_node_tree WHERE `guid` = #{guid} AND `linked_type` = #{linkedType}\" )\n    long queryLinkedCount( @Param(\"guid\") GUID guid, @Param(\"linkedType\") LinkedType linkedType );\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_task_node_tree WHERE `guid` = #{guid}\" )\n    long queryAllLinkedCount( @Param(\"guid\") GUID guid );\n\n\n    @Override\n    @Insert(\n            \"INSERT INTO `hydra_task_node_tree` (`guid`, `linked_type`,`tag_name`,`tag_guid`,`parent_guid`) \" +\n                    \"VALUES (#{originalGuid}, #{linkedType}, #{tagName}, #{tagGuid}, #{dirGuid})\"\n    )\n    void newLinkTag(\n            @Param(\"originalGuid\") GUID originalGuid, @Param(\"dirGuid\") GUID dirGuid,\n            @Param(\"tagName\") String tagName, 
@Param(\"tagGuid\") GUID tagGuid, @Param(\"linkedType\") LinkedType linkedType\n    );\n\n    @Override\n    @Update( \"UPDATE hydra_task_node_tree SET tag_name = #{tagName} WHERE tag_guid =#{tagGuid}\" )\n    void updateLinkTagName( @Param(\"tagGuid\") GUID tagGuid, @Param(\"tagName\") String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_task_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{dirGuid}\" )\n    GUID getOriginalGuid( @Param(\"tagName\") String tagName, @Param(\"dirGuid\") GUID dirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_task_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    GUID getOriginalGuidByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_task_node_tree WHERE tag_name = #{tagName} AND parent_guid = #{parentDirGuid}\" )\n    TreeReparseLinkNode getReparseLinkNode(@Param(\"tagName\") String tagName, @Param(\"parentDirGuid\") GUID parentDirGuid );\n\n    @Override\n    @Select( \"SELECT `guid` AS targetNodeGuid, `parent_guid` AS parentNodeGuid, `linked_type` AS linkedType, `tag_name` AS tagName, `tag_guid` AS tagGuid FROM hydra_task_node_tree WHERE tag_name = #{tagName} AND guid = #{nodeGuid}\" )\n    TreeReparseLinkNode getReparseLinkNodeByNodeGuid( @Param(\"tagName\") String tagName, @Param(\"nodeGuid\") GUID nodeGUID );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_task_node_tree WHERE `tag_name` = #{tagName}\" )\n    List<GUID > fetchOriginalGuid( String tagName );\n\n    @Override\n    @Select( \"SELECT `guid` FROM hydra_task_node_tree WHERE `tag_name` = #{tagName} AND `parent_guid` IS NULL\" )\n    List<GUID > fetchOriginalGuidRoot( String tagName );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM `hydra_task_node_tree` WHERE 
`tag_guid` = #{guid}\" )\n    boolean isTagGuid(GUID guid);\n\n    @Override\n    @Delete( \"DELETE FROM `hydra_task_node_tree` WHERE `tag_guid` = #{guid}\" )\n    void removeReparseLink( GUID guid );\n\n    @Override\n    @Select( \"SELECT `guid` FROM `hydra_task_node_tree` WHERE `tag_guid` = #{tagGuid}\" )\n    GUID getOriginalGuidByTagGuid(GUID tagGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/hydranium/TaskMappingDriver.java",
    "content": "package com.pinecone.hydra.task.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class TaskMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public TaskMappingDriver( Processum superiorProcess ) {\n        super( superiorProcess );\n    }\n\n    public TaskMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, TaskMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new TaskMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/hydranium/TaskMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.task.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\n\nimport com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.task.ibatis.AppNodeMapper;\nimport com.pinecone.hydra.task.ibatis.TaskNamespaceMapper;\nimport com.pinecone.hydra.task.ibatis.TaskNodeMapper;\nimport com.pinecone.hydra.task.ibatis.TaskNodeOwnerMapper;\nimport com.pinecone.hydra.task.ibatis.TaskTreeMapper;\nimport com.pinecone.hydra.task.ibatis.InstanceNodeMapper;\nimport com.pinecone.hydra.task.kom.source.AppNodeManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskNamespaceManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskNodeManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class TaskMasterManipulatorImpl implements TaskMasterManipulator {\n    @Resource\n    @Structure( type = TaskNodeOwnerMapper.class )\n    TireOwnerManipulator            tireOwnerManipulator;\n\n    @Resource\n    @Structure(type = TaskTreeMapper.class )\n    TrieTreeManipulator             trieTreeManipulator;\n\n    @Resource\n    @Structure(type = TaskNodeMapper.class)\n    TaskNodeManipulator taskNodeManipulator;\n\n    @Resource\n    @Structure(type = AppNodeMapper.class )\n    AppNodeManipulator appNodeManipulator;\n\n    @Resource\n    @Structure( type = TaskNamespaceMapper.class )\n    TaskNamespaceManipulator taskNamespaceManipulator;\n\n    @Resource\n    @Structure(type = TaskMasterTreeManipulatorImpl.class)\n    KOISkeletonMasterManipulator 
skeletonMasterManipulator;\n\n\n    @Resource\n    @Structure(type = InstanceNodeMapper.class)\n    InstanceNodeManipulator instanceNodeManipulator;\n\n    public TaskMasterManipulatorImpl() {\n\n    }\n\n    public TaskMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( TaskMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new TaskMasterTreeManipulatorImpl( driver );\n    }\n\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TaskNodeManipulator getTaskNodeManipulator() {\n        return this.taskNodeManipulator;\n    }\n\n    @Override\n    public AppNodeManipulator getAppNodeManipulator() {\n        return this.appNodeManipulator;\n    }\n\n    @Override\n    public TaskNamespaceManipulator getNamespaceManipulator() {\n        return this.taskNamespaceManipulator;\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public InstanceNodeManipulator getInstanceNodeManipulator() {\n        return this.instanceNodeManipulator;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/task/ibatis/hydranium/TaskMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.task.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.task.ibatis.TaskNodeOwnerMapper;\nimport com.pinecone.hydra.task.ibatis.TaskPathCacheMapper;\nimport com.pinecone.hydra.task.ibatis.TaskTreeMapper;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class TaskMasterTreeManipulatorImpl implements TreeMasterManipulator {\n    @Resource\n    @Structure( type = TaskPathCacheMapper.class )\n    TriePathCacheManipulator triePathCacheManipulator;\n\n    @Resource\n    @Structure( type = TaskNodeOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    @Resource\n    @Structure( type = TaskTreeMapper.class )\n    TrieTreeManipulator  trieTreeManipulator;\n\n    public TaskMasterTreeManipulatorImpl() {\n\n    }\n\n    public TaskMasterTreeManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( TaskMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/version/ibatis/VersionMapper.java",
    "content": "package com.pinecone.hydra.version.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.version.entity.TitanVersion;\nimport com.pinecone.hydra.storage.version.entity.Version;\nimport com.pinecone.hydra.storage.version.source.VersionManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface VersionMapper extends VersionManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_version` ( `version_guid`, `version`, `target_storage_object_guid`, `file_guid`) VALUES (#{versionGuid},#{version}, #{targetStorageObjectGuid}, #{fileGuid})\")\n    void insertObjectVersion(Version version);\n\n    @Delete(\"DELETE FROM `hydra_uofs_version` WHERE `version` = #{version} AND `file_guid` = #{fileGuid}\")\n    void removeObjectVersion( String version, GUID fileGuid );\n\n    @Select(\"SELECT `target_storage_object_guid` FROM `hydra_uofs_version` WHERE `version` = #{version} AND file_guid = #{fileGuid}\")\n    GUID queryObjectGuid( String version, GUID fileGuid );\n\n\n    @Select(\"SELECT EXISTS(SELECT 1 FROM `hydra_uofs_version` WHERE `file_guid` = #{fileGuid})\")\n    boolean queryIsManage(@Param(\"fileGuid\") GUID fileGuid);\n\n    @Select(\"SELECT `target_storage_object_guid` FROM `hydra_uofs_version` WHERE `file_guid` = #{fileGuid}\")\n    List<GUID> fetchVersions(GUID guid);\n\n    @Select(\"SELECT `file_guid` FROM `hydra_uofs_version` WHERE target_storage_object_guid = #{fileGuid}\")\n    GUID getVersionFileByGuid(GUID fileGuid);\n\n    @Select(\"SELECT `version`, `target_storage_object_guid`AS targetStorageObjectGuid, `file_guid` AS fileGuid, `enable_crc32` AS enableCrc32, `crc32`, `version_guid` AS versionGuid FROM `hydra_uofs_version` WHERE 
`target_storage_object_guid` = #{targetStorageObjectGuid}\")\n    TitanVersion queryByTargetStorageObjectGuid(GUID targetStorageObjectGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/version/ibatis/VersionMappingMapper.java",
    "content": "package com.pinecone.hydra.version.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.version.entity.TitanVersionMapping;\nimport com.pinecone.hydra.storage.version.entity.VersionMapping;\nimport com.pinecone.hydra.storage.version.source.VersionMappingManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface VersionMappingMapper extends VersionMappingManipulator {\n@Insert(\"INSERT INTO `hydra_ucdn_version_mapping` ( `enable_version_guid`, `file_guid`,`version_guid`) VALUES (#{enableVersionGuid},#{fileGuid},#{versionGuid})\")\n    void insert(VersionMapping versionMapping);\n@Delete(\"DELETE FROM `hydra_ucdn_version_mapping` WHERE `enable_version_guid` = #{enableVersionGuid} AND `file_guid` = #{fileGuid} AND `version_guid` = #{versionGuid}\")\n    void remove(VersionMapping versionMapping);\n@Select(\"SELECT `enable_version_guid` AS enableVersionGuid, `file_guid` AS fileGuid, `version_guid` AS versionGuid FROM `hydra_ucdn_version_mapping` WHERE `file_guid` = #{fileGuid}\")\n    TitanVersionMapping queryVersionMapping(GUID fileGuid);\n    @Insert(\"UPDATE `hydra_ucdn_version_mapping` \"\n            + \"SET `enable_version_guid` = #{enableVersionGuid}, \"\n            + \"`version_guid` = #{versionGuid} \"\n            + \"WHERE `file_guid` = #{fileGuid}\")\n    void update(VersionMapping versionMapping);\n\n    @Select(\"SELECT `enable_version_guid` AS enableVersionGuid, `file_guid` AS fileGuid, `version_guid` AS versionGuid FROM `hydra_ucdn_version_mapping`\")\n    List<TitanVersionMapping> queryAllVersionMapper();\n    @Select(\"SELECT EXISTS(SELECT 1 FROM `hydra_ucdn_version_mapping` WHERE `enable_version_guid` = #{enableVersionGuid})\")\n    boolean 
isExistEnableVersionMapping(GUID enableVersionGuid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/version/ibatis/hydranium/VersionMappingDriver.java",
    "content": "package com.pinecone.hydra.version.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class VersionMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public VersionMappingDriver( Processum superiorProcess ) {\n        super( superiorProcess );\n    }\n\n    // Temp , TODO\n    public VersionMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, VersionMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new VersionMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/version/ibatis/hydranium/VersionMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.version.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.storage.version.source.VersionManipulator;\nimport com.pinecone.hydra.storage.version.source.VersionMappingManipulator;\nimport com.pinecone.hydra.storage.version.source.VersionMasterManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.version.ibatis.VersionMapper;\nimport com.pinecone.hydra.version.ibatis.VersionMappingMapper;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class VersionMasterManipulatorImpl implements VersionMasterManipulator {\n    @Resource\n    @Structure( type = VersionMapper.class )\n    VersionManipulator versionManipulator;\n\n    @Resource\n    @Structure( type = VersionMappingMapper.class )\n    VersionMappingManipulator versionMappingManipulator;\n\n    public VersionMasterManipulatorImpl() {\n\n    }\n\n    public VersionMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( VersionMasterManipulatorImpl.class, Map.of(), this );\n    }\n\n    @Override\n    public VersionManipulator getVersionManipulator() {\n        return this.versionManipulator;\n    }\n\n    @Override\n    public VersionMappingManipulator getVersionMappingManipulator() {\n        return this.versionMappingManipulator;\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/LineSegmentMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.source.LineSegmentManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n@IbatisDataAccessObject\npublic interface LineSegmentMapper extends LineSegmentManipulator {\n    @Insert(\"INSERT INTO `hydra_volume_line_segment` (`id_min`, `id_max`, `volume_guid`) VALUES ( #{idMin}, #{idMax}, #{volumeGuid} )\")\n    void insert(@Param(\"idMin\") int idMin, @Param(\"idMax\") int idMax, @Param(\"volumeGuid\") GUID volumeGuid );\n    @Select(\"SELECT `volume_guid` FROM `hydra_volume_line_segment` WHERE id > id_min AND id < id_max\")\n    GUID getVolumeGuid( int id );\n    @Delete(\"DELETE FROM hydra_volume_line_segment WHERE id_min = #{idMin} AND id_max = #{idMax}\")\n    void delete( @Param(\"idMin\") int idMin, @Param(\"idMax\") int idMax );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/MirroredVolumeMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.MirroredVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.mirrored.TitanLocalMirroredVolume;\nimport com.pinecone.hydra.storage.volume.source.MirroredVolumeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\n\n@IbatisDataAccessObject\npublic interface MirroredVolumeMapper extends MirroredVolumeManipulator, PrimeLogicVolumeMapper {\n    @Insert(\"INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `definition_capacity`, `used_size`, `quota_capacity`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{definitionCapacity}, #{usedSize}, #{quotaCapacity}, #{type}, #{extConfig} )\")\n    void insert( MirroredVolume mirroredVolume );\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    default TitanLocalMirroredVolume getMirroredVolume(GUID guid){\n        TitanLocalMirroredVolume mirroredVolume0 = this.getMirroredVolume0(guid);\n        mirroredVolume0.setMirroredVolumeManipulator( this );\n        return mirroredVolume0;\n    }\n\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `definition_capacity` AS definitionCapacity, `used_size` AS userdSize, `quota_capacity` AS quotaCapacity, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}\")\n    TitanLocalMirroredVolume getMirroredVolume0(GUID guid);\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/MountPointMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.MountPoint;\nimport com.pinecone.hydra.storage.volume.entity.TitanMountPoint;\nimport com.pinecone.hydra.storage.volume.source.MountPointManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Select;\n\n@IbatisDataAccessObject\n@Mapper\npublic interface MountPointMapper extends MountPointManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_volumes_mount_point` (`guid`, `volume_guid`, `create_time`, `update_time`, `name`, `mount_point`) VALUES (#{guid},#{volumeGuid},#{createTime},#{updateTime},#{name},#{mountPoint})\")\n    void insert( MountPoint mountPoint );\n    @Delete(\"DELETE FROM `hydra_uofs_volumes_mount_point` WHERE `guid` = #{guid}\")\n    void remove( GUID guid );\n    @Delete(\"DELETE FROM `hydra_uofs_volumes_mount_point` WHERE `volume_guid` = #{guid}\")\n    void removeByVolumeGuid( GUID guid );\n    default TitanMountPoint getMountPoint(GUID guid){\n        TitanMountPoint mountPoint0 = this.getMountPoint0( guid );\n        if ( mountPoint0 == null ){\n            return null;\n        }\n        mountPoint0.setMountPointManipulator( this );\n        return mountPoint0;\n    }\n    @Select(\"SELECT `id` AS enumId, `guid`, `volume_guid` AS volumeGuid, `create_time` AS createTime, `update_time` AS updateTime, `name`, `mount_point` AS mountPoint FROM `hydra_uofs_volumes_mount_point` WHERE `guid` = #{guid}\")\n    TitanMountPoint getMountPoint0(GUID guid);\n\n    default TitanMountPoint getMountPointByVolumeGuid( GUID guid ){\n        TitanMountPoint mountPoint = this.getMountPointByVolumeGuid0(guid);\n        if ( mountPoint == null ){\n            return null;\n        }\n        
mountPoint.setMountPointManipulator( this );\n        return mountPoint;\n    }\n    @Select(\"SELECT `id` AS enumId, `guid`, `volume_guid` AS volumeGuid, `create_time` AS createTime, `update_time` AS updateTime, `name`, `mount_point` AS mountPoint FROM `hydra_uofs_volumes_mount_point` WHERE `volume_guid` = #{guid}\")\n    TitanMountPoint getMountPointByVolumeGuid0( GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/PhysicalVolumeMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\nimport com.pinecone.hydra.storage.volume.entity.local.physical.TitanLocalPhysicalVolume;\nimport com.pinecone.hydra.storage.volume.source.PhysicalVolumeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface PhysicalVolumeMapper extends PhysicalVolumeManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{type}, #{extConfig} )\")\n    void insert( PhysicalVolume physicalVolume );\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    @Override\n    default TitanLocalPhysicalVolume getPhysicalVolume(GUID guid){\n        TitanLocalPhysicalVolume physicalVolume0 = this.getPhysicalVolume0( guid );\n        if(physicalVolume0 == null){\n            return null;\n        }\n        physicalVolume0.setPhysicalVolumeManipulator( this );\n        return physicalVolume0;\n    }\n\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`,  `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid} AND type = 'PhysicalVolume'\")\n    TitanLocalPhysicalVolume getPhysicalVolume0(GUID guid);\n\n    @Override\n    default TitanLocalPhysicalVolume getPhysicalVolumeByName( String name ){\n        TitanLocalPhysicalVolume physicalVolumeByName0 = this.getPhysicalVolumeByName0(name);\n       
 physicalVolumeByName0.setPhysicalVolumeManipulator( this );\n        return physicalVolumeByName0;\n    }\n\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`,  `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `name` = #{name}\")\n    TitanLocalPhysicalVolume getPhysicalVolumeByName0( String name );\n    default TitanLocalPhysicalVolume getSmallestCapacityPhysicalVolume(){\n        TitanLocalPhysicalVolume physicalVolume0 = this.getSmallestCapacityPhysicalVolume0();\n        physicalVolume0.setPhysicalVolumeManipulator( this );\n        return physicalVolume0;\n    }\n\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`,  `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE type = 'PhysicalVolume' ORDER BY ( `definition_capacity` - hydra_uofs_volumes.`used_size` ) ASC LIMIT 1\")\n    TitanLocalPhysicalVolume getSmallestCapacityPhysicalVolume0();\n\n    @Select(\"SELECT `logic_guid` FROM `hydra_volume_physical_logic` WHERE `physical_guid` = #{guid}\")\n    GUID getParent( GUID guid );\n\n\n    default List<Volume> queryAllPhysicalVolumes(){\n        List<TitanLocalPhysicalVolume> physicalVolumes = this.queryAllPhysicalVolumes0();\n        return new ArrayList<>(physicalVolumes);\n    }\n\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`,  `type`, `ext_config` AS extConfig FROM hydra_uofs_volumes WHERE type = 'PhysicalVolume'\")\n    List<TitanLocalPhysicalVolume> queryAllPhysicalVolumes0();\n\n    @Update(\"UPDATE `hydra_uofs_volumes` SET `create_time` = #{createTime}, `name` = #{name}, `used_size` = #{usedSize} WHERE `guid` = #{guid}\")\n    void update( PhysicalVolume physicalVolume );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/PrimeLogicVolumeMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport java.util.List;\n\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.source.LogicVolumeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\n@IbatisDataAccessObject\npublic interface PrimeLogicVolumeMapper extends LogicVolumeManipulator {\n    @Select(\"SELECT `guid` FROM `hydra_uofs_volumes` WHERE `name` = #{name}\")\n    List<GUID> getGuidsByName( String name );\n\n    @Select(\"SELECT `guid` FROM `hydra_uofs_volumes` WHERE `name` = #{name} AND `guid` = #{guid}\")\n    List<GUID > getGuidsByNameID( @Param(\"name\") String name, @Param(\"guid\") GUID guid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/SQLiteVolumeMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.source.SQLiteVolumeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n@IbatisDataAccessObject\npublic interface SQLiteVolumeMapper extends SQLiteVolumeManipulator {\n    @Insert(\"INSERT INTO `hydra_volume_sqlite_volume` (`physics_volume_guid`, `volume_guid`) VALUES ( #{physicsGuid}, #{volumeGuid} )\")\n    void insert(@Param(\"physicsGuid\") GUID physicsGuid, @Param(\"volumeGuid\") GUID volumeGuid );\n\n    @Select(\"SELECT `physics_volume_guid` FROM `hydra_volume_sqlite_volume` WHERE `volume_guid` = #{volumeGuid}\")\n    GUID getPhysicsGuid( GUID volumeGuid );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/SimpleVolumeMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.TitanLocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface SimpleVolumeMapper extends SimpleVolumeManipulator, PrimeLogicVolumeMapper {\n    @Insert(\"INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{type}, #{extConfig} )\")\n    void insert( SimpleVolume simpleVolume );\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    @Update(\"UPDATE `hydra_uofs_volumes` SET `create_time` = #{createTime}, `name` = #{name}, `used_size` = #{usedSize} WHERE `guid` = #{guid}\")\n    void update( SimpleVolume simpleVolume );\n\n    @Override\n    default TitanLocalSimpleVolume getSimpleVolume(GUID guid){\n        TitanLocalSimpleVolume simpleVolume0 = this.getSimpleVolume0( guid );\n        simpleVolume0.setSimpleVolumeManipulator( this );\n        return simpleVolume0;\n    }\n\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`,  `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}\")\n    TitanLocalSimpleVolume getSimpleVolume0(GUID guid);\n\n    @Insert(\"INSERT INTO `hydra_volume_physical_logic` 
(`logic_guid`, `physical_guid`) VALUES ( #{logicGuid}, #{physicalGuid} )\")\n    void extendLogicalVolume( @Param(\"logicGuid\") GUID logicGuid, @Param(\"physicalGuid\") GUID physicalGuid );\n\n    @Select(\"SELECT `physical_guid` FROM `hydra_volume_physical_logic` WHERE `logic_guid` = #{logicGuid}\")\n    List<GUID> listPhysicalVolume(GUID logicGuid );\n\n    default List<Volume> queryAllSimpleVolumes(){\n        List<TitanLocalSimpleVolume> titanLocalSimpleVolumes = this.queryAllSimpleVolumes0();\n        return new ArrayList<>(titanLocalSimpleVolumes);\n    }\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`,  `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE type = 'SimpleVolume'\")\n    List<TitanLocalSimpleVolume> queryAllSimpleVolumes0();\n\n    @Update(\"UPDATE `hydra_uofs_volumes` SET definition_capacity = #{definitionCapacity} WHERE guid = #{guid}\")\n    void updateDefinitionCapacity( @Param(\"guid\") GUID guid, @Param(\"definitionCapacity\") long definitionCapacity );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/SpannedVolumeMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\nimport com.pinecone.hydra.storage.volume.entity.local.spanned.TitanLocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface SpannedVolumeMapper extends SpannedVolumeManipulator, PrimeLogicVolumeMapper {\n    @Insert(\"INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`,  `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{type}, #{extConfig} )\")\n    void insert( SpannedVolume spannedVolume );\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    @Update(\"UPDATE `hydra_uofs_volumes` SET `create_time` = #{createTime}, `name` = #{name}, `used_size` = #{usedSize} WHERE `guid` = #{guid}\")\n    void update( SpannedVolume spannedVolume );\n\n    @Override\n    default TitanLocalSpannedVolume getSpannedVolume(GUID guid){\n        TitanLocalSpannedVolume spannedVolume0 = this.getSpannedVolume0( guid );\n        spannedVolume0.setSpannedVolumeManipulator( this );\n        return spannedVolume0;\n    }\n\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `definition_capacity` AS definitionCapacity, `used_size` AS userdSize, `quota_capacity` AS quotaCapacity, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}\")\n   
 TitanLocalSpannedVolume getSpannedVolume0(GUID guid);\n\n    default List<Volume> queryAllSpannedVolume(){\n        List<TitanLocalSpannedVolume> titanLocalSpannedVolumes = this.queryAllSpannedVolume0();\n        return new ArrayList<>(titanLocalSpannedVolumes);\n    }\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`, `definition_capacity` AS definitionCapacity, `used_size` AS usedSize, `quota_capacity` AS quotaCapacity, `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE type = 'SpannedVolume'\")\n    List<TitanLocalSpannedVolume> queryAllSpannedVolume0();\n\n    @Update(\"UPDATE `hydra_uofs_volumes` SET definition_capacity = #{definitionCapacity} WHERE guid = #{guid}\")\n    void updateDefinitionCapacity(@Param(\"guid\") GUID guid, @Param(\"definitionCapacity\") long definitionCapacity );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/StripedVolumeMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.StripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\nimport com.pinecone.hydra.storage.volume.entity.local.striped.TitanLocalStripedVolume;\nimport com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface StripedVolumeMapper extends StripedVolumeManipulator, PrimeLogicVolumeMapper {\n    @Insert(\"INSERT INTO `hydra_uofs_volumes` (`guid`, `create_time`, `update_time`, `name`, `type`, `ext_config`) VALUES ( #{guid}, #{createTime}, #{updateTime}, #{name}, #{type}, #{extConfig} )\")\n    void insert( StripedVolume stripedVolume );\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes` where `guid` = #{guid}\")\n    void remove( GUID guid );\n\n    @Update(\"UPDATE `hydra_uofs_volumes` SET `create_time` = #{createTime}, `name` = #{name}, `used_size` = #{usedSize} WHERE `guid` = #{guid}\")\n    void update( StripedVolume stripedVolume );\n\n    @Override\n    default TitanLocalStripedVolume getStripedVolume(GUID guid){\n        TitanLocalStripedVolume stripedVolume0 = this.getStripedVolume0( guid );\n        stripedVolume0.setStripedVolumeManipulator( this );\n        return stripedVolume0;\n    }\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`,  `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}\")\n    TitanLocalStripedVolume getStripedVolume0(GUID guid);\n\n    default List<Volume> 
queryAllStripedVolume(){\n        List<TitanLocalStripedVolume> titanLocalStripedVolumes = this.queryAllStripedVolume0();\n        return new ArrayList<>(titanLocalStripedVolumes);\n    }\n    @Select(\"SELECT `id` AS enumId, `guid`, `create_time` AS createTime, `update_time` AS updateTime, `name`,  `type`, `ext_config` AS extConfig FROM `hydra_uofs_volumes` WHERE type = 'StripedVolume'\")\n    List<TitanLocalStripedVolume> queryAllStripedVolume0();\n\n    @Update(\"UPDATE `hydra_uofs_volumes` SET definition_capacity = #{definitionCapacity} WHERE guid = #{guid}\")\n    void updateDefinitionCapacity(@Param(\"guid\") GUID guid, @Param(\"definitionCapacity\") long definitionCapacity );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeAllocateMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.source.VolumeAllocateManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n    @IbatisDataAccessObject\n    public interface VolumeAllocateMapper extends VolumeAllocateManipulator {\n        @Insert(\"INSERT INTO `hydra_volume_allocate` (`object_guid`, `child_volume_guid`, `parent_volume_guid`) VALUES ( #{objectGuid},#{childVolumeGuid},#{parentVolumeGuid} )\")\n        void insert(@Param(\"objectGuid\") GUID objectGuid, @Param(\"childVolumeGuid\") GUID childVolumeGuid, @Param(\"parentVolumeGuid\") GUID parentVolumeGuid );\n        @Select(\"SELECT `child_volume_guid` FROM `hydra_volume_allocate` WHERE `object_guid` = #{objectGuid} AND `parent_volume_guid` = #{parentGuid}\")\n        GUID get( @Param(\"objectGuid\") GUID objectGuid, @Param(\"parentGuid\") GUID parentGuid );\n    }\n\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeCachePathMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n@IbatisDataAccessObject\npublic interface VolumeCachePathMapper extends TriePathCacheManipulator {\n    @Insert(\"INSERT INTO `hydra_volume_node_cache_path` (`path`, `guid`) VALUES ( #{path}, #{guid} )\")\n    void insert(@Param(\"guid\") GUID guid, @Param(\"path\") String path );\n\n    @Delete(\"DELETE FROM `hydra_volume_node_cache_path` WHERE `guid`=#{guid}\")\n    void remove( GUID guid );\n\n    @Select(\"SELECT `path` FROM `hydra_volume_node_cache_path` WHERE `guid`=#{guid}\")\n    String getPath( GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_volume_node_cache_path` WHERE `path`=#{path}\")\n    GUID getNode( String path );\n\n    @Select(\"SELECT `guid` FROM `hydra_volume_node_cache_path` WHERE `path`=#{path}\")\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeCapacityMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.entity.TitanVolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\n\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@IbatisDataAccessObject\npublic interface VolumeCapacityMapper extends VolumeCapacityManipulator {\n    @Update(\"UPDATE `hydra_uofs_volumes` SET `definition_capacity` = #{definitionCapacity}, `used_size` = #{usedSize}, `quota_capacity` = #{quotaCapacity} WHERE `guid` = #{volumeGuid}\")\n    void insert( VolumeCapacity64 volumeCapacity );\n\n    void remove( GUID guid );\n\n    @Select(\"SELECT `guid` AS volumeGuid, `definition_capacity` AS definitionCapacity, `used_size` AS usedSize, `quota_capacity` AS quotaCapacity FROM `hydra_uofs_volumes` WHERE `guid` = #{guid}\")\n    TitanVolumeCapacity64 getVolumeCapacity(GUID guid);\n\n    @Update(\"UPDATE `hydra_uofs_volumes` SET `used_size` = #{usedSize} WHERE `guid` = #{guid}\")\n    void update( GUID guid, long usedSize );\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeOwnerMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Update;\n\n@IbatisDataAccessObject\npublic interface VolumeOwnerMapper extends TireOwnerManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_volumes_tree` (`guid`) VALUES ( #{guid} )\")\n    void insertRootNode(@Param(\"guid\") GUID guid );\n\n    @Insert( \"INSERT INTO `hydra_uofs_volumes_tree` (`guid`, `parent_guid`) VALUES (#{targetGuid}, #{parentGuid})\" )\n    void insert( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n\n    @Update( \"UPDATE `hydra_uofs_volumes_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void update( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Update( \"UPDATE `hydra_uofs_volumes_tree` SET `guid` = #{targetGuid}, `parent_guid` = #{parentGuid} WHERE `guid` = #{targetGuid}\" )\n    void updateParentGuid( @Param(\"targetGuid\") GUID targetGuid, @Param(\"parentGuid\") GUID parentGUID );\n\n    @Delete( \"DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid`=#{subordinateGuid} \" )\n    void remove( @Param(\"subordinateGuid\") GUID subordinateGuid );\n\n    @Delete( \"DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid`=#{subordinateGuid} \" )\n    void removeBySubordinate( GUID subordinateGuid );\n\n//    @Delete(\"DELETE FROM `hydra_registry_node_owner` WHERE `owner_guid`=#{ownerGuid}\")\n//    void removeByOwner(GUID ownerGuid);\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/VolumeTreeMapper.java",
    "content": "package com.pinecone.hydra.volume.ibatis;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.uoi.UOI;\nimport com.pinecone.hydra.storage.volume.source.VolumeTreeManipulator;\nimport com.pinecone.hydra.unit.imperium.GUIDImperialTrieNode;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport java.util.List;\n@IbatisDataAccessObject\npublic interface VolumeTreeMapper extends VolumeTreeManipulator {\n    @Insert(\"INSERT INTO `hydra_uofs_volumes_tree` (`guid`) VALUES ( #{guid} )\")\n    void insertRootNode(@Param(\"guid\")  GUID guid);\n\n    @Override\n    default void insert ( TireOwnerManipulator ownerManipulator, GUIDImperialTrieNode node ){\n        this.insertTreeNode( node.getGuid(), node.getType(), node.getAttributesGUID(), node.getNodeMetadataGUID() );\n        ownerManipulator.insertRootNode( node.getGuid() );\n    }\n\n    @Insert(\"INSERT INTO `hydra_volume_nodes` (`guid`, `type`,`base_data_guid`,`node_meta_guid`) VALUES (#{guid},#{type},#{baseDataGuid},#{nodeMetaGuid})\")\n    void insertTreeNode( @Param(\"guid\") GUID guid, @Param(\"type\") UOI type, @Param(\"baseDataGuid\") GUID baseDataGuid, @Param(\"nodeMetaGuid\") GUID nodeMetaGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `type`, base_data_guid AS baseDataGUID, node_meta_guid AS nodeMetadataGUID FROM hydra_volume_nodes WHERE guid=#{guid}\")\n    GUIDImperialTrieNode getNodeExtendsFromMeta(GUID guid );\n\n    @Select(\"SELECT COUNT( `id` ) FROM hydra_volume_nodes WHERE guid=#{guid}\")\n    boolean contains( GUID key );\n\n    @Override\n    default GUIDImperialTrieNode getNode(GUID guid ) {\n        GUIDImperialTrieNode 
node = this.getNodeExtendsFromMeta( guid );\n        if( node == null ){\n            return node;\n        }\n        List<GUID > parent = this.fetchParentGuids( guid );\n        node.setParentGUID( parent );\n        return node;\n    }\n\n    @Select(\"SELECT id, guid, parent_guid FROM hydra_uofs_volumes_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    GUIDImperialTrieNode getTreeNodeOnly(@Param(\"guid\") GUID guid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT count( * ) FROM hydra_uofs_volumes_tree WHERE guid = #{guid} AND parent_guid = #{parentGuid}\")\n    long countNode( GUID guid, GUID parentGuid );\n\n\n    @Override\n    default void purge( GUID guid ) {\n        this.removeNodeMeta( guid );\n        this.removeTreeNode( guid );\n    }\n\n    @Delete(\"DELETE FROM `hydra_volume_nodes` WHERE `guid`=#{guid}\")\n    void removeNodeMeta( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid` = #{guid}\")\n    void removeTreeNode( @Param(\"guid\") GUID guid );\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes_tree` WHERE `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeByParentGuid( @Param(\"parent_guid\") GUID parentGuid );\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid` = #{guid} AND `parent_guid` = #{parent_guid}\")\n    void removeTreeNodeYoke( @Param(\"guid\") GUID guid, @Param(\"parent_guid\") GUID parentGuid );\n\n\n    @Delete(\"DELETE FROM `hydra_uofs_volumes_tree` WHERE `guid`=#{chileGuid} AND `parent_guid`=#{parentGuid}\")\n    void removeInheritance( @Param(\"chileGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `id` AS `enumId`, `guid`, `parent_guid` AS parentGuid FROM `hydra_uofs_volumes_tree` WHERE `parent_guid`=#{guid}\")\n    List<GUIDImperialTrieNode> getChildren(GUID guid );\n\n    @Select(\"SELECT `guid` FROM `hydra_uofs_volumes_tree` WHERE `parent_guid` = #{parentGuid}\")\n    List<GUID > 
fetchChildrenGuids( @Param(\"parentGuid\") GUID parentGuid );\n\n    @Select(\"SELECT `parent_guid` FROM `hydra_uofs_volumes_tree` WHERE `guid`=#{guid}\")\n    List<GUID > fetchParentGuids( GUID guid );\n\n    @Update(\"UPDATE `hydra_volume_nodes` SET `type` = #{type} WHERE guid=#{guid}\")\n    void updateType( UOI type , GUID guid );\n\n    @Select( \"SELECT guid FROM hydra_uofs_volumes_tree WHERE parent_guid IS NULL \" )\n    List<GUID > fetchRoot();\n\n    @Override\n    @Select( \"SELECT COUNT( `guid` ) FROM hydra_uofs_volumes_tree WHERE `parent_guid` IS NULL AND guid = #{guid}\" )\n    boolean isRoot( GUID guid );\n\n    @Update(\"UPDATE hydra_uofs_volumes_tree SET parent_guid = #{parentGuid} WHERE guid = #{childGuid}\")\n    void addChild( @Param(\"childGuid\") GUID childGuid, @Param(\"parentGuid\") GUID parentGuid );\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/hydranium/VolumeMappingDriver.java",
    "content": "package com.pinecone.hydra.volume.ibatis.hydranium;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class VolumeMappingDriver extends ArchMappingDriver implements KOIMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    public VolumeMappingDriver( Processum superiorProcess ) {\n        super(superiorProcess);\n    }\n\n    public VolumeMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, VolumeMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mKOIMasterManipulator = new VolumeMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/hydranium/VolumeMasterManipulatorImpl.java",
    "content": "package com.pinecone.hydra.volume.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.storage.volume.source.LogicVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.MirroredVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.MountPointManipulator;\nimport com.pinecone.hydra.storage.volume.source.PhysicalVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.SimpleVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.SpannedVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.SQLiteVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.StripedVolumeManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeAllocateManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeCapacityManipulator;\nimport com.pinecone.hydra.storage.volume.source.VolumeMasterManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.volume.ibatis.MirroredVolumeMapper;\nimport com.pinecone.hydra.volume.ibatis.MountPointMapper;\nimport com.pinecone.hydra.volume.ibatis.PhysicalVolumeMapper;\nimport com.pinecone.hydra.volume.ibatis.PrimeLogicVolumeMapper;\nimport com.pinecone.hydra.volume.ibatis.SimpleVolumeMapper;\nimport com.pinecone.hydra.volume.ibatis.SpannedVolumeMapper;\nimport com.pinecone.hydra.volume.ibatis.SQLiteVolumeMapper;\nimport com.pinecone.hydra.volume.ibatis.StripedVolumeMapper;\nimport com.pinecone.hydra.volume.ibatis.VolumeAllocateMapper;\nimport com.pinecone.hydra.volume.ibatis.VolumeCapacityMapper;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n@Component\npublic class VolumeMasterManipulatorImpl implements VolumeMasterManipulator {\n    @Resource\n    @Structure( type = VolumeMasterTreeManipulatorImpl.class )\n    
KOISkeletonMasterManipulator skeletonMasterManipulator;\n\n    @Resource\n    @Structure( type = MirroredVolumeMapper.class )\n    MirroredVolumeManipulator mirroredVolumeManipulator;\n\n    @Resource\n    @Structure( type = MountPointMapper.class )\n    MountPointManipulator     mountPointManipulator;\n\n    @Resource\n    @Structure( type = SimpleVolumeMapper.class )\n    SimpleVolumeManipulator   simpleVolumeManipulator;\n\n    @Resource\n    @Structure( type = SpannedVolumeMapper.class )\n    SpannedVolumeManipulator  spannedVolumeManipulator;\n\n    @Resource\n    @Structure( type = StripedVolumeMapper.class )\n    StripedVolumeManipulator  stripedVolumeManipulator;\n\n    @Resource\n    @Structure( type = VolumeCapacityMapper.class )\n    VolumeCapacityManipulator volumeCapacityManipulator;\n\n    @Resource\n    @Structure( type = PhysicalVolumeMapper.class )\n    PhysicalVolumeManipulator physicalVolumeManipulator;\n\n    @Resource\n    @Structure( type = VolumeAllocateMapper.class )\n    VolumeAllocateManipulator volumeAllocateManipulator;\n\n    @Resource\n    @Structure( type = SQLiteVolumeMapper.class )\n    SQLiteVolumeManipulator   sqliteVolumeManipulator;\n\n    @Resource\n    @Structure( type = PrimeLogicVolumeMapper.class )\n    protected LogicVolumeManipulator primeLogicVolumeManipulator;\n\n    public VolumeMasterManipulatorImpl() {\n\n    }\n\n    public VolumeMasterManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( VolumeMasterManipulatorImpl.class, Map.of(), this );\n        this.skeletonMasterManipulator = new VolumeMasterTreeManipulatorImpl( driver );\n    }\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public MirroredVolumeManipulator getMirroredVolumeManipulator() {\n        return this.mirroredVolumeManipulator;\n    }\n\n    @Override\n    public MountPointManipulator getMountPointManipulator() 
{\n        return this.mountPointManipulator;\n    }\n\n    @Override\n    public SimpleVolumeManipulator getSimpleVolumeManipulator() {\n        return this.simpleVolumeManipulator;\n    }\n\n    @Override\n    public SpannedVolumeManipulator getSpannedVolumeManipulator() {\n        return this.spannedVolumeManipulator;\n    }\n\n    @Override\n    public StripedVolumeManipulator getStripedVolumeManipulator() {\n        return this.stripedVolumeManipulator;\n    }\n\n    @Override\n    public VolumeCapacityManipulator getVolumeCapacityManipulator() {\n        return this.volumeCapacityManipulator;\n    }\n\n    @Override\n    public PhysicalVolumeManipulator getPhysicalVolumeManipulator() {\n        return this.physicalVolumeManipulator;\n    }\n\n    @Override\n    public VolumeAllocateManipulator getVolumeAllocateManipulator() {\n        return this.volumeAllocateManipulator;\n    }\n\n    @Override\n    public SQLiteVolumeManipulator getSQLiteVolumeManipulator() {\n        return this.sqliteVolumeManipulator;\n    }\n\n    @Override\n    public LogicVolumeManipulator getPrimeLogicVolumeManipulator() {\n        return this.primeLogicVolumeManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/java/com/pinecone/hydra/volume/ibatis/hydranium/VolumeMasterTreeManipulatorImpl.java",
    "content": "package com.pinecone.hydra.volume.ibatis.hydranium;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.volume.ibatis.VolumeCachePathMapper;\nimport com.pinecone.hydra.volume.ibatis.VolumeOwnerMapper;\nimport com.pinecone.hydra.volume.ibatis.VolumeTreeMapper;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n@Component\npublic class VolumeMasterTreeManipulatorImpl implements TreeMasterManipulator {\n    @Resource\n    @Structure( type = VolumeTreeMapper.class )\n    TrieTreeManipulator trieTreeManipulator;\n\n    @Resource\n    @Structure( type = VolumeCachePathMapper.class )\n    TriePathCacheManipulator triePathCacheManipulator;\n\n    @Resource\n    @Structure( type = VolumeOwnerMapper.class )\n    TireOwnerManipulator tireOwnerManipulator;\n\n    public VolumeMasterTreeManipulatorImpl() {\n\n    }\n\n    public VolumeMasterTreeManipulatorImpl( KOIMappingDriver driver ) {\n        driver.autoConstruct( VolumeMasterTreeManipulatorImpl.class, Map.of(), this );\n    }\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.tireOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.triePathCacheManipulator;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/resources/mapper/kernel/task/InstanceNodeMapper.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE mapper\n        PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\"\n        \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">\n\n<mapper namespace=\"com.pinecone.hydra.task.ibatis.InstanceNodeMapper\">\n\n    <sql id=\"BaseFields\">\n        guid,\n        task_guid,\n        name,\n        task_name,\n        business_time,\n        expect_time,\n        fire_time,\n        start_time,\n        finish_time,\n        schedule_host_time,\n        submit_time,\n        schedule_time,\n        priority,\n        image_path,\n        actually_priority,\n        run_status,\n        schedule_cycle,\n        schedule_type,\n        task_type,\n        dry_run,\n        run_count,\n        sequence_cnt,\n        retry_cnt,\n        latest_start_time,\n        latest_end_time,\n        error_cause,\n        processor_name,\n        create_time,\n        update_time\n    </sql>\n\n    <sql id=\"BaseColumns\">\n        #{guid},\n        #{taskGuid},\n        #{instanceName},\n        #{taskName},\n        #{businessTime},\n        #{expectTime},\n        #{fireTime},\n        #{startTime},\n        #{finishTime},\n        #{scheduleHostTime},\n        #{submitTime},\n        #{scheduleTime},\n        #{priority},\n        #{imagePath},\n        #{actuallyPriority},\n        #{runStatus},\n        #{scheduleCycle},\n        #{scheduleType},\n        #{taskType},\n        #{dryRun},\n        #{runCount},\n        #{sequenceCnt},\n        #{retryCnt},\n        #{lastStartTime},\n        #{lastEndTime},\n        #{errorCause},\n        #{processorName},\n        #{createTime},\n        #{updateTime}\n    </sql>\n\n    <!-- ========================================================== -->\n    <!-- ResultMap -->\n    <!-- ========================================================== -->\n\n    <resultMap id=\"TableIndex64MetaResult\" type=\"com.pinecone.slime.meta.TableIndex64Meta\">\n        <result column=\"min_id\" 
property=\"minId\"/>\n        <result column=\"max_id\" property=\"maxId\"/>\n    </resultMap>\n\n    <resultMap id=\"InstanceResultMap\" type=\"com.pinecone.hydra.task.kom.instance.GenericInstanceEntry\">\n\n        <result column=\"guid\"                property=\"guid\"/>\n        <result column=\"task_guid\"           property=\"taskGuid\"/>\n        <result column=\"name\"                property=\"instanceName\"/>\n        <result column=\"task_name\"           property=\"taskName\"/>\n        <result column=\"business_time\"       property=\"businessTime\"/>\n        <result column=\"expect_time\"         property=\"expectTime\"/>\n        <result column=\"fire_time\"           property=\"fireTime\"/>\n        <result column=\"start_time\"          property=\"startTime\"/>\n        <result column=\"finish_time\"         property=\"finishTime\"/>\n        <result column=\"schedule_host_time\"  property=\"scheduleHostTime\"/>\n        <result column=\"submit_time\"         property=\"submitTime\"/>\n        <result column=\"schedule_time\"       property=\"scheduleTime\"/>\n        <result column=\"priority\"            property=\"priority\"/>\n        <result column=\"image_path\"          property=\"imagePath\"/>\n        <result column=\"actually_priority\"   property=\"actuallyPriority\"/>\n        <result column=\"run_status\"          property=\"runStatus\"/>\n        <result column=\"schedule_cycle\"      property=\"scheduleCycle\"/>\n        <result column=\"schedule_type\"       property=\"scheduleType\"/>\n        <result column=\"task_type\"           property=\"taskType\"/>\n        <result column=\"dry_run\"             property=\"dryRun\"/>\n        <result column=\"run_count\"           property=\"runCount\"/>\n        <result column=\"sequence_cnt\"        property=\"sequenceCnt\"/>\n        <result column=\"retry_cnt\"           property=\"retryCnt\"/>\n        <result column=\"latest_start_time\"   property=\"lastStartTime\"/>\n        
<result column=\"latest_end_time\"     property=\"lastEndTime\"/>\n        <result column=\"error_cause\"         property=\"errorCause\"/>\n        <result column=\"processor_name\"      property=\"processorName\"/>\n        <result column=\"create_time\"         property=\"createTime\"/>\n        <result column=\"update_time\"         property=\"updateTime\"/>\n\n    </resultMap>\n\n    <!-- ========================================================== -->\n    <!-- Insert -->\n    <!-- ========================================================== -->\n\n    <insert id=\"insert\" parameterType=\"com.pinecone.hydra.task.kom.instance.InstanceEntry\">\n\n        INSERT INTO hydra_task_instances\n        (\n        <include refid=\"BaseFields\"/>\n        )\n        VALUES\n        (\n        <include refid=\"BaseColumns\"/>\n        )\n\n    </insert>\n\n    <!-- ========================================================== -->\n    <!-- Update -->\n    <!-- ========================================================== -->\n\n    <update id=\"update\" parameterType=\"com.pinecone.hydra.task.kom.instance.InstanceEntry\">\n\n        UPDATE hydra_task_instances\n        SET\n            task_guid           = #{taskGuid},\n            name                = #{instanceName},\n            task_name           = #{taskName},\n            business_time       = #{businessTime},\n            expect_time         = #{expectTime},\n            fire_time           = #{fireTime},\n            start_time          = #{startTime},\n            finish_time         = #{finishTime},\n            schedule_host_time  = #{scheduleHostTime},\n            submit_time         = #{submitTime},\n            schedule_time       = #{scheduleTime},\n            priority            = #{priority},\n            image_path          = #{imagePath},\n            actually_priority   = #{actuallyPriority},\n            run_status          = #{runStatus},\n            schedule_cycle      = #{kernelScheduleCycle},\n      
      schedule_type       = #{kernelScheduleType},\n            task_type           = #{taskType},\n            dry_run             = #{dryRun},\n            run_count           = #{runCount},\n            sequence_cnt        = #{sequenceCnt},\n            retry_cnt           = #{retryCnt},\n            latest_start_time   = #{lastStartTime},\n            latest_end_time     = #{lastEndTime},\n            error_cause         = #{errorCause},\n            processor_name      = #{processorName},\n            update_time         = #{updateTime}\n        WHERE guid = #{guid}\n\n    </update>\n\n    <!-- ========================================================== -->\n    <!-- Query By GUID -->\n    <!-- ========================================================== -->\n\n    <select id=\"queryByGuid0\" parameterType=\"com.pinecone.framework.util.id.GUID\" resultMap=\"InstanceResultMap\">\n\n        SELECT\n        <include refid=\"BaseFields\"/>\n        FROM hydra_task_instances\n        WHERE guid = #{guid}\n\n    </select>\n\n\n    <select id=\"countInstance\" resultType=\"int\">\n\n        SELECT COUNT(*)\n        FROM hydra_task_instances\n\n    </select>\n\n    <select id=\"countInstanceByName\" resultType=\"long\">\n\n        SELECT COUNT(*)\n        FROM hydra_task_instances\n        WHERE name = #{name}\n\n    </select>\n\n    <select id=\"fetchInstances0\" resultMap=\"InstanceResultMap\">\n\n        SELECT\n        <include refid=\"BaseFields\"/>\n        FROM hydra_task_instances\n        LIMIT #{offset}, #{pageSize}\n\n    </select>\n\n    <select id=\"queryByTaskGuid0\" resultMap=\"InstanceResultMap\">\n\n        SELECT\n        <include refid=\"BaseFields\"/>\n        FROM hydra_task_instances\n        WHERE task_guid = #{taskGuid}\n        LIMIT #{offset}, #{pageSize}\n\n    </select>\n\n    <select id=\"countInstanceByTaskGuid\" resultType=\"long\">\n\n        SELECT COUNT(*)\n        FROM hydra_task_instances\n        WHERE task_guid = #{taskGuid}\n\n    
</select>\n\n    <select id=\"findLastExecuted0\" resultMap=\"InstanceResultMap\">\n\n        SELECT\n        <include refid=\"BaseFields\"/>\n        FROM hydra_task_instances\n        WHERE task_guid = #{taskGuid}\n        AND business_time = #{bizTime}\n        ORDER BY run_count DESC\n        LIMIT 1\n\n    </select>\n\n    <select id=\"selectSchedulableIdRange\" resultMap=\"TableIndex64MetaResult\">\n        SELECT\n            MIN( `id` ) AS min_id,\n            MAX( `id` ) AS max_id\n        FROM `hydra_task_instances`\n        WHERE\n        `run_status` IN\n        <foreach collection=\"runStatuses\" item=\"runStatus\" open=\"(\" separator=\",\" close=\")\">\n            #{runStatus}\n        </foreach>\n\n          AND (\n            `expect_time` IS NULL\n             OR `expect_time` <![CDATA[ <= ]]> #{targetTime}\n          )\n        <if test=\"actuallyPriority != null\">\n            AND `actually_priority` = #{actuallyPriority}\n        </if>\n        AND schedule_type = 'Cycle'\n    </select>\n\n    <select id=\"fetchSchedulableInstances0\" resultMap=\"InstanceResultMap\">\n        SELECT\n            `id`,\n            <include refid=\"BaseFields\"/>\n        FROM `hydra_task_instances`\n        WHERE `id` <![CDATA[ >= ]]> #{idMin}\n          AND `id` <![CDATA[ <= ]]> #{idMax}\n          AND `run_status` IN\n        <foreach collection=\"runStatuses\" item=\"runStatus\" open=\"(\" separator=\",\" close=\")\">\n            #{runStatus}\n        </foreach>\n          AND (\n              `expect_time` IS NULL\n              OR `expect_time` <![CDATA[ <= ]]> #{targetTime}\n          )\n        <if test=\"actuallyPriority != null\">\n            AND `actually_priority` = #{actuallyPriority}\n        </if>\n        AND schedule_type = 'Cycle'\n        ORDER BY actually_priority DESC, id ASC\n    </select>\n\n\n</mapper>"
  },
  {
    "path": "Hydra/hydra-kom-default-driver/src/main/resources/mapper/kernel/task/TaskNodeMapper.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE mapper\n        PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\"\n        \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">\n\n<mapper namespace=\"com.pinecone.hydra.task.ibatis.TaskNodeMapper\">\n\n    <!-- ========================================================== -->\n    <!-- ResultMap -->\n    <!-- ========================================================== -->\n\n    <resultMap id=\"TaskNodeMap\" type=\"com.pinecone.hydra.task.kom.entity.GenericTaskElement\">\n        <id     column=\"id\"                   property=\"enumId\"/>\n\n        <result column=\"guid\"                 property=\"guid\"/>\n        <result column=\"name\"                 property=\"name\"/>\n        <result column=\"schedule_cron\"        property=\"scheduleCron\"/>\n        <result column=\"image_path\"           property=\"imagePath\"/>\n        <result column=\"type\"                 property=\"type\"/>\n        <result column=\"resource_type\"        property=\"resourceType\"/>\n        <result column=\"deployment_method\"    property=\"deploymentMethod\"/>\n        <result column=\"priority\"             property=\"priority\"/>\n        <result column=\"actually_priority\"    property=\"actuallyPriority\"/>\n        <result column=\"dry_run\"              property=\"dryRun\"/>\n        <result column=\"schedule_cycle\"       property=\"scheduleCycle\"/>\n        <result column=\"schedule_type\"        property=\"scheduleType\"/>\n        <result column=\"schedule_start_time\"  property=\"scheduleStartTime\"/>\n        <result column=\"schedule_end_time\"    property=\"scheduleEndTime\"/>\n        <result column=\"next_schedule_time\"   property=\"nextScheduleTime\"/>\n        <result column=\"processor_name\"       property=\"processorName\"/>\n        <result column=\"enable\"               property=\"enable\"/>\n        <result column=\"create_time\"          property=\"createTime\"/>\n        <result 
column=\"update_time\"          property=\"updateTime\"/>\n\n    </resultMap>\n\n    <!-- ========================================================== -->\n    <!-- Insert -->\n    <!-- ========================================================== -->\n\n    <insert id=\"insert\" parameterType=\"com.pinecone.hydra.task.kom.entity.TaskElement\">\n\n        INSERT INTO hydra_task_task_node\n        (\n            guid,\n            name,\n            schedule_cron,\n            image_path,\n            type,\n            resource_type,\n            deployment_method,\n            priority,\n            actually_priority,\n            dry_run,\n            schedule_cycle,\n            schedule_type,\n            processor_name,\n            enable,\n            create_time,\n            update_time\n        )\n        VALUES\n            (\n                #{guid},\n                #{name},\n                #{scheduleCron},\n                #{imagePath},\n                #{type},\n                #{resourceType},\n                #{deploymentMethod},\n                #{priority},\n                #{actuallyPriority},\n                #{dryRun},\n                #{scheduleCycle},\n                #{scheduleType},\n                #{processorName},\n                #{enable},\n                #{createTime},\n                #{updateTime}\n            )\n\n    </insert>\n\n    <!-- ========================================================== -->\n    <!-- Update -->\n    <!-- ========================================================== -->\n\n    <update id=\"update\" parameterType=\"com.pinecone.hydra.task.kom.entity.TaskElement\">\n        UPDATE hydra_task_task_node\n        SET\n            name                = #{name},\n            schedule_cron       = #{scheduleCron},\n            image_path          = #{imagePath},\n            type                = #{type},\n            resource_type       = #{resourceType},\n            deployment_method   = #{deploymentMethod},\n     
       priority            = #{priority},\n            actually_priority   = #{actuallyPriority},\n            dry_run             = #{dryRun},\n            schedule_cycle      = #{scheduleCycle},\n            schedule_type       = #{scheduleType},\n            schedule_start_time = #{scheduleStartTime},\n            schedule_end_time   = #{scheduleEndTime},\n            next_schedule_time  = #{nextScheduleTime},\n            processor_name      = #{processorName},\n            enable              = #{enable},\n            update_time         = #{updateTime}\n        WHERE guid = #{guid}\n\n    </update>\n\n    <!-- ========================================================== -->\n    <!-- Select by GUID -->\n    <!-- ========================================================== -->\n\n    <select id=\"getTaskNode0\" parameterType=\"com.pinecone.framework.util.id.GUID\" resultMap=\"TaskNodeMap\">\n\n        SELECT\n            id,\n            guid,\n            name,\n            schedule_cron,\n            image_path,\n            type,\n            resource_type,\n            deployment_method,\n            priority,\n            actually_priority,\n            dry_run,\n            schedule_cycle,\n            schedule_type,\n            processor_name,\n            enable,\n            create_time,\n            update_time\n        FROM hydra_task_task_node\n        WHERE guid = #{guid}\n\n    </select>\n\n    <!-- ========================================================== -->\n    <!-- Select by Name -->\n    <!-- ========================================================== -->\n\n    <select id=\"fetchTaskNodeByName0\" resultMap=\"TaskNodeMap\">\n\n        SELECT\n            id,\n            guid,\n            name,\n            schedule_cron,\n            image_path,\n            type,\n            resource_type,\n            deployment_method,\n            priority,\n            actually_priority,\n            dry_run,\n            schedule_cycle,\n            
schedule_type,\n            processor_name,\n            enable,\n            create_time,\n            update_time\n        FROM hydra_task_task_node\n        WHERE name = #{name}\n\n    </select>\n\n\n\n    <select id=\"selectSchedulableIdRange\" resultType=\"com.pinecone.slime.meta.TableIndex64Meta\">\n        SELECT\n            MIN(id) AS minId,\n            MAX(id) AS maxId\n        FROM hydra_task_task_node\n        WHERE enable = 1\n        AND schedule_type = 'Cycle'\n        AND schedule_cycle IN\n        <foreach collection=\"cycles\" item=\"cycle\" open=\"(\" separator=\",\" close=\")\">\n            #{cycle}\n        </foreach>\n        AND (\n            next_schedule_time IS NULL\n            OR next_schedule_time &lt;= #{targetTime}\n        )\n    </select>\n\n    <select id=\"fetchSchedulableTasksInRange0\" resultMap=\"TaskNodeMap\">\n        SELECT\n            id,\n            guid,\n            owned_service_guid,\n            name,\n            schedule_cron,\n            image_path,\n            type,\n            resource_type,\n            deployment_method,\n            priority,\n            actually_priority,\n            dry_run,\n            schedule_cycle,\n            schedule_type,\n            schedule_start_time,\n            schedule_end_time,\n            enable,\n            next_schedule_time,\n            processor_name,\n            create_time,\n            update_time\n        FROM\n            hydra_task_task_node\n        WHERE\n            id &gt;= #{idMin}\n            AND id &lt;= #{idMax}\n            AND enable = 1\n            AND schedule_type = 'Cycle'\n            AND schedule_cycle IN\n            <foreach collection=\"cycles\" item=\"cycle\" open=\"(\" separator=\",\" close=\")\">\n                #{cycle}\n            </foreach>\n            AND (\n                next_schedule_time IS NULL\n                OR next_schedule_time &lt;= #{targetTime}\n            )\n            ORDER BY actually_priority DESC, 
id ASC\n\n    </select>\n\n\n    <sql id=\"TaskNodeColumns\">\n    id,\n    guid,\n    owned_service_guid,\n    name,\n    schedule_cron,\n    image_path,\n    type,\n    resource_type,\n    deployment_method,\n    priority,\n    actually_priority,\n    dry_run,\n    schedule_cycle,\n    schedule_type,\n    schedule_start_time,\n    schedule_end_time,\n    enable,\n    next_schedule_time,\n    create_time,\n    update_time\n    </sql>\n\n    <select id=\"listPage\" resultMap=\"TaskNodeMap\">\n        SELECT\n        <include refid=\"TaskNodeColumns\"/>\n        FROM hydra_task_task_node\n        ORDER BY id DESC\n        LIMIT #{pageSize} OFFSET #{offset}\n    </select>\n</mapper>"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.sdk.grpc</groupId>\n    <artifactId>hydra-lib-grpc-service-sdk</artifactId>\n    <version>1.2.1</version>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n        <grpc.version>1.62.2</grpc.version>\n        <protobuf.version>4.28.2</protobuf.version>\n        <protoc.version>4.28.2</protoc.version>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n\n\n        <!-- gRPC -->\n        <dependency>\n            <groupId>io.grpc</groupId>\n            <artifactId>grpc-netty-shaded</artifactId>\n            <version>${grpc.version}</version>\n        </dependency>\n\n        <dependency>\n            <groupId>io.grpc</groupId>\n            <artifactId>grpc-protobuf</artifactId>\n            <version>${grpc.version}</version>\n        </dependency>\n\n        <dependency>\n            <groupId>io.grpc</groupId>\n            <artifactId>grpc-stub</artifactId>\n            <version>${grpc.version}</version>\n        </dependency>\n\n        <!-- 必须：Java 9+ 需要 -->\n        <dependency>\n            
<groupId>jakarta.annotation</groupId>\n            <artifactId>jakarta.annotation-api</artifactId>\n            <version>1.3.5</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-service-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/client/GrpcAppointClient.java",
    "content": "package com.pinecone.hydra.grpc.client;\n\nimport java.util.concurrent.TimeUnit;\n\nimport com.pinecone.hydra.appoints.AppointNodus;\n\nimport io.grpc.ManagedChannel;\nimport io.grpc.ManagedChannelBuilder;\n\npublic class GrpcAppointClient implements AppointNodus {\n\n    protected String name;\n    protected long messageNodeId;\n\n    protected ManagedChannel managedChannel;\n    protected final GrpcClientConfig grpcClientConfig;\n\n    protected ManagedChannelBuilder<?> channelBuilder;\n\n\n    public GrpcAppointClient( String name, long messageNodeId, GrpcClientConfig config ) {\n        this.name = name;\n        this.messageNodeId = messageNodeId;\n        this.grpcClientConfig = config;\n\n        ManagedChannelBuilder<?> builder = ManagedChannelBuilder\n                        .forAddress( config.getHost(), config.getPort() )\n                        .usePlaintext();\n\n        if( config.getIdleTimeoutMillis() > 0 ) {\n            builder.idleTimeout( config.getIdleTimeoutMillis(), TimeUnit.MILLISECONDS );\n        }\n\n        if( config.getKeepAliveTimeSeconds() > 0 ) {\n            builder.keepAliveTime( config.getKeepAliveTimeSeconds(), TimeUnit.SECONDS );\n            builder.keepAliveWithoutCalls( true );\n        }\n\n        this.channelBuilder = builder;\n    }\n\n\n    public GrpcAppointClient( long messageNodeId, GrpcClientConfig config ) {\n        this(\n                \"grpc-client-\" + config.getHost() + \"-\" + config.getPort(),\n                messageNodeId,\n                config\n        );\n    }\n\n\n    public GrpcAppointClient( GrpcClientConfig config ) {\n        this( config.getPort(), config );\n    }\n\n\n    public ManagedChannelBuilder<?> channelBuilder() {\n        return this.channelBuilder;\n    }\n\n\n    public ManagedChannel getChannel() {\n        return this.managedChannel;\n    }\n\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public 
GrpcClientConfig getConfig() {\n        return this.grpcClientConfig;\n    }\n\n    @Override\n    public void close() {\n        if( this.managedChannel != null ) {\n            this.managedChannel.shutdownNow();\n            this.managedChannel = null;\n        }\n    }\n\n    public void shutdown( long t, TimeUnit u ) throws InterruptedException {\n        if( this.managedChannel != null ) {\n            this.managedChannel.shutdown().awaitTermination( t, u );\n            this.managedChannel = null;\n        }\n    }\n\n    public void shutdownNow() {\n        if( this.managedChannel != null ) {\n            this.managedChannel.shutdownNow();\n            this.managedChannel = null;\n        }\n    }\n\n    @Override\n    public void execute() throws Exception {\n        if( this.managedChannel == null ) {\n            this.managedChannel = this.channelBuilder.build();\n        }\n    }\n\n    @Override\n    public long getMessageNodeId() {\n        return this.messageNodeId;\n    }\n\n    public long getClientId() {\n        return this.getMessageNodeId();\n    }\n\n    public boolean isShutdown() {\n        return this.managedChannel == null || this.managedChannel.isShutdown();\n    }\n\n    public boolean isTerminated() {\n        return this.managedChannel == null || this.managedChannel.isTerminated();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/client/GrpcClientConfig.java",
    "content": "package com.pinecone.hydra.grpc.client;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic class GrpcClientConfig extends JSONConfig implements PatriarchalConfig {\n\n    private final String  host;\n    private final int     port;\n    private final boolean enable;\n\n    private final long    idleTimeoutMillis;\n    private final long    keepAliveTimeSeconds;\n\n    private final boolean autoReconnect;\n\n    private final boolean enableHeartbeat;\n    private final long    heartbeatIntervalMillis;\n\n\n    public GrpcClientConfig( JSONConfig parent ) {\n        this( (Map<String, Object>) null, parent );\n    }\n\n    public GrpcClientConfig( JSONObject thisScope, JSONConfig parent ) {\n        this( thisScope.getMap(), parent );\n    }\n\n    public GrpcClientConfig( JSONObject thisScope ) {\n        this( thisScope.getMap(), null );\n    }\n\n    public GrpcClientConfig( Map<String, Object> thisScope, JSONConfig parent ) {\n        super( thisScope, parent );\n\n        this.host = this.optString( \"host\", \"localhost\" );\n        this.port = this.optInt( \"port\", 5888 );\n        this.enable = this.optBoolean( \"enable\", true );\n\n        this.idleTimeoutMillis = this.optLong( \"idleTimeoutMillis\", 30L );\n\n        this.keepAliveTimeSeconds = this.optLong( \"keepAliveTimeoutSec\", 30L );\n\n        this.autoReconnect = this.optBoolean( \"autoReconnect\", true );\n        this.enableHeartbeat = this.optBoolean( \"enableHeartbeat\", false );\n        this.heartbeatIntervalMillis = this.optLong( \"heartbeatIntervalMills\", 2000L );\n    }\n\n    public GrpcClientConfig() {\n        this( null );\n    }\n\n\n    public String getHost() {\n        return this.host;\n    }\n\n    public int getPort() {\n        return this.port;\n    }\n\n    public boolean isEnable() {\n        return 
this.enable;\n    }\n\n    public long getIdleTimeoutMillis() {\n        return this.idleTimeoutMillis;\n    }\n\n    public long getKeepAliveTimeSeconds() {\n        return this.keepAliveTimeSeconds;\n    }\n\n    public boolean isAutoReconnect() {\n        return this.autoReconnect;\n    }\n\n    public boolean isEnableHeartbeat() {\n        return this.enableHeartbeat;\n    }\n\n    public long getHeartbeatIntervalMillis() {\n        return this.heartbeatIntervalMillis;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/server/GrpcAppointServer.java",
    "content": "package com.pinecone.hydra.grpc.server;\n\nimport java.io.IOException;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.TimeUnit;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.appoints.AppointNodus;\n\nimport io.grpc.Server;\nimport io.grpc.ServerBuilder;\n\npublic class GrpcAppointServer implements AppointNodus {\n\n    protected String name;\n    protected long messageNodeId;\n    protected Server grpcServer;\n    protected GrpcProcess grpcProcess;\n    protected Processum parentProcess;\n    protected final GrpcServerConfig grpcServerConfig;\n    protected ServerBuilder<?> serverBuilder;\n\n    public GrpcAppointServer( String name, long messageNodeId, GrpcServerConfig config, Processum parentProcess ) {\n        this.name = name;\n        this.messageNodeId = messageNodeId;\n        this.grpcServerConfig = config;\n        this.parentProcess = parentProcess;\n\n        ServerBuilder<?> builder = ServerBuilder.forPort( config.getPort() );\n\n        if( config.getHandshakeTimeoutMillis() > 0 ) {\n            builder.handshakeTimeout( config.getHandshakeTimeoutMillis(), TimeUnit.MILLISECONDS );\n        }\n\n        if( config.getKeepAliveTimeSeconds() > 0 ) {\n            builder.keepAliveTime( config.getKeepAliveTimeSeconds(), TimeUnit.SECONDS );\n        }\n\n        builder.keepAliveTimeout( config.getKeepAliveTimeoutSeconds(), TimeUnit.SECONDS );\n        builder.permitKeepAliveWithoutCalls( config.isPermitKeepAliveWithoutCalls() );\n        builder.maxInboundMessageSize( config.getMaxInboundMessageSize() );\n        builder.maxInboundMetadataSize( config.getMaxInboundMetadataSize() );\n\n        builder.executor( Executors.newCachedThreadPool() );\n        this.serverBuilder = builder;\n    }\n\n    public GrpcAppointServer( String name, long messageNodeId, GrpcServerConfig config ) {\n        this( name, messageNodeId, 
config, null );\n    }\n\n    public GrpcAppointServer( long messageNodeId, GrpcServerConfig config ) {\n        this(\n                messageNodeId,\n                config,\n                null\n        );\n    }\n\n    public GrpcAppointServer( long messageNodeId, GrpcServerConfig config, Processum parentProcess ) {\n        this(\n                \"grpc-server-\" + config.getHost() + \"-\" + config.getPort(),\n                messageNodeId,\n                config,\n                parentProcess\n        );\n    }\n\n    public GrpcAppointServer( GrpcServerConfig config ) {\n        this( config.getPort(), config );\n    }\n\n    public GrpcAppointServer( GrpcServerConfig config, Processum parentProcess ) {\n        this( config.getPort(), config, parentProcess );\n    }\n\n\n    public ServerBuilder<?> serverBuilder() {\n        return this.serverBuilder;\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public GrpcServerConfig getConfig() {\n        return this.grpcServerConfig;\n    }\n\n    @Override\n    public void close() {\n        if( this.grpcServer != null ) {\n            this.grpcServer.shutdownNow();\n            this.grpcServer = null;\n            this.grpcProcess = null;\n        }\n    }\n\n    public void shutdown() {\n        if( this.grpcServer != null ) {\n            this.grpcServer.shutdown();\n            this.grpcServer = null;\n            this.grpcProcess = null;\n        }\n    }\n\n\n    @Override\n    public void execute() throws Exception {\n        try {\n            this.start();\n        }\n        catch ( ProvokeHandleException e ) {\n            if ( e.getCause() instanceof Exception ) {\n                throw (Exception) e.getCause();\n            }\n        }\n    }\n\n    public void startGrpcServerOnly() throws IOException {\n        this.grpcServer.start();\n    }\n\n    public void start( Processum parentProcess ) {\n        if ( this.grpcServer == null ) 
{\n            this.grpcServer = this.serverBuilder.build();\n        }\n\n        this.grpcProcess = new GrpcProcess( this, parentProcess );\n        this.grpcProcess.start();\n    }\n\n    public void start() {\n        this.start( this.parentProcess );\n    }\n\n    public GrpcProcess getProcess() {\n        return this.grpcProcess;\n    }\n\n    public void awaitTermination() throws InterruptedException {\n        this.grpcServer.awaitTermination();\n    }\n\n    public void awaitTermination( long t, TimeUnit u ) throws InterruptedException {\n        this.grpcServer.awaitTermination( t, u );\n    }\n\n    @Override\n    public long getMessageNodeId() {\n        return this.messageNodeId;\n    }\n\n    public boolean isShutdown() {\n        return this.grpcServer == null || this.grpcServer.isShutdown();\n    }\n\n    public boolean isTerminated() {\n        return this.grpcServer == null || this.grpcServer.isTerminated();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/server/GrpcProcess.java",
    "content": "package com.pinecone.hydra.grpc.server;\n\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutionException;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Processum;\n\npublic class GrpcProcess extends ArchProcessum {\n\n    protected Logger log = LoggerFactory.getLogger( this.getClass() );\n\n    protected Thread affinityThread;\n\n    protected GrpcAppointServer grpcAppointServer;\n\n    public GrpcProcess( GrpcAppointServer server, Processum parent ) {\n        super( server.getName(), parent);\n        this.grpcAppointServer = server;\n    }\n\n    @Override\n    public void start() {\n        if ( this.affinityThread != null ) {\n            throw new IllegalStateException( \"[GrpcAppointServer] Process has already started.\" );\n        }\n\n        CompletableFuture<Object> future = new CompletableFuture<>();\n\n        this.affinityThread = new Thread(()->{\n            try {\n                this.grpcAppointServer.startGrpcServerOnly();\n                log.info( \"[GrpcAppointServer] Process has started. <Start>\" );\n                future.complete(null);\n                this.grpcAppointServer.awaitTermination();\n                log.info( \"[GrpcAppointServer] Process has terminated. <Done>\" );\n            }\n            catch ( Exception e ) {\n                future.completeExceptionally( e );\n            }\n        });\n\n        this.affinityThread.setName( ( this.getName() + \"-main-\" + this.affinityThread.getName() ).toLowerCase() );\n        this.affinityThread.setDaemon( false );\n        this.setThreadAffinity( this.affinityThread );\n        this.affinityThread.start();\n\n\n        try {\n            Object e = future.get();\n            log.info( \"[GrpcAppointServer] Process redirect to parent thread. 
<Done>\" );\n            if ( future.isCompletedExceptionally() ) {\n                if ( e instanceof Exception ) {\n                    throw new ProvokeHandleException( ((Exception)e).getCause() );\n                }\n            }\n        }\n        catch ( InterruptedException e ) {\n            Thread.currentThread().interrupt();\n        }\n        catch ( ExecutionException e ) {\n            throw new ProvokeHandleException( e.getCause() );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/grpc/server/GrpcServerConfig.java",
    "content": "package com.pinecone.hydra.grpc.server;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic class GrpcServerConfig extends JSONConfig implements PatriarchalConfig {\n\n    private final String  host;\n    private final int     port;\n    private final boolean enabled;\n\n    private final long    handshakeTimeoutMillis;\n    private final long    keepAliveTimeSeconds;\n    private final long    keepAliveTimeoutSeconds;\n\n    private final int     maxConcurrentCalls;\n    private final int     maxInboundMessageSize;\n    private final int     maxInboundMetadataSize;\n\n    private final boolean permitKeepAliveWithoutCalls;\n\n\n    public GrpcServerConfig( JSONConfig parent ) {\n        this( (Map<String, Object >) null, parent );\n    }\n\n    public GrpcServerConfig( JSONObject thisScope, JSONConfig parent ) {\n        this( thisScope.getMap(), parent );\n    }\n\n    public GrpcServerConfig( JSONObject thisScope ) {\n        this( thisScope.getMap(), null );\n    }\n\n    public GrpcServerConfig( Map<String, Object > thisScope, JSONConfig parent ) {\n        super( thisScope, parent );\n\n        this.host = this.optString( \"host\", \"0.0.0.0\" );\n        this.port = this.optInt( \"port\", 5888 );\n        this.enabled = this.optBoolean( \"enable\", true );\n\n        this.handshakeTimeoutMillis = this.optLong( \"handshakeTimeoutMillis\", 0L );\n        this.keepAliveTimeSeconds = this.optLong( \"keepAliveTimeoutSec\", 0L );\n\n        this.keepAliveTimeoutSeconds = this.optLong( \"keepAliveAckTimeoutSec\", 20L );\n        this.maxConcurrentCalls = this.optInt( \"maximumConnections\", Integer.MAX_VALUE );\n\n        // gRPC inbound 限制\n        this.maxInboundMessageSize = this.optInt( \"maxInboundMessageSize\", 4 * 1024 * 1024 );\n        this.maxInboundMetadataSize = this.optInt( 
\"maxInboundMetadataSize\", 8 * 1024 );\n\n        // 是否允许无调用时keepalive\n        this.permitKeepAliveWithoutCalls = this.optBoolean( \"permitKeepAliveWithoutCalls\", true );\n    }\n\n    public GrpcServerConfig() {\n        this(null );\n    }\n\n\n\n    public String getHost() {\n        return this.host;\n    }\n\n    public int getPort() {\n        return this.port;\n    }\n\n    public boolean isEnabled() {\n        return this.enabled;\n    }\n\n    public long getHandshakeTimeoutMillis() {\n        return this.handshakeTimeoutMillis;\n    }\n\n    public long getKeepAliveTimeSeconds() {\n        return this.keepAliveTimeSeconds;\n    }\n\n    public long getKeepAliveTimeoutSeconds() {\n        return this.keepAliveTimeoutSeconds;\n    }\n\n    public int getMaxConcurrentCalls() {\n        return this.maxConcurrentCalls;\n    }\n\n    public int getMaxInboundMessageSize() {\n        return this.maxInboundMessageSize;\n    }\n\n    public int getMaxInboundMetadataSize() {\n        return this.maxInboundMetadataSize;\n    }\n\n    public boolean isPermitKeepAliveWithoutCalls() {\n        return this.permitKeepAliveWithoutCalls;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/client/GrpcServiceClient.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.client;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.hydra.appoints.AppointNodus;\nimport com.pinecone.hydra.grpc.client.GrpcAppointClient;\nimport com.pinecone.hydra.service.registry.client.ArchServiceClient;\nimport com.pinecone.hydra.service.registry.client.ServiceClient;\n\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.service.registry.ClientServiceRegisterException;\nimport com.pinecone.hydra.service.registry.ServiceControlRPCException;\nimport com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage;\nimport com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamGrpc;\nimport com.pinecone.hydra.service.registry.grpc.server.iface.ServiceLifecycleImpl;\nimport com.pinecone.hydra.service.registry.grpc.server.iface.ServiceMetaManipulationIfaceImpl;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.*;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.*;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;\n\nimport io.grpc.stub.StreamObserver;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.util.concurrent.TimeUnit;\n\npublic class GrpcServiceClient extends ArchServiceClient implements ServiceClient {\n\n    protected final Logger mLogger = LoggerFactory.getLogger(this.getClass());\n    protected GrpcAppointClient mGrpcAppointClient;\n    private StreamObserver<ControlMessage> controlStream;\n\n    protected ServiceLifecycleGrpc.ServiceLifecycleBlockingStub mLifecycleStub;\n    protected ServiceMetaGrpc.ServiceMetaBlockingStub mMetaManipulationStub;\n\n    protected ServiceLifecycleIface mLifecycle;\n    protected ServiceMetaManipulationIface mMetaManipulation;\n\n\n    
public GrpcServiceClient( @Nullable GUID serviceId, GrpcAppointClient appointClient, GuidAllocator guidAllocator ) {\n        super(serviceId, guidAllocator);\n        this.mGrpcAppointClient = appointClient;\n    }\n\n    public GrpcServiceClient( GrpcAppointClient appointClient, GuidAllocator guidAllocator ) {\n        this(null, appointClient, guidAllocator);\n    }\n\n\n\n    private void initControlStream() {\n        ControlStreamGrpc.ControlStreamStub asyncStub = ControlStreamGrpc.newStub( this.mGrpcAppointClient.getChannel() );\n\n        this.controlStream = asyncStub.connect(\n                new StreamObserver<ControlMessage>() {\n\n                    @Override\n                    public void onNext(ControlMessage value) {\n                    }\n\n                    @Override\n                    public void onError(Throwable t) {\n                    }\n\n                    @Override\n                    public void onCompleted() {\n                    }\n                }\n        );\n\n        ControlMessage message = ControlMessage.newBuilder()\n                        .setClientId( this.getClientId() )\n                        .build();\n\n        this.controlStream.onNext(message);\n    }\n\n    @Override\n    protected void initRPCSubsystem() throws ServiceControlRPCException {\n\n    }\n\n    public long getClientId() {\n        return this.mGrpcAppointClient.getClientId();\n    }\n\n    @Override\n    public void startService() throws ServiceControlRPCException {\n        if ( !this.mGrpcAppointClient.isShutdown() ) {\n            throw new IllegalStateException(\"gRPC client already started.\");\n        }\n\n        try {\n            this.mGrpcAppointClient.execute();\n\n            this.mLifecycleStub = ServiceLifecycleGrpc.newBlockingStub( this.mGrpcAppointClient.getChannel() );\n            this.mMetaManipulationStub = ServiceMetaGrpc.newBlockingStub( this.mGrpcAppointClient.getChannel() );\n\n            this.mLifecycle = new 
ServiceLifecycleImpl( this.mLifecycleStub );\n            this.mMetaManipulation = new ServiceMetaManipulationIfaceImpl( this.mMetaManipulationStub );\n\n            this.mLogger.info(\"gRPC initialization successful\");\n            this.initControlStream();\n        }\n        catch ( Exception e ) {\n            throw new ServiceControlRPCException(e);\n        }\n    }\n\n    @Override\n    public void terminateService() {\n        if ( !this.mGrpcAppointClient.isTerminated() ) {\n            throw new IllegalStateException( \"gRPC client not started.\" );\n        }\n\n        this.deregister();\n\n        try {\n            this.mGrpcAppointClient.shutdown( 5, TimeUnit.SECONDS );\n        }\n        catch ( InterruptedException e ) {\n            this.mGrpcAppointClient.shutdownNow();\n        }\n    }\n\n    @Override\n    public GUID registerService(GUID serviceId, GUID deployGuid) throws ClientServiceRegisterException {\n        RegisterServiceRequest.Builder builder = RegisterServiceRequest.newBuilder();\n\n        builder.setServiceId(serviceId.toString());\n        builder.setClientId( this.getClientId() );\n\n        if (deployGuid != null) {\n            builder.setDeployId(deployGuid.toString());\n        }\n\n        RegisterServiceRequest request = builder.build();\n\n        try {\n            RegisterServiceReply reply = this.mLifecycleStub.registerService(request);\n            String instanceId = reply.getInstanceId();\n\n            if ( StringUtils.isNotBlank( instanceId ) ) {\n                this.mInstanceId = this.mGuidAllocator.parse(instanceId);\n\n                this.mLogger.info(\n                        \"Successfully register service : {}, instanceId: {}\", serviceId, instanceId\n                );\n            }\n\n        }\n        catch ( Exception e ) {\n            this.mLogger.error(\"Register Service {} failed\", serviceId.toString());\n            throw new ClientServiceRegisterException(e);\n        }\n\n        return 
this.mInstanceId;\n    }\n\n    @Override\n    public void deregister() {\n        if (this.mInstanceId != null) {\n            InstanceIdRequest request =\n                    InstanceIdRequest.newBuilder()\n                            .setInstanceId(this.mInstanceId.toString())\n                            .build();\n\n            this.mLifecycleStub.deregisterServiceByInstanceId(request);\n        }\n    }\n\n    @Override\n    public AppointNodus getAppointNodus() {\n        return this.mGrpcAppointClient;\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.mGuidAllocator;\n    }\n\n\n\n    public ServiceLifecycleGrpc.ServiceLifecycleBlockingStub getLifecycleStub() {\n        return this.mLifecycleStub;\n    }\n\n    public ServiceMetaGrpc.ServiceMetaBlockingStub getMetaManipulationStub() {\n        return this.mMetaManipulationStub;\n    }\n\n    public ServiceLifecycleIface getServiceLifecycle() {\n        return this.mLifecycle;\n    }\n\n    public ServiceMetaManipulationIface getMetaManipulation() {\n        return this.mMetaManipulation;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/ClientMetaDataInterceptor.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server;\n\nimport java.net.SocketAddress;\n\nimport io.grpc.Context;\nimport io.grpc.Contexts;\nimport io.grpc.Grpc;\nimport io.grpc.Metadata;\nimport io.grpc.ServerCall;\nimport io.grpc.ServerCallHandler;\nimport io.grpc.ServerInterceptor;\n\npublic class ClientMetaDataInterceptor implements ServerInterceptor {\n\n    @Override\n    public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(ServerCall<ReqT, RespT> call, Metadata headers, ServerCallHandler<ReqT, RespT> next) {\n        SocketAddress remoteAddr = call.getAttributes().get(Grpc.TRANSPORT_ATTR_REMOTE_ADDR);\n\n        Context ctx = Context.current().withValue(ClientAddress.CLIENT_ADDR, remoteAddr);\n        return Contexts.interceptCall(ctx, call, headers, next);\n    }\n\n}\n\nfinal class ClientAddress {\n    private ClientAddress() {}\n    public static final io.grpc.Context.Key<SocketAddress> CLIENT_ADDR = io.grpc.Context.key( \"client-addr\" );\n}"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcControlStreamService.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server;\n\nimport java.net.SocketAddress;\n\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage;\nimport com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamGrpc;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\n\nimport io.grpc.Context;\nimport io.grpc.StatusRuntimeException;\nimport io.grpc.stub.StreamObserver;\n\npublic class GrpcControlStreamService extends ControlStreamGrpc.ControlStreamImplBase {\n\n    private final ServiceManager           serviceManager;\n    private final GrpcServiceAppointServer appointServer;\n    private final GuidAllocator            guidAllocator;\n\n    public GrpcControlStreamService( ServiceManager serviceManager, GrpcServiceAppointServer appointServer ) {\n        this.serviceManager = serviceManager;\n        this.appointServer  = appointServer;\n        this.guidAllocator  = serviceManager.getServicesInstrument().getGuidAllocator();\n    }\n\n    @Override\n    public StreamObserver<ControlMessage> connect( StreamObserver<ControlMessage> responseObserver ) {\n        final SocketAddress remoteAddr = ClientAddress.CLIENT_ADDR.get();\n        final String connectId = remoteAddr.toString() + \"_\" + this.guidAllocator.nextGUID().toString();\n\n        return new StreamObserver<>() {\n            Long clientId = null;\n\n            @Override\n            public void onNext( ControlMessage message ) {\n                if ( this.clientId == null ) {\n                    this.clientId = message.getClientId();\n\n                    GrpcSession session = new GrpcSession( connectId, remoteAddr, responseObserver );\n\n                    serviceManager.serviceEventHooker().afterNewConnectionInbound(\n                            this.clientId,\n                            connectId,\n                            session,\n                            null,\n              
              () -> new GrpcServiceClientile(appointServer)\n                    );\n                }\n\n                // 这里可以处理心跳或其他控制指令\n            }\n\n            @Override\n            public void onError( Throwable t ) {\n                if ( t instanceof StatusRuntimeException ) {\n                    serviceManager.getLogger().info(\n                            \"[ServiceLifecycle] `{}` has requested `cancelled` to detach, with what '{}', addr: `{}`.\",\n                            this.clientId, t.getMessage(),  remoteAddr.toString()\n                    );\n                }\n                else {\n                    serviceManager.getLogger().error(\n                            \"[ServiceFatality] `{}` has provoked `exception` to detach, with what '{}', addr: `{}`.\",\n                            this.clientId, t.getMessage(),  remoteAddr.toString()\n                    );\n                }\n                this.detach();\n            }\n\n            @Override\n            public void onCompleted() {\n                this.detach();\n            }\n\n            private void detach() {\n                if ( this.clientId != null ) {\n                    serviceManager.serviceEventHooker().afterConnectionDetach( this.clientId, connectId, null );\n                }\n            }\n        };\n    }\n}"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcServiceAppointServer.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server;\n\nimport java.util.concurrent.TimeUnit;\n\nimport com.pinecone.hydra.grpc.server.GrpcAppointServer;\nimport com.pinecone.hydra.grpc.server.GrpcProcess;\nimport com.pinecone.hydra.grpc.server.GrpcServerConfig;\nimport com.pinecone.hydra.service.registry.appoint.ServiceAppointServer;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\n\nimport io.grpc.ServerBuilder;\nimport io.grpc.ServerInterceptors;\n\npublic class GrpcServiceAppointServer implements ServiceAppointServer {\n\n    protected ServiceManager serviceManager;\n\n    protected final GrpcAppointServer grpcAppointServer;;\n\n    public GrpcServiceAppointServer( GrpcAppointServer server ) {\n        this.grpcAppointServer = server;\n    }\n\n    @Override\n    public ServiceManager serviceManager() {\n        return this.serviceManager;\n    }\n\n    @Override\n    public ServiceAppointServer hookServiceManager( ServiceManager serviceManager ) {\n        if (this.serviceManager != null) {\n            throw new IllegalStateException(\"Manager has already hooked.\");\n        }\n\n        this.serviceManager = serviceManager;\n\n        ServerBuilder<?> build = this.grpcAppointServer.serverBuilder();\n        build\n                .addService(new GrpcServiceLifecycleService(serviceManager))\n                .addService(new GrpcServiceMetaService(serviceManager))\n                .addService(ServerInterceptors.intercept(\n                        new GrpcControlStreamService(serviceManager, this),\n                        new ClientMetaDataInterceptor()\n                ))\n        ;\n\n        this.serviceManager.getLogger().info( \"GrpcAppointServer[{}] has been hooked.\", this.getName() );\n        return this;\n    }\n\n    @Override\n    public String getName() {\n        return this.grpcAppointServer.getName();\n    }\n\n    @Override\n    public GrpcServerConfig getConfig() {\n        return 
this.grpcAppointServer.getConfig();\n    }\n\n    @Override\n    public void close() {\n        this.grpcAppointServer.close();\n    }\n\n    public void awaitTermination() throws InterruptedException {\n        this.grpcAppointServer.awaitTermination();\n    }\n\n    public void awaitTermination( long t, TimeUnit u ) throws InterruptedException {\n        this.grpcAppointServer.awaitTermination( t, u );\n    }\n\n    public GrpcProcess getProcess() {\n        return this.grpcAppointServer.getProcess();\n    }\n\n    @Override\n    public void execute() throws Exception {\n        this.grpcAppointServer.execute();\n    }\n\n    @Override\n    public long getMessageNodeId() {\n        return this.grpcAppointServer.getMessageNodeId();\n    }\n\n    @Override\n    public boolean isTerminated() {\n        return this.grpcAppointServer == null || this.grpcAppointServer.isTerminated();\n    }\n\n    @Override\n    public boolean isStarted() {\n        return !this.grpcAppointServer.isShutdown();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcServiceClientile.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server;\n\nimport java.net.SocketAddress;\nimport java.util.Collection;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ConcurrentMap;\n\nimport com.pinecone.hydra.service.registry.appoint.ServiceClientile;\nimport com.pinecone.hydra.service.registry.appoint.ServiceAppointServer;\n\npublic class GrpcServiceClientile implements ServiceClientile {\n\n    protected long                                      mClientId = -1;\n\n    // connectId -> GrpcSession\n    protected final ConcurrentMap<Object, GrpcSession>  mSessions;\n\n    protected final ServiceAppointServer                mServiceAppointServer;\n\n    protected SocketAddress                             mRemoteAddress;\n\n    public GrpcServiceClientile( ServiceAppointServer serviceAppointServer ) {\n        this.mServiceAppointServer = serviceAppointServer;\n        this.mSessions             = new ConcurrentHashMap<>();\n    }\n\n    @Override\n    public SocketAddress getRemoteAddress() {\n        return this.mRemoteAddress;\n    }\n\n    @Override\n    public void afterNewConnectionInbound( Long clientId, Object connectId, Object connection, Object context ) {\n        if ( !(connection instanceof GrpcSession) ) {\n            throw new IllegalArgumentException(\n                    \"GrpcServiceClientile expects `GrpcSession`, but got: \" + (connection == null ? 
\"null\" : connection.getClass().getName())\n            );\n        }\n\n        GrpcSession session = (GrpcSession) connection;\n        this.mClientId      = clientId;\n        this.mRemoteAddress = session.getRemoteAddress();\n        this.mSessions.put( connectId, session );\n    }\n\n    @Override\n    public void afterConnectionDetach( Long clientId, Object connectId, Object connection ) {\n        GrpcSession removed = this.mSessions.remove( connectId );\n        if ( removed != null ) {\n            removed.markClosed();\n        }\n    }\n\n    @Override\n    public ServiceAppointServer serviceAppointServer() {\n        return this.mServiceAppointServer;\n    }\n\n    @Override\n    public long getClientId() {\n        return this.mClientId;\n    }\n\n    @Override\n    public int connectionCount() {\n        return this.mSessions.size();\n    }\n\n    @Override\n    public boolean isDefunct() {\n        return this.mSessions.isEmpty();\n    }\n\n    @Override\n    public GrpcSession queryNativeConnection(Object connectionIdentity) {\n        return this.mSessions.get( connectionIdentity );\n    }\n\n    @Override\n    public Collection<?> connections() {\n        return this.mSessions.values();\n    }\n\n    @Override\n    public void shutdown() {\n        // 尽力关闭所有 session：完成 outbound 流（server 侧主动结束）\n        for ( GrpcSession s : this.mSessions.values() ) {\n            if (s == null) {\n                continue;\n            }\n            if ( s.markClosed() ) {\n                try {\n                    // outbound 是 StreamObserver<?>，onCompleted 可以安全调用（若已结束会抛异常则忽略）\n                    @SuppressWarnings(\"unchecked\")\n                    io.grpc.stub.StreamObserver<Object> out = (io.grpc.stub.StreamObserver<Object>) s.getOutbound();\n                    out.onCompleted();\n                }\n                catch (Throwable ignored) {\n                    // best-effort close\n                }\n            }\n        }\n        
this.mSessions.clear();\n    }\n}"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcServiceLifecycleService.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.hydra.service.registry.ClientServiceRegisterException;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleGrpc;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleService;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\nimport io.grpc.stub.StreamObserver;\n\npublic class GrpcServiceLifecycleService extends ServiceLifecycleGrpc.ServiceLifecycleImplBase {\n\n    private final ServiceLifecycleService lifecycleService;\n\n    public GrpcServiceLifecycleService(ServiceManager serviceManager) {\n        this.lifecycleService = serviceManager.serviceLifecycleService();\n    }\n\n    @Override\n    public void registerService( RegisterServiceRequest request, StreamObserver<RegisterServiceReply> responseObserver ) {\n        try {\n            RegisterServiceDTO dto = new RegisterServiceDTO();\n            dto.setClientId(request.getClientId());\n            dto.setServiceId(request.getServiceId());\n            dto.setDeployId(request.getDeployId());\n\n    
        String instanceId = this.lifecycleService.registerService(dto);\n\n            RegisterServiceReply.Builder builder = RegisterServiceReply.newBuilder();\n\n            if (instanceId != null) {\n                builder.setInstanceId(instanceId);\n            } else {\n                builder.setInstanceId(\"\");\n            }\n\n            RegisterServiceReply reply = builder.build();\n\n            responseObserver.onNext(reply);\n            responseObserver.onCompleted();\n        }\n        catch ( ClientServiceRegisterException e ) {\n            throw new ProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public void deregisterServiceByClientId(ClientIdRequest request, StreamObserver<EmptyReply> responseObserver) {\n        this.lifecycleService.deregisterServiceByClientId(request.getClientId());\n\n        EmptyReply reply = EmptyReply.newBuilder().build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void deregisterServiceByInstanceId(InstanceIdRequest request, StreamObserver<EmptyReply> responseObserver) {\n\n        this.lifecycleService.deregisterServiceByInstanceId(request.getInstanceId());\n\n        EmptyReply reply = EmptyReply.newBuilder().build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void hasOwnedServiceByServiceId(ServiceIdRequest request, StreamObserver<BoolReply> responseObserver) {\n        boolean result = this.lifecycleService.hasOwnedServiceByServiceId(\n                request.getServiceId()\n        );\n\n        BoolReply reply = BoolReply.newBuilder()\n                .setValue(result)\n                .build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void hasOwnedServiceInstanceByClientId(ClientIdRequest request, StreamObserver<BoolReply> responseObserver) {\n        boolean 
result = this.lifecycleService.hasOwnedServiceInstance(\n                request.getClientId()\n        );\n\n        BoolReply reply = BoolReply.newBuilder()\n                .setValue(result)\n                .build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void hasOwnedServiceInstanceByInstanceId(InstanceIdRequest request, StreamObserver<BoolReply> responseObserver) {\n        boolean result = this.lifecycleService.hasOwnedServiceInstance(\n                request.getInstanceId()\n        );\n\n        BoolReply reply = BoolReply.newBuilder()\n                .setValue(result)\n                .build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void hasOwnedServiceClient(ClientIdRequest request, StreamObserver<BoolReply> responseObserver) {\n        boolean result = this.lifecycleService.hasOwnedServiceClient(\n                request.getClientId()\n        );\n\n        BoolReply reply = BoolReply.newBuilder()\n                .setValue(result)\n                .build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void countRegisteredService(EmptyRequest request, StreamObserver<CountReply> responseObserver) {\n\n        Integer count = this.lifecycleService.countRegisteredService();\n\n        CountReply.Builder builder = CountReply.newBuilder();\n\n        if (count != null) {\n            builder.setValue(count);\n        } else {\n            builder.setValue(0);\n        }\n\n        CountReply reply = builder.build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcServiceMetaService.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server;\n\nimport com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaGrpc;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.StringReply;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaService;\nimport io.grpc.stub.StreamObserver;\n\nimport java.util.List;\n\npublic class GrpcServiceMetaService extends ServiceMetaGrpc.ServiceMetaImplBase {\n\n    private final ServiceMetaService serviceMetaService;\n\n    public GrpcServiceMetaService(ServiceManager serviceManager) {\n        this.serviceMetaService = serviceManager.getServiceMetaService();\n    }\n\n    @Override\n    public void fetchServiceInsMetaByClientId(ClientIdRequest request, StreamObserver<ServiceMetaDTOListReply> responseObserver) {\n\n        List<ServiceMetaDTO> list = this.serviceMetaService.fetchServiceInsMetaByClientId(request.getClientId());\n\n        ServiceMetaDTOListReply.Builder builder = ServiceMetaDTOListReply.newBuilder();\n\n        if (list != null) {\n            for (ServiceMetaDTO dto : list) {\n                builder.addMetas(this.toProto(dto));\n            }\n        }\n\n        ServiceMetaDTOListReply reply = 
builder.build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void fetchServiceInsMetaByServiceId(ServiceIdRequest request, StreamObserver<ServiceMetaDTOListReply> responseObserver) {\n        List<ServiceMetaDTO> list = this.serviceMetaService.fetchServiceInsMetaByServiceId(request.getServiceId());\n\n        ServiceMetaDTOListReply.Builder builder = ServiceMetaDTOListReply.newBuilder();\n\n        if (list != null) {\n            for (ServiceMetaDTO dto : list) {\n                builder.addMetas(this.toProto(dto));\n            }\n        }\n\n        ServiceMetaDTOListReply reply = builder.build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void queryServiceMetaByPath(PathRequest request, StreamObserver<ServiceMetaDTOReply> responseObserver) {\n        ServiceMetaDTO dto = this.serviceMetaService.queryServiceMetaByPath(request.getPath()\n        );\n\n        ServiceMetaDTOReply.Builder builder = ServiceMetaDTOReply.newBuilder();\n\n        if (dto != null) {\n            builder.setMeta(this.toProto(dto));\n        }\n\n        ServiceMetaDTOReply reply = builder.build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void queryServiceMetaByGuid(GuidRequest request, StreamObserver<ServiceMetaDTOReply> responseObserver) {\n        ServiceMetaDTO dto = this.serviceMetaService.queryServiceMetaByGuid(request.getGuid());\n\n        ServiceMetaDTOReply.Builder builder = ServiceMetaDTOReply.newBuilder();\n\n        if (dto != null) {\n            builder.setMeta(this.toProto(dto));\n        }\n\n        ServiceMetaDTOReply reply = builder.build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void evalCreationStatement(EvalRequest request, StreamObserver<StringReply> 
responseObserver) {\n        String result = this.serviceMetaService.evalCreationStatement(request.getJsonStatement());\n\n        StringReply.Builder builder = StringReply.newBuilder();\n\n        if (result != null) {\n            builder.setValue(result);\n        }\n\n        StringReply reply = builder.build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    @Override\n    public void createNewService(CreateNewServiceRequest request, StreamObserver<StringReply> responseObserver) {\n        String result = this.serviceMetaService.createNewService(request.getParentAppPath(), this.fromProto(request.getMeta()));\n\n        StringReply.Builder builder = StringReply.newBuilder();\n\n        if (result != null) {\n            builder.setValue(result);\n        }\n\n        StringReply reply = builder.build();\n\n        responseObserver.onNext(reply);\n        responseObserver.onCompleted();\n    }\n\n    private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO toProto(ServiceMetaDTO dto) {\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builder =\n                com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.newBuilder();\n\n        if (dto.getGuid() != null) {\n            builder.setGuid(dto.getGuid());\n        }\n        if (dto.getName() != null) {\n            builder.setName(dto.getName());\n        }\n        if (dto.getType() != null) {\n            builder.setType(dto.getType());\n        }\n        if (dto.getDisplayName() != null) {\n            builder.setDisplayName(dto.getDisplayName());\n        }\n        if (dto.getDescription() != null) {\n            builder.setDescription(dto.getDescription());\n        }\n        if (dto.getFullName() != null) {\n            builder.setFullName(dto.getFullName());\n        }\n        if (dto.getGroupNamespace() != null) {\n            
builder.setGroupNamespace(dto.getGroupNamespace());\n        }\n        if (dto.getGroupName() != null) {\n            builder.setGroupName(dto.getGroupName());\n        }\n        if (dto.getScenario() != null) {\n            builder.setScenario(dto.getScenario());\n        }\n        if (dto.getPrimaryImplLang() != null) {\n            builder.setPrimaryImplLang(dto.getPrimaryImplLang());\n        }\n        if (dto.getExtraInformation() != null) {\n            builder.setExtraInformation(dto.getExtraInformation());\n        }\n        if (dto.getLevel() != null) {\n            builder.setLevel(dto.getLevel());\n        }\n\n        return builder.build();\n    }\n\n    private ServiceMetaDTO fromProto(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO proto) {\n        ServiceMetaDTO dto = new ServiceMetaDTO();\n\n        dto.setGuid(proto.getGuid());\n        dto.setName(proto.getName());\n        dto.setType(proto.getType());\n        dto.setDisplayName(proto.getDisplayName());\n        dto.setDescription(proto.getDescription());\n        dto.setFullName(proto.getFullName());\n        dto.setGroupNamespace(proto.getGroupNamespace());\n        dto.setGroupName(proto.getGroupName());\n        dto.setScenario(proto.getScenario());\n        dto.setPrimaryImplLang(proto.getPrimaryImplLang());\n        dto.setExtraInformation(proto.getExtraInformation());\n        dto.setLevel(proto.getLevel());\n\n        return dto;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/GrpcSession.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server;\n\nimport io.grpc.stub.StreamObserver;\nimport java.net.SocketAddress;\nimport java.util.concurrent.atomic.AtomicBoolean;\n\npublic class GrpcSession {\n\n    private final String            connectId;\n    private final SocketAddress     remoteAddress;\n    private final StreamObserver<?> outbound;\n    private final AtomicBoolean     closed = new AtomicBoolean(false);\n    private volatile long           lastHeartbeatTime = System.currentTimeMillis();\n\n    public void refreshHeartbeat() {\n        this.lastHeartbeatTime = System.currentTimeMillis();\n    }\n\n    public boolean isTimeout( long timeoutMillis ) {\n        return System.currentTimeMillis() - this.lastHeartbeatTime > timeoutMillis;\n    }\n\n    public GrpcSession( String connectId, SocketAddress remoteAddress, StreamObserver<?> outbound ) {\n        this.connectId      = connectId;\n        this.remoteAddress  = remoteAddress;\n        this.outbound       = outbound;\n    }\n\n    public String getConnectId() {\n        return this.connectId;\n    }\n\n    public SocketAddress getRemoteAddress() {\n        return this.remoteAddress;\n    }\n\n    public StreamObserver<?> getOutbound() {\n        return this.outbound;\n    }\n\n    public boolean isClosed() {\n        return this.closed.get();\n    }\n\n    public boolean markClosed() {\n        return this.closed.compareAndSet(false, true);\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/cs/ControlMessage.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: control_stream.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.cs;\n\n/**\n * Protobuf type {@code ControlMessage}\n */\npublic final class ControlMessage extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:ControlMessage)\n    ControlMessageOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use ControlMessage.newBuilder() to construct.\n  private ControlMessage(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private ControlMessage() {\n    payload_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new ControlMessage();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.class, com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.Builder.class);\n  }\n\n  public static final int CLIENTID_FIELD_NUMBER = 1;\n  private long clientId_ = 0L;\n  /**\n   * <code>int64 clientId = 1;</code>\n   * @return The clientId.\n   */\n  @java.lang.Override\n  public long getClientId() {\n    return clientId_;\n  }\n\n  public static final int PAYLOAD_FIELD_NUMBER = 2;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object payload_ = \"\";\n  /**\n   * 
<code>string payload = 2;</code>\n   * @return The payload.\n   */\n  @java.lang.Override\n  public java.lang.String getPayload() {\n    java.lang.Object ref = payload_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      payload_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string payload = 2;</code>\n   * @return The bytes for payload.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getPayloadBytes() {\n    java.lang.Object ref = payload_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      payload_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (clientId_ != 0L) {\n      output.writeInt64(1, clientId_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(payload_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, payload_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (clientId_ != 0L) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeInt64Size(1, clientId_);\n    }\n    if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(payload_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, payload_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage other = (com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage) obj;\n\n    if (getClientId()\n        != other.getClientId()) return false;\n    if (!getPayload()\n        .equals(other.getPayload())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + CLIENTID_FIELD_NUMBER;\n    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(\n        getClientId());\n    hash = (37 * hash) + PAYLOAD_FIELD_NUMBER;\n    hash = (53 * hash) + getPayload().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return 
PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        
.parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code ControlMessage}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:ControlMessage)\n      com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessageOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.class, com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      clientId_ = 0L;\n      payload_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.internal_static_ControlMessage_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage build() {\n      com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage result = new com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.clientId_ = clientId_;\n      }\n      if (((from_bitField0_ & 0x00000002) != 0)) {\n        result.payload_ = payload_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        
com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.getDefaultInstance()) return this;\n      if (other.getClientId() != 0L) {\n        setClientId(other.getClientId());\n      }\n      if (!other.getPayload().isEmpty()) {\n        payload_ = other.payload_;\n        bitField0_ |= 0x00000002;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) 
{\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 8: {\n              clientId_ = input.readInt64();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 8\n            case 18: {\n              payload_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000002;\n              break;\n            } // case 18\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private long clientId_ ;\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @return The clientId.\n     */\n    @java.lang.Override\n    public long getClientId() {\n      return clientId_;\n    }\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @param value The clientId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setClientId(long value) {\n\n      clientId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearClientId() {\n      bitField0_ = (bitField0_ & ~0x00000001);\n      clientId_ = 0L;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object payload_ = \"\";\n    /**\n     * <code>string payload = 2;</code>\n     * @return The payload.\n     */\n    public java.lang.String getPayload() {\n      java.lang.Object ref = payload_;\n      if (!(ref instanceof 
java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        payload_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string payload = 2;</code>\n     * @return The bytes for payload.\n     */\n    public com.google.protobuf.ByteString\n        getPayloadBytes() {\n      java.lang.Object ref = payload_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        payload_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string payload = 2;</code>\n     * @param value The payload to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPayload(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      payload_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string payload = 2;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearPayload() {\n      payload_ = getDefaultInstance().getPayload();\n      bitField0_ = (bitField0_ & ~0x00000002);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string payload = 2;</code>\n     * @param value The bytes for payload to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPayloadBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      payload_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final 
com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:ControlMessage)\n  }\n\n  // @@protoc_insertion_point(class_scope:ControlMessage)\n  private static final com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<ControlMessage>\n      PARSER = new com.google.protobuf.AbstractParser<ControlMessage>() {\n    @java.lang.Override\n    public ControlMessage parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<ControlMessage> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public 
com.google.protobuf.Parser<ControlMessage> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/cs/ControlMessageOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: control_stream.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.cs;\n\npublic interface ControlMessageOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:ControlMessage)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>int64 clientId = 1;</code>\n   * @return The clientId.\n   */\n  long getClientId();\n\n  /**\n   * <code>string payload = 2;</code>\n   * @return The payload.\n   */\n  java.lang.String getPayload();\n  /**\n   * <code>string payload = 2;</code>\n   * @return The bytes for payload.\n   */\n  com.google.protobuf.ByteString\n      getPayloadBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/cs/ControlStreamGrpc.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server.cs;\n\nimport static io.grpc.MethodDescriptor.generateFullMethodName;\n\n/**\n */\n@javax.annotation.Generated(\n    value = \"by gRPC proto compiler (version 1.62.2)\",\n    comments = \"Source: control_stream.proto\")\n@io.grpc.stub.annotations.GrpcGenerated\npublic final class ControlStreamGrpc {\n\n  private ControlStreamGrpc() {}\n\n  public static final java.lang.String SERVICE_NAME = \"ControlStream\";\n\n  // Static method descriptors that strictly reflect the proto.\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage,\n      com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage> getConnectMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"Connect\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage,\n      com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage> getConnectMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage, com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage> getConnectMethod;\n    if ((getConnectMethod = ControlStreamGrpc.getConnectMethod) == null) {\n      synchronized (ControlStreamGrpc.class) {\n        if ((getConnectMethod = ControlStreamGrpc.getConnectMethod) == null) {\n          ControlStreamGrpc.getConnectMethod = getConnectMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage, com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage>newBuilder()\n              
.setType(io.grpc.MethodDescriptor.MethodType.BIDI_STREAMING)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"Connect\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage.getDefaultInstance()))\n              .setSchemaDescriptor(new ControlStreamMethodDescriptorSupplier(\"Connect\"))\n              .build();\n        }\n      }\n    }\n    return getConnectMethod;\n  }\n\n  /**\n   * Creates a new async stub that supports all call types for the service\n   */\n  public static ControlStreamStub newStub(io.grpc.Channel channel) {\n    io.grpc.stub.AbstractStub.StubFactory<ControlStreamStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ControlStreamStub>() {\n        @java.lang.Override\n        public ControlStreamStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ControlStreamStub(channel, callOptions);\n        }\n      };\n    return ControlStreamStub.newStub(factory, channel);\n  }\n\n  /**\n   * Creates a new blocking-style stub that supports unary and streaming output calls on the service\n   */\n  public static ControlStreamBlockingStub newBlockingStub(\n      io.grpc.Channel channel) {\n    io.grpc.stub.AbstractStub.StubFactory<ControlStreamBlockingStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ControlStreamBlockingStub>() {\n        @java.lang.Override\n        public ControlStreamBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ControlStreamBlockingStub(channel, callOptions);\n        }\n      };\n    return ControlStreamBlockingStub.newStub(factory, channel);\n  
}\n\n  /**\n   * Creates a new ListenableFuture-style stub that supports unary calls on the service\n   */\n  public static ControlStreamFutureStub newFutureStub(\n      io.grpc.Channel channel) {\n    io.grpc.stub.AbstractStub.StubFactory<ControlStreamFutureStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ControlStreamFutureStub>() {\n        @java.lang.Override\n        public ControlStreamFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ControlStreamFutureStub(channel, callOptions);\n        }\n      };\n    return ControlStreamFutureStub.newStub(factory, channel);\n  }\n\n  /**\n   */\n  public interface AsyncService {\n\n    /**\n     */\n    default io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage> connect(\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage> responseObserver) {\n      return io.grpc.stub.ServerCalls.asyncUnimplementedStreamingCall(getConnectMethod(), responseObserver);\n    }\n  }\n\n  /**\n   * Base class for the server implementation of the service ControlStream.\n   */\n  public static abstract class ControlStreamImplBase\n      implements io.grpc.BindableService, AsyncService {\n\n    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {\n      return ControlStreamGrpc.bindService(this);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do asynchronous rpc calls to service ControlStream.\n   */\n  public static final class ControlStreamStub\n      extends io.grpc.stub.AbstractAsyncStub<ControlStreamStub> {\n    private ControlStreamStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ControlStreamStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ControlStreamStub(channel, callOptions);\n    
}\n\n    /**\n     */\n    public io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage> connect(\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage> responseObserver) {\n      return io.grpc.stub.ClientCalls.asyncBidiStreamingCall(\n          getChannel().newCall(getConnectMethod(), getCallOptions()), responseObserver);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do synchronous rpc calls to service ControlStream.\n   */\n  public static final class ControlStreamBlockingStub\n      extends io.grpc.stub.AbstractBlockingStub<ControlStreamBlockingStub> {\n    private ControlStreamBlockingStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ControlStreamBlockingStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ControlStreamBlockingStub(channel, callOptions);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do ListenableFuture-style rpc calls to service ControlStream.\n   */\n  public static final class ControlStreamFutureStub\n      extends io.grpc.stub.AbstractFutureStub<ControlStreamFutureStub> {\n    private ControlStreamFutureStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ControlStreamFutureStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ControlStreamFutureStub(channel, callOptions);\n    }\n  }\n\n  private static final int METHODID_CONNECT = 0;\n\n  private static final class MethodHandlers<Req, Resp> implements\n      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,\n      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,\n      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,\n      
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {\n    private final AsyncService serviceImpl;\n    private final int methodId;\n\n    MethodHandlers(AsyncService serviceImpl, int methodId) {\n      this.serviceImpl = serviceImpl;\n      this.methodId = methodId;\n    }\n\n    @java.lang.Override\n    @java.lang.SuppressWarnings(\"unchecked\")\n    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {\n      switch (methodId) {\n        default:\n          throw new AssertionError();\n      }\n    }\n\n    @java.lang.Override\n    @java.lang.SuppressWarnings(\"unchecked\")\n    public io.grpc.stub.StreamObserver<Req> invoke(\n        io.grpc.stub.StreamObserver<Resp> responseObserver) {\n      switch (methodId) {\n        case METHODID_CONNECT:\n          return (io.grpc.stub.StreamObserver<Req>) serviceImpl.connect(\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage>) responseObserver);\n        default:\n          throw new AssertionError();\n      }\n    }\n  }\n\n  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {\n    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())\n        .addMethod(\n          getConnectMethod(),\n          io.grpc.stub.ServerCalls.asyncBidiStreamingCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage,\n              com.pinecone.hydra.service.registry.grpc.server.cs.ControlMessage>(\n                service, METHODID_CONNECT)))\n        .build();\n  }\n\n  private static abstract class ControlStreamBaseDescriptorSupplier\n      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {\n    ControlStreamBaseDescriptorSupplier() {}\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.cs.ControlStreamOuterClass.getDescriptor();\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {\n      return getFileDescriptor().findServiceByName(\"ControlStream\");\n    }\n  }\n\n  private static final class ControlStreamFileDescriptorSupplier\n      extends ControlStreamBaseDescriptorSupplier {\n    ControlStreamFileDescriptorSupplier() {}\n  }\n\n  private static final class ControlStreamMethodDescriptorSupplier\n      extends ControlStreamBaseDescriptorSupplier\n      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {\n    private final java.lang.String methodName;\n\n    ControlStreamMethodDescriptorSupplier(java.lang.String methodName) {\n      this.methodName = methodName;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {\n      return getServiceDescriptor().findMethodByName(methodName);\n    }\n  }\n\n  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;\n\n  public static io.grpc.ServiceDescriptor getServiceDescriptor() {\n    io.grpc.ServiceDescriptor result = serviceDescriptor;\n    if (result == null) {\n      synchronized (ControlStreamGrpc.class) {\n        result = serviceDescriptor;\n        if (result == null) {\n          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)\n              .setSchemaDescriptor(new ControlStreamFileDescriptorSupplier())\n              .addMethod(getConnectMethod())\n              .build();\n        }\n      }\n    }\n    return result;\n  }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/cs/ControlStreamOuterClass.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: control_stream.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.cs;\n\npublic final class ControlStreamOuterClass {\n  private ControlStreamOuterClass() {}\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistryLite registry) {\n  }\n\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistry registry) {\n    registerAllExtensions(\n        (com.google.protobuf.ExtensionRegistryLite) registry);\n  }\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_ControlMessage_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_ControlMessage_fieldAccessorTable;\n\n  public static com.google.protobuf.Descriptors.FileDescriptor\n      getDescriptor() {\n    return descriptor;\n  }\n  private static  com.google.protobuf.Descriptors.FileDescriptor\n      descriptor;\n  static {\n    java.lang.String[] descriptorData = {\n      \"\\n\\024control_stream.proto\\\"3\\n\\016ControlMessage\" +\n      \"\\022\\020\\n\\010clientId\\030\\001 \\001(\\003\\022\\017\\n\\007payload\\030\\002 \\001(\\t2@\\n\\rC\" +\n      \"ontrolStream\\022/\\n\\007Connect\\022\\017.ControlMessage\" +\n      \"\\032\\017.ControlMessage(\\0010\\001B6\\n2com.pinecone.hy\" +\n      \"dra.service.registry.grpc.server.csP\\001b\\006p\" +\n      \"roto3\"\n    };\n    descriptor = com.google.protobuf.Descriptors.FileDescriptor\n      .internalBuildGeneratedFileFrom(descriptorData,\n        new com.google.protobuf.Descriptors.FileDescriptor[] {\n        });\n    internal_static_ControlMessage_descriptor =\n      getDescriptor().getMessageTypes().get(0);\n    internal_static_ControlMessage_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_ControlMessage_descriptor,\n        new java.lang.String[] 
{ \"ClientId\", \"Payload\", });\n  }\n\n  // @@protoc_insertion_point(outer_class_scope)\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/iface/ServiceLifecycleImpl.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server.iface;\n\nimport com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleGrpc;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\n\npublic class ServiceLifecycleImpl implements ServiceLifecycleIface {\n\n    protected final ServiceLifecycleGrpc.ServiceLifecycleBlockingStub lifecycleBlockingStub;\n\n    public ServiceLifecycleImpl( ServiceLifecycleGrpc.ServiceLifecycleBlockingStub lifecycleBlockingStub ) {\n        this.lifecycleBlockingStub = lifecycleBlockingStub;\n    }\n\n    @Override\n    public String registerService( RegisterServiceDTO serviceDTO ) {\n        RegisterServiceRequest request =\n                RegisterServiceRequest.newBuilder()\n                        .setClientId( serviceDTO.getClientId() )\n                        .setServiceId( serviceDTO.getServiceId() )\n                        .setDeployId( serviceDTO.getDeployId() )\n                        .build();\n        RegisterServiceReply reply = this.lifecycleBlockingStub.registerService( request );\n        return reply.getInstanceId();\n    }\n\n    @Override\n    public boolean 
createInstanceMeta(ServiceInstanceEntry serviceInstanceEntry) {\n        return false;\n    }\n\n    @Override\n    public void deregisterServiceByClientId( Long clientId ) {\n        ClientIdRequest request = ClientIdRequest.newBuilder().setClientId( clientId ).build();\n        this.lifecycleBlockingStub.deregisterServiceByClientId( request );\n    }\n\n\n    @Override\n    public void deregisterServiceByInstanceId( String instanceId ) {\n        InstanceIdRequest request = InstanceIdRequest.newBuilder().setInstanceId( instanceId ).build();\n        this.lifecycleBlockingStub.deregisterServiceByInstanceId( request );\n    }\n\n\n    @Override\n    public boolean hasOwnedServiceByServiceId( String serviceId ) {\n        ServiceIdRequest request = ServiceIdRequest.newBuilder().setServiceId( serviceId ).build();\n        BoolReply reply = this.lifecycleBlockingStub.hasOwnedServiceByServiceId( request );\n\n        return reply.getValue();\n    }\n\n\n    @Override\n    public boolean hasOwnedServiceInstance( Long clientId ) {\n        ClientIdRequest request = ClientIdRequest.newBuilder().setClientId( clientId ).build();\n        BoolReply reply = this.lifecycleBlockingStub.hasOwnedServiceInstanceByClientId( request );\n        return reply.getValue();\n    }\n\n\n    @Override\n    public boolean hasOwnedServiceInstance( String instanceId ) {\n        InstanceIdRequest request = InstanceIdRequest.newBuilder().setInstanceId( instanceId ).build();\n        BoolReply reply = this.lifecycleBlockingStub.hasOwnedServiceInstanceByInstanceId( request );\n        return reply.getValue();\n    }\n\n\n    @Override\n    public boolean hasOwnedServiceClient( Long clientId ) {\n        ClientIdRequest request = ClientIdRequest.newBuilder().setClientId( clientId ).build();\n        BoolReply reply = this.lifecycleBlockingStub.hasOwnedServiceClient( request );\n        return reply.getValue();\n    }\n\n\n    @Override\n    public Integer countRegisteredService() {\n        
EmptyRequest request = EmptyRequest.newBuilder().build();\n        CountReply reply = this.lifecycleBlockingStub.countRegisteredService( request );\n        return reply.getValue();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/iface/ServiceMetaManipulationIfaceImpl.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server.iface;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaGrpc;\nimport com.pinecone.hydra.service.registry.grpc.server.meta.StringReply;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;\n\npublic class ServiceMetaManipulationIfaceImpl implements ServiceMetaManipulationIface {\n\n    protected final ServiceMetaGrpc.ServiceMetaBlockingStub metaBlockingStub;\n\n    public ServiceMetaManipulationIfaceImpl( ServiceMetaGrpc.ServiceMetaBlockingStub metaBlockingStub ) {\n        this.metaBlockingStub = metaBlockingStub;\n    }\n\n\n    @Override\n    public List<ServiceMetaDTO> fetchServiceInsMetaByClientId( long clientId ) {\n        ClientIdRequest request = ClientIdRequest.newBuilder()\n                        .setClientId( clientId )\n                        .build();\n\n        ServiceMetaDTOListReply reply = this.metaBlockingStub.fetchServiceInsMetaByClientId( request );\n\n        List<ServiceMetaDTO> result = new ArrayList<>();\n\n        for( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO proto : reply.getMetasList() ) {\n            result.add( this.fromProto( proto ) );\n        
}\n\n        return result;\n    }\n\n\n    @Override\n    public List<ServiceMetaDTO> fetchServiceInsMetaByServiceId( String serviceId ) {\n        ServiceIdRequest request = ServiceIdRequest.newBuilder()\n                        .setServiceId( serviceId )\n                        .build();\n\n        ServiceMetaDTOListReply reply = this.metaBlockingStub.fetchServiceInsMetaByServiceId( request );\n\n        List<ServiceMetaDTO> result = new ArrayList<>();\n\n        for( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO proto : reply.getMetasList() ) {\n            result.add( this.fromProto( proto ) );\n        }\n        return result;\n    }\n\n\n    @Override\n    public ServiceMetaDTO queryServiceMetaByPath( String path ) {\n        PathRequest request = PathRequest.newBuilder()\n                        .setPath( path )\n                        .build();\n\n        ServiceMetaDTOReply reply = this.metaBlockingStub.queryServiceMetaByPath( request );\n        if( reply.hasMeta() ) {\n            return this.fromProto( reply.getMeta() );\n        }\n\n        return null;\n    }\n\n\n    @Override\n    public ServiceMetaDTO queryServiceMetaByGuid( String guid ) {\n        GuidRequest request = GuidRequest.newBuilder()\n                        .setGuid( guid )\n                        .build();\n\n        ServiceMetaDTOReply reply = this.metaBlockingStub.queryServiceMetaByGuid( request );\n        if( reply.hasMeta() ) {\n            return this.fromProto( reply.getMeta() );\n        }\n        return null;\n    }\n\n\n    @Override\n    public String evalCreationStatement( String jsonStatement ) {\n        EvalRequest request = EvalRequest.newBuilder()\n                        .setJsonStatement( jsonStatement )\n                        .build();\n        StringReply reply = this.metaBlockingStub.evalCreationStatement( request );\n        return reply.getValue();\n    }\n\n\n    @Override\n    public String createNewService( String 
parentAppPath, ServiceMetaDTO meta ) {\n        CreateNewServiceRequest request = CreateNewServiceRequest.newBuilder()\n                        .setParentAppPath( parentAppPath )\n                        .setMeta( this.toProto( meta ) )\n                        .build();\n        StringReply reply = this.metaBlockingStub.createNewService( request );\n        return reply.getValue();\n    }\n\n\n\n    protected ServiceMetaDTO fromProto( com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO proto ) {\n        ServiceMetaDTO dto = new ServiceMetaDTO();\n\n        dto.setGuid( proto.getGuid() );\n        dto.setName( proto.getName() );\n        dto.setType( proto.getType() );\n        dto.setDisplayName( proto.getDisplayName() );\n        dto.setDescription( proto.getDescription() );\n        dto.setFullName( proto.getFullName() );\n        dto.setGroupNamespace( proto.getGroupNamespace() );\n        dto.setGroupName( proto.getGroupName() );\n        dto.setScenario( proto.getScenario() );\n        dto.setPrimaryImplLang( proto.getPrimaryImplLang() );\n        dto.setExtraInformation( proto.getExtraInformation() );\n        dto.setLevel( proto.getLevel() );\n\n        return dto;\n    }\n\n    protected com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO toProto( ServiceMetaDTO dto ) {\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builder =\n                com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.newBuilder();\n\n        if( dto.getGuid() != null ) {\n            builder.setGuid( dto.getGuid() );\n        }\n\n        if( dto.getName() != null ) {\n            builder.setName( dto.getName() );\n        }\n\n        if( dto.getType() != null ) {\n            builder.setType( dto.getType() );\n        }\n\n        if( dto.getDisplayName() != null ) {\n            builder.setDisplayName( dto.getDisplayName() );\n        }\n\n        if( dto.getDescription() != null ) {\n       
     builder.setDescription( dto.getDescription() );\n        }\n\n        if( dto.getFullName() != null ) {\n            builder.setFullName( dto.getFullName() );\n        }\n\n        if( dto.getGroupNamespace() != null ) {\n            builder.setGroupNamespace( dto.getGroupNamespace() );\n        }\n\n        if( dto.getGroupName() != null ) {\n            builder.setGroupName( dto.getGroupName() );\n        }\n\n        if( dto.getScenario() != null ) {\n            builder.setScenario( dto.getScenario() );\n        }\n\n        if( dto.getPrimaryImplLang() != null ) {\n            builder.setPrimaryImplLang( dto.getPrimaryImplLang() );\n        }\n\n        if( dto.getExtraInformation() != null ) {\n            builder.setExtraInformation( dto.getExtraInformation() );\n        }\n\n        if( dto.getLevel() != null ) {\n            builder.setLevel( dto.getLevel() );\n        }\n\n        return builder.build();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/BoolReply.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code BoolReply}\n */\npublic final class BoolReply extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:BoolReply)\n    BoolReplyOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use BoolReply.newBuilder() to construct.\n  private BoolReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private BoolReply() {\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new BoolReply();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.Builder.class);\n  }\n\n  public static final int VALUE_FIELD_NUMBER = 1;\n  private boolean value_ = false;\n  /**\n   * <code>bool value = 1;</code>\n   * @return The value.\n   */\n  @java.lang.Override\n  public boolean getValue() {\n    return value_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if 
(isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (value_ != false) {\n      output.writeBool(1, value_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (value_ != false) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeBoolSize(1, value_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply) obj;\n\n    if (getValue()\n        != other.getValue()) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + VALUE_FIELD_NUMBER;\n    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(\n        getValue());\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  
}\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        
.parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code BoolReply}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:BoolReply)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReplyOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      value_ = false;\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_BoolReply_descriptor;\n    }\n\n   
 @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.value_ = value_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n 
       int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance()) return this;\n      if (other.getValue() != false) {\n        setValue(other.getValue());\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 8: {\n              value_ = input.readBool();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 8\n            default: {\n              if (!super.parseUnknownField(input, 
extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private boolean value_ ;\n    /**\n     * <code>bool value = 1;</code>\n     * @return The value.\n     */\n    @java.lang.Override\n    public boolean getValue() {\n      return value_;\n    }\n    /**\n     * <code>bool value = 1;</code>\n     * @param value The value to set.\n     * @return This builder for chaining.\n     */\n    public Builder setValue(boolean value) {\n\n      value_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>bool value = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearValue() {\n      bitField0_ = (bitField0_ & ~0x00000001);\n      value_ = false;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:BoolReply)\n  }\n\n  // @@protoc_insertion_point(class_scope:BoolReply)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply();\n  }\n\n  public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<BoolReply>\n      PARSER = new com.google.protobuf.AbstractParser<BoolReply>() {\n    @java.lang.Override\n    public BoolReply parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<BoolReply> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<BoolReply> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/BoolReplyOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface BoolReplyOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:BoolReply)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>bool value = 1;</code>\n   * @return The value.\n   */\n  boolean getValue();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ClientIdRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code ClientIdRequest}\n */\npublic final class ClientIdRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:ClientIdRequest)\n    ClientIdRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use ClientIdRequest.newBuilder() to construct.\n  private ClientIdRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private ClientIdRequest() {\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new ClientIdRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.Builder.class);\n  }\n\n  public static final int CLIENTID_FIELD_NUMBER = 1;\n  private long clientId_ = 0L;\n  /**\n   * <code>int64 clientId = 1;</code>\n   * @return The clientId.\n   */\n  @java.lang.Override\n  public long getClientId() {\n    return clientId_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte 
isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (clientId_ != 0L) {\n      output.writeInt64(1, clientId_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (clientId_ != 0L) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeInt64Size(1, clientId_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) obj;\n\n    if (getClientId()\n        != other.getClientId()) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + CLIENTID_FIELD_NUMBER;\n    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(\n        getClientId());\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(\n      
java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(\n      java.io.InputStream 
input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    
return this == DEFAULT_INSTANCE\n        ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code ClientIdRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:ClientIdRequest)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      clientId_ = 0L;\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ClientIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.clientId_ = clientId_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        
com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance()) return this;\n      if (other.getClientId() != 0L) {\n        setClientId(other.getClientId());\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n 
             break;\n            case 8: {\n              clientId_ = input.readInt64();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 8\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private long clientId_ ;\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @return The clientId.\n     */\n    @java.lang.Override\n    public long getClientId() {\n      return clientId_;\n    }\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @param value The clientId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setClientId(long value) {\n\n      clientId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearClientId() {\n      bitField0_ = (bitField0_ & ~0x00000001);\n      clientId_ = 0L;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:ClientIdRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:ClientIdRequest)\n  private 
static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<ClientIdRequest>\n      PARSER = new com.google.protobuf.AbstractParser<ClientIdRequest>() {\n    @java.lang.Override\n    public ClientIdRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<ClientIdRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<ClientIdRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ClientIdRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface ClientIdRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:ClientIdRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>int64 clientId = 1;</code>\n   * @return The clientId.\n   */\n  long getClientId();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/CountReply.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code CountReply}\n */\npublic final class CountReply extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:CountReply)\n    CountReplyOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use CountReply.newBuilder() to construct.\n  private CountReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private CountReply() {\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new CountReply();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.Builder.class);\n  }\n\n  public static final int VALUE_FIELD_NUMBER = 1;\n  private int value_ = 0;\n  /**\n   * <code>int32 value = 1;</code>\n   * @return The value.\n   */\n  @java.lang.Override\n  public int getValue() {\n    return value_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if 
(isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (value_ != 0) {\n      output.writeInt32(1, value_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (value_ != 0) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeInt32Size(1, value_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply) obj;\n\n    if (getValue()\n        != other.getValue()) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + VALUE_FIELD_NUMBER;\n    hash = (53 * hash) + getValue();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        
.parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code CountReply}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:CountReply)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReplyOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      value_ = 0;\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CountReply_descriptor;\n    
}\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.value_ = value_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        
com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.getDefaultInstance()) return this;\n      if (other.getValue() != 0) {\n        setValue(other.getValue());\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 8: {\n              value_ = input.readInt32();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 8\n            default: {\n      
        if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private int value_ ;\n    /**\n     * <code>int32 value = 1;</code>\n     * @return The value.\n     */\n    @java.lang.Override\n    public int getValue() {\n      return value_;\n    }\n    /**\n     * <code>int32 value = 1;</code>\n     * @param value The value to set.\n     * @return This builder for chaining.\n     */\n    public Builder setValue(int value) {\n\n      value_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>int32 value = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearValue() {\n      bitField0_ = (bitField0_ & ~0x00000001);\n      value_ = 0;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:CountReply)\n  }\n\n  // @@protoc_insertion_point(class_scope:CountReply)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply();\n  }\n\n  public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<CountReply>\n      PARSER = new com.google.protobuf.AbstractParser<CountReply>() {\n    @java.lang.Override\n    public CountReply parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<CountReply> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<CountReply> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/CountReplyOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface CountReplyOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:CountReply)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>int32 value = 1;</code>\n   * @return The value.\n   */\n  int getValue();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/CreateInstanceMetaRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code CreateInstanceMetaRequest}\n */\npublic final class CreateInstanceMetaRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:CreateInstanceMetaRequest)\n    CreateInstanceMetaRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use CreateInstanceMetaRequest.newBuilder() to construct.\n  private CreateInstanceMetaRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private CreateInstanceMetaRequest() {\n    instanceGuid_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new CreateInstanceMetaRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.Builder.class);\n  }\n\n  public static final int INSTANCEGUID_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object instanceGuid_ = \"\";\n  /**\n   * <code>string instanceGuid = 1;</code>\n   * @return The 
instanceGuid.\n   */\n  @java.lang.Override\n  public java.lang.String getInstanceGuid() {\n    java.lang.Object ref = instanceGuid_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      instanceGuid_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string instanceGuid = 1;</code>\n   * @return The bytes for instanceGuid.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getInstanceGuidBytes() {\n    java.lang.Object ref = instanceGuid_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      instanceGuid_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceGuid_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceGuid_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceGuid_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceGuid_);\n    }\n    size += 
getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest) obj;\n\n    if (!getInstanceGuid()\n        .equals(other.getInstanceGuid())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + INSTANCEGUID_FIELD_NUMBER;\n    hash = (53 * hash) + getInstanceGuid().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return 
PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest 
parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code CreateInstanceMetaRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:CreateInstanceMetaRequest)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      instanceGuid_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      
return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_CreateInstanceMetaRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.instanceGuid_ = instanceGuid_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n 
   @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.getDefaultInstance()) return this;\n      if (!other.getInstanceGuid().isEmpty()) {\n        instanceGuid_ = other.instanceGuid_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try 
{\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              instanceGuid_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object instanceGuid_ = \"\";\n    /**\n     * <code>string instanceGuid = 1;</code>\n     * @return The instanceGuid.\n     */\n    public java.lang.String getInstanceGuid() {\n      java.lang.Object ref = instanceGuid_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        instanceGuid_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string instanceGuid = 1;</code>\n     * @return The bytes for instanceGuid.\n     */\n    public com.google.protobuf.ByteString\n        getInstanceGuidBytes() {\n      java.lang.Object ref = instanceGuid_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        instanceGuid_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * 
<code>string instanceGuid = 1;</code>\n     * @param value The instanceGuid to set.\n     * @return This builder for chaining.\n     */\n    public Builder setInstanceGuid(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      instanceGuid_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string instanceGuid = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearInstanceGuid() {\n      instanceGuid_ = getDefaultInstance().getInstanceGuid();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string instanceGuid = 1;</code>\n     * @param value The bytes for instanceGuid to set.\n     * @return This builder for chaining.\n     */\n    public Builder setInstanceGuidBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      instanceGuid_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:CreateInstanceMetaRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:CreateInstanceMetaRequest)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest();\n  }\n\n  public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<CreateInstanceMetaRequest>\n      PARSER = new com.google.protobuf.AbstractParser<CreateInstanceMetaRequest>() {\n    @java.lang.Override\n    public CreateInstanceMetaRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<CreateInstanceMetaRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<CreateInstanceMetaRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/CreateInstanceMetaRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface CreateInstanceMetaRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:CreateInstanceMetaRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string instanceGuid = 1;</code>\n   * @return The instanceGuid.\n   */\n  java.lang.String getInstanceGuid();\n  /**\n   * <code>string instanceGuid = 1;</code>\n   * @return The bytes for instanceGuid.\n   */\n  com.google.protobuf.ByteString\n      getInstanceGuidBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/EmptyReply.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code EmptyReply}\n */\npublic final class EmptyReply extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:EmptyReply)\n    EmptyReplyOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use EmptyReply.newBuilder() to construct.\n  private EmptyReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private EmptyReply() {\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new EmptyReply();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.Builder.class);\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws 
java.io.IOException {\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply) obj;\n\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    
return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code EmptyReply}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:EmptyReply)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReplyOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyReply_descriptor;\n    }\n\n    @java.lang.Override\n    public 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply(this);\n      onBuilt();\n      return result;\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n   
 @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance()) return this;\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return 
super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:EmptyReply)\n  }\n\n  // @@protoc_insertion_point(class_scope:EmptyReply)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<EmptyReply>\n      PARSER = new com.google.protobuf.AbstractParser<EmptyReply>() {\n    @java.lang.Override\n    public EmptyReply parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<EmptyReply> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<EmptyReply> getParserForType() {\n    return PARSER;\n  
}\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/EmptyReplyOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface EmptyReplyOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:EmptyReply)\n    com.google.protobuf.MessageOrBuilder {\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/EmptyRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code EmptyRequest}\n */\npublic final class EmptyRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:EmptyRequest)\n    EmptyRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use EmptyRequest.newBuilder() to construct.\n  private EmptyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private EmptyRequest() {\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new EmptyRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.Builder.class);\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                     
 throws java.io.IOException {\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest) obj;\n\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public 
static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws 
java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code EmptyRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:EmptyRequest)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_EmptyRequest_descriptor;\n    }\n\n    
@java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest(this);\n      onBuilt();\n      return result;\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return 
super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.getDefaultInstance()) return this;\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet 
unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:EmptyRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:EmptyRequest)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<EmptyRequest>\n      PARSER = new com.google.protobuf.AbstractParser<EmptyRequest>() {\n    @java.lang.Override\n    public EmptyRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<EmptyRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public 
com.google.protobuf.Parser<EmptyRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/EmptyRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface EmptyRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:EmptyRequest)\n    com.google.protobuf.MessageOrBuilder {\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/InstanceIdRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code InstanceIdRequest}\n */\npublic final class InstanceIdRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:InstanceIdRequest)\n    InstanceIdRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use InstanceIdRequest.newBuilder() to construct.\n  private InstanceIdRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private InstanceIdRequest() {\n    instanceId_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new InstanceIdRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.Builder.class);\n  }\n\n  public static final int INSTANCEID_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object instanceId_ = \"\";\n  /**\n   * <code>string instanceId = 1;</code>\n   * @return The instanceId.\n   */\n  @java.lang.Override\n  public java.lang.String getInstanceId() {\n    java.lang.Object ref = 
instanceId_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      instanceId_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string instanceId = 1;</code>\n   * @return The bytes for instanceId.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getInstanceIdBytes() {\n    java.lang.Object ref = instanceId_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      instanceId_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceId_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceId_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n  
  if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest) obj;\n\n    if (!getInstanceId()\n        .equals(other.getInstanceId())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + INSTANCEID_FIELD_NUMBER;\n    hash = (53 * hash) + getInstanceId().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite 
extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  
public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code InstanceIdRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:InstanceIdRequest)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      instanceId_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_InstanceIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.instanceId_ = instanceId_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        
com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.getDefaultInstance()) return this;\n      if (!other.getInstanceId().isEmpty()) {\n        instanceId_ = other.instanceId_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n     
     switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              instanceId_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object instanceId_ = \"\";\n    /**\n     * <code>string instanceId = 1;</code>\n     * @return The instanceId.\n     */\n    public java.lang.String getInstanceId() {\n      java.lang.Object ref = instanceId_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        instanceId_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string instanceId = 1;</code>\n     * @return The bytes for instanceId.\n     */\n    public com.google.protobuf.ByteString\n        getInstanceIdBytes() {\n      java.lang.Object ref = instanceId_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        instanceId_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string instanceId = 1;</code>\n     * @param value The instanceId to set.\n     * @return This builder for chaining.\n     
*/\n    public Builder setInstanceId(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      instanceId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string instanceId = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearInstanceId() {\n      instanceId_ = getDefaultInstance().getInstanceId();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string instanceId = 1;</code>\n     * @param value The bytes for instanceId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setInstanceIdBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      instanceId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:InstanceIdRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:InstanceIdRequest)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final 
com.google.protobuf.Parser<InstanceIdRequest>\n      PARSER = new com.google.protobuf.AbstractParser<InstanceIdRequest>() {\n    @java.lang.Override\n    public InstanceIdRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<InstanceIdRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<InstanceIdRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/InstanceIdRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface InstanceIdRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:InstanceIdRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string instanceId = 1;</code>\n   * @return The instanceId.\n   */\n  java.lang.String getInstanceId();\n  /**\n   * <code>string instanceId = 1;</code>\n   * @return The bytes for instanceId.\n   */\n  com.google.protobuf.ByteString\n      getInstanceIdBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/RegisterServiceReply.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code RegisterServiceReply}\n */\npublic final class RegisterServiceReply extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:RegisterServiceReply)\n    RegisterServiceReplyOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use RegisterServiceReply.newBuilder() to construct.\n  private RegisterServiceReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private RegisterServiceReply() {\n    instanceId_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new RegisterServiceReply();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.Builder.class);\n  }\n\n  public static final int INSTANCEID_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object instanceId_ = \"\";\n  /**\n   * <code>string instanceId = 1;</code>\n   * @return The instanceId.\n   */\n  @java.lang.Override\n  public java.lang.String 
getInstanceId() {\n    java.lang.Object ref = instanceId_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      instanceId_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string instanceId = 1;</code>\n   * @return The bytes for instanceId.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getInstanceIdBytes() {\n    java.lang.Object ref = instanceId_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      instanceId_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceId_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceId_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public 
boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply) obj;\n\n    if (!getInstanceId()\n        .equals(other.getInstanceId())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + INSTANCEID_FIELD_NUMBER;\n    hash = (53 * hash) + getInstanceId().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(\n      
com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return 
com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code RegisterServiceReply}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:RegisterServiceReply)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReplyOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      instanceId_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceReply_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.instanceId_ = instanceId_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder 
clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.getDefaultInstance()) return this;\n      if (!other.getInstanceId().isEmpty()) {\n        instanceId_ = other.instanceId_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          
int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              instanceId_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object instanceId_ = \"\";\n    /**\n     * <code>string instanceId = 1;</code>\n     * @return The instanceId.\n     */\n    public java.lang.String getInstanceId() {\n      java.lang.Object ref = instanceId_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        instanceId_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string instanceId = 1;</code>\n     * @return The bytes for instanceId.\n     */\n    public com.google.protobuf.ByteString\n        getInstanceIdBytes() {\n      java.lang.Object ref = instanceId_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        instanceId_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string instanceId = 1;</code>\n     * @param value The instanceId to set.\n     * @return 
This builder for chaining.\n     */\n    public Builder setInstanceId(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      instanceId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string instanceId = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearInstanceId() {\n      instanceId_ = getDefaultInstance().getInstanceId();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string instanceId = 1;</code>\n     * @param value The bytes for instanceId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setInstanceIdBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      instanceId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:RegisterServiceReply)\n  }\n\n  // @@protoc_insertion_point(class_scope:RegisterServiceReply)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private 
static final com.google.protobuf.Parser<RegisterServiceReply>\n      PARSER = new com.google.protobuf.AbstractParser<RegisterServiceReply>() {\n    @java.lang.Override\n    public RegisterServiceReply parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<RegisterServiceReply> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<RegisterServiceReply> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/RegisterServiceReplyOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface RegisterServiceReplyOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:RegisterServiceReply)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string instanceId = 1;</code>\n   * @return The instanceId.\n   */\n  java.lang.String getInstanceId();\n  /**\n   * <code>string instanceId = 1;</code>\n   * @return The bytes for instanceId.\n   */\n  com.google.protobuf.ByteString\n      getInstanceIdBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/RegisterServiceRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code RegisterServiceRequest}\n */\npublic final class RegisterServiceRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:RegisterServiceRequest)\n    RegisterServiceRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use RegisterServiceRequest.newBuilder() to construct.\n  private RegisterServiceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private RegisterServiceRequest() {\n    serviceId_ = \"\";\n    deployId_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new RegisterServiceRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.Builder.class);\n  }\n\n  public static final int CLIENTID_FIELD_NUMBER = 1;\n  private long clientId_ = 0L;\n  /**\n   * <code>int64 clientId = 1;</code>\n   * @return The clientId.\n   */\n  @java.lang.Override\n  public long getClientId() {\n    return clientId_;\n  
}\n\n  public static final int SERVICEID_FIELD_NUMBER = 2;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object serviceId_ = \"\";\n  /**\n   * <code>string serviceId = 2;</code>\n   * @return The serviceId.\n   */\n  @java.lang.Override\n  public java.lang.String getServiceId() {\n    java.lang.Object ref = serviceId_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      serviceId_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string serviceId = 2;</code>\n   * @return The bytes for serviceId.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getServiceIdBytes() {\n    java.lang.Object ref = serviceId_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      serviceId_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int DEPLOYID_FIELD_NUMBER = 3;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object deployId_ = \"\";\n  /**\n   * <code>string deployId = 3;</code>\n   * @return The deployId.\n   */\n  @java.lang.Override\n  public java.lang.String getDeployId() {\n    java.lang.Object ref = deployId_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      deployId_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string deployId = 3;</code>\n   * @return The bytes for deployId.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getDeployIdBytes() {\n    java.lang.Object ref = deployId_;\n   
 if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      deployId_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (clientId_ != 0L) {\n      output.writeInt64(1, clientId_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceId_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deployId_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, deployId_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (clientId_ != 0L) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeInt64Size(1, clientId_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, serviceId_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deployId_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, deployId_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    
if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest) obj;\n\n    if (getClientId()\n        != other.getClientId()) return false;\n    if (!getServiceId()\n        .equals(other.getServiceId())) return false;\n    if (!getDeployId()\n        .equals(other.getDeployId())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + CLIENTID_FIELD_NUMBER;\n    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(\n        getClientId());\n    hash = (37 * hash) + SERVICEID_FIELD_NUMBER;\n    hash = (53 * hash) + getServiceId().hashCode();\n    hash = (37 * hash) + DEPLOYID_FIELD_NUMBER;\n    hash = (53 * hash) + getDeployId().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return 
com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code RegisterServiceRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:RegisterServiceRequest)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      clientId_ = 0L;\n      serviceId_ = \"\";\n      deployId_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        
getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_RegisterServiceRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.clientId_ = clientId_;\n      }\n      if (((from_bitField0_ & 0x00000002) != 0)) {\n        result.serviceId_ = serviceId_;\n      }\n      if (((from_bitField0_ & 0x00000004) != 0)) {\n        result.deployId_ = deployId_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return 
super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.getDefaultInstance()) return this;\n      if (other.getClientId() != 0L) {\n        setClientId(other.getClientId());\n      }\n      if (!other.getServiceId().isEmpty()) {\n        serviceId_ = other.serviceId_;\n        bitField0_ |= 0x00000002;\n        onChanged();\n      }\n      if (!other.getDeployId().isEmpty()) {\n        deployId_ = other.deployId_;\n        bitField0_ |= 0x00000004;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    
}\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 8: {\n              clientId_ = input.readInt64();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 8\n            case 18: {\n              serviceId_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000002;\n              break;\n            } // case 18\n            case 26: {\n              deployId_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000004;\n              break;\n            } // case 26\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private long clientId_ ;\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @return The clientId.\n     */\n    @java.lang.Override\n    public long getClientId() {\n      return clientId_;\n    }\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @param value The clientId to set.\n     * @return This builder for chaining.\n     */\n    public 
Builder setClientId(long value) {\n\n      clientId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearClientId() {\n      bitField0_ = (bitField0_ & ~0x00000001);\n      clientId_ = 0L;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object serviceId_ = \"\";\n    /**\n     * <code>string serviceId = 2;</code>\n     * @return The serviceId.\n     */\n    public java.lang.String getServiceId() {\n      java.lang.Object ref = serviceId_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        serviceId_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string serviceId = 2;</code>\n     * @return The bytes for serviceId.\n     */\n    public com.google.protobuf.ByteString\n        getServiceIdBytes() {\n      java.lang.Object ref = serviceId_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        serviceId_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string serviceId = 2;</code>\n     * @param value The serviceId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setServiceId(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      serviceId_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string serviceId = 2;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearServiceId() 
{\n      serviceId_ = getDefaultInstance().getServiceId();\n      bitField0_ = (bitField0_ & ~0x00000002);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string serviceId = 2;</code>\n     * @param value The bytes for serviceId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setServiceIdBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      serviceId_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object deployId_ = \"\";\n    /**\n     * <code>string deployId = 3;</code>\n     * @return The deployId.\n     */\n    public java.lang.String getDeployId() {\n      java.lang.Object ref = deployId_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        deployId_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string deployId = 3;</code>\n     * @return The bytes for deployId.\n     */\n    public com.google.protobuf.ByteString\n        getDeployIdBytes() {\n      java.lang.Object ref = deployId_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        deployId_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string deployId = 3;</code>\n     * @param value The deployId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setDeployId(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      deployId_ = value;\n      bitField0_ |= 
0x00000004;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string deployId = 3;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearDeployId() {\n      deployId_ = getDefaultInstance().getDeployId();\n      bitField0_ = (bitField0_ & ~0x00000004);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string deployId = 3;</code>\n     * @param value The bytes for deployId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setDeployIdBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      deployId_ = value;\n      bitField0_ |= 0x00000004;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:RegisterServiceRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:RegisterServiceRequest)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<RegisterServiceRequest>\n      PARSER = new com.google.protobuf.AbstractParser<RegisterServiceRequest>() {\n    @java.lang.Override\n    public RegisterServiceRequest parsePartialFrom(\n   
     com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<RegisterServiceRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<RegisterServiceRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/RegisterServiceRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface RegisterServiceRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:RegisterServiceRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>int64 clientId = 1;</code>\n   * @return The clientId.\n   */\n  long getClientId();\n\n  /**\n   * <code>string serviceId = 2;</code>\n   * @return The serviceId.\n   */\n  java.lang.String getServiceId();\n  /**\n   * <code>string serviceId = 2;</code>\n   * @return The bytes for serviceId.\n   */\n  com.google.protobuf.ByteString\n      getServiceIdBytes();\n\n  /**\n   * <code>string deployId = 3;</code>\n   * @return The deployId.\n   */\n  java.lang.String getDeployId();\n  /**\n   * <code>string deployId = 3;</code>\n   * @return The bytes for deployId.\n   */\n  com.google.protobuf.ByteString\n      getDeployIdBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceIdRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\n/**\n * Protobuf type {@code ServiceIdRequest}\n */\npublic final class ServiceIdRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:ServiceIdRequest)\n    ServiceIdRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use ServiceIdRequest.newBuilder() to construct.\n  private ServiceIdRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private ServiceIdRequest() {\n    serviceId_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new ServiceIdRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.Builder.class);\n  }\n\n  public static final int SERVICEID_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object serviceId_ = \"\";\n  /**\n   * <code>string serviceId = 1;</code>\n   * @return The serviceId.\n   */\n  @java.lang.Override\n  public java.lang.String getServiceId() {\n    java.lang.Object ref = serviceId_;\n    if 
(ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      serviceId_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string serviceId = 1;</code>\n   * @return The bytes for serviceId.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getServiceIdBytes() {\n    java.lang.Object ref = serviceId_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      serviceId_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, serviceId_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, serviceId_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     
return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest) obj;\n\n    if (!getServiceId()\n        .equals(other.getServiceId())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + SERVICEID_FIELD_NUMBER;\n    hash = (53 * hash) + getServiceId().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws 
com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code ServiceIdRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:ServiceIdRequest)\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      serviceId_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.internal_static_ServiceIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.serviceId_ = serviceId_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        
com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.getDefaultInstance()) return this;\n      if (!other.getServiceId().isEmpty()) {\n        serviceId_ = other.serviceId_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          
switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              serviceId_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object serviceId_ = \"\";\n    /**\n     * <code>string serviceId = 1;</code>\n     * @return The serviceId.\n     */\n    public java.lang.String getServiceId() {\n      java.lang.Object ref = serviceId_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        serviceId_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string serviceId = 1;</code>\n     * @return The bytes for serviceId.\n     */\n    public com.google.protobuf.ByteString\n        getServiceIdBytes() {\n      java.lang.Object ref = serviceId_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        serviceId_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string serviceId = 1;</code>\n     * @param value The serviceId to set.\n     * @return This builder for chaining.\n     */\n    public Builder 
setServiceId(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      serviceId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string serviceId = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearServiceId() {\n      serviceId_ = getDefaultInstance().getServiceId();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string serviceId = 1;</code>\n     * @param value The bytes for serviceId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setServiceIdBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      serviceId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:ServiceIdRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:ServiceIdRequest)\n  private static final com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<ServiceIdRequest>\n      PARSER = new 
com.google.protobuf.AbstractParser<ServiceIdRequest>() {\n    @java.lang.Override\n    public ServiceIdRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<ServiceIdRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<ServiceIdRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceIdRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic interface ServiceIdRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:ServiceIdRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string serviceId = 1;</code>\n   * @return The serviceId.\n   */\n  java.lang.String getServiceId();\n  /**\n   * <code>string serviceId = 1;</code>\n   * @return The bytes for serviceId.\n   */\n  com.google.protobuf.ByteString\n      getServiceIdBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceLifecycleGrpc.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\nimport static io.grpc.MethodDescriptor.generateFullMethodName;\n\n/**\n */\n@javax.annotation.Generated(\n    value = \"by gRPC proto compiler (version 1.62.2)\",\n    comments = \"Source: service_lifecycle.proto\")\n@io.grpc.stub.annotations.GrpcGenerated\npublic final class ServiceLifecycleGrpc {\n\n  private ServiceLifecycleGrpc() {}\n\n  public static final java.lang.String SERVICE_NAME = \"ServiceLifecycle\";\n\n  // Static method descriptors that strictly reflect the proto.\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> getRegisterServiceMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"RegisterService\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> getRegisterServiceMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> getRegisterServiceMethod;\n    if ((getRegisterServiceMethod = ServiceLifecycleGrpc.getRegisterServiceMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        if ((getRegisterServiceMethod = ServiceLifecycleGrpc.getRegisterServiceMethod) == null) {\n          ServiceLifecycleGrpc.getRegisterServiceMethod = getRegisterServiceMethod =\n    
          io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"RegisterService\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"RegisterService\"))\n              .build();\n        }\n      }\n    }\n    return getRegisterServiceMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getCreateInstanceMetaMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"CreateInstanceMeta\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getCreateInstanceMetaMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest, 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getCreateInstanceMetaMethod;\n    if ((getCreateInstanceMetaMethod = ServiceLifecycleGrpc.getCreateInstanceMetaMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        if ((getCreateInstanceMetaMethod = ServiceLifecycleGrpc.getCreateInstanceMetaMethod) == null) {\n          ServiceLifecycleGrpc.getCreateInstanceMetaMethod = getCreateInstanceMetaMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"CreateInstanceMeta\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"CreateInstanceMeta\"))\n              .build();\n        }\n      }\n    }\n    return getCreateInstanceMetaMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByClientIdMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"DeregisterServiceByClientId\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class,\n      responseType = 
com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByClientIdMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByClientIdMethod;\n    if ((getDeregisterServiceByClientIdMethod = ServiceLifecycleGrpc.getDeregisterServiceByClientIdMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        if ((getDeregisterServiceByClientIdMethod = ServiceLifecycleGrpc.getDeregisterServiceByClientIdMethod) == null) {\n          ServiceLifecycleGrpc.getDeregisterServiceByClientIdMethod = getDeregisterServiceByClientIdMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"DeregisterServiceByClientId\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"DeregisterServiceByClientId\"))\n              .build();\n        }\n      }\n    }\n    return 
getDeregisterServiceByClientIdMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByInstanceIdMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"DeregisterServiceByInstanceId\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByInstanceIdMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> getDeregisterServiceByInstanceIdMethod;\n    if ((getDeregisterServiceByInstanceIdMethod = ServiceLifecycleGrpc.getDeregisterServiceByInstanceIdMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        if ((getDeregisterServiceByInstanceIdMethod = ServiceLifecycleGrpc.getDeregisterServiceByInstanceIdMethod) == null) {\n          ServiceLifecycleGrpc.getDeregisterServiceByInstanceIdMethod = getDeregisterServiceByInstanceIdMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"DeregisterServiceByInstanceId\"))\n              .setSampledToLocalTracing(true)\n              
.setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"DeregisterServiceByInstanceId\"))\n              .build();\n        }\n      }\n    }\n    return getDeregisterServiceByInstanceIdMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceByServiceIdMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"HasOwnedServiceByServiceId\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceByServiceIdMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceByServiceIdMethod;\n    if ((getHasOwnedServiceByServiceIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceByServiceIdMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        if ((getHasOwnedServiceByServiceIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceByServiceIdMethod) == null) {\n          
ServiceLifecycleGrpc.getHasOwnedServiceByServiceIdMethod = getHasOwnedServiceByServiceIdMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"HasOwnedServiceByServiceId\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"HasOwnedServiceByServiceId\"))\n              .build();\n        }\n      }\n    }\n    return getHasOwnedServiceByServiceIdMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByClientIdMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"HasOwnedServiceInstanceByClientId\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByClientIdMethod() {\n    
io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByClientIdMethod;\n    if ((getHasOwnedServiceInstanceByClientIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceInstanceByClientIdMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        if ((getHasOwnedServiceInstanceByClientIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceInstanceByClientIdMethod) == null) {\n          ServiceLifecycleGrpc.getHasOwnedServiceInstanceByClientIdMethod = getHasOwnedServiceInstanceByClientIdMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"HasOwnedServiceInstanceByClientId\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"HasOwnedServiceInstanceByClientId\"))\n              .build();\n        }\n      }\n    }\n    return getHasOwnedServiceInstanceByClientIdMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByInstanceIdMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      
fullMethodName = SERVICE_NAME + '/' + \"HasOwnedServiceInstanceByInstanceId\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByInstanceIdMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceInstanceByInstanceIdMethod;\n    if ((getHasOwnedServiceInstanceByInstanceIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceInstanceByInstanceIdMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        if ((getHasOwnedServiceInstanceByInstanceIdMethod = ServiceLifecycleGrpc.getHasOwnedServiceInstanceByInstanceIdMethod) == null) {\n          ServiceLifecycleGrpc.getHasOwnedServiceInstanceByInstanceIdMethod = getHasOwnedServiceInstanceByInstanceIdMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"HasOwnedServiceInstanceByInstanceId\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  
com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"HasOwnedServiceInstanceByInstanceId\"))\n              .build();\n        }\n      }\n    }\n    return getHasOwnedServiceInstanceByInstanceIdMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceClientMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"HasOwnedServiceClient\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceClientMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> getHasOwnedServiceClientMethod;\n    if ((getHasOwnedServiceClientMethod = ServiceLifecycleGrpc.getHasOwnedServiceClientMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        if ((getHasOwnedServiceClientMethod = ServiceLifecycleGrpc.getHasOwnedServiceClientMethod) == null) {\n          ServiceLifecycleGrpc.getHasOwnedServiceClientMethod = getHasOwnedServiceClientMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>newBuilder()\n              
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"HasOwnedServiceClient\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"HasOwnedServiceClient\"))\n              .build();\n        }\n      }\n    }\n    return getHasOwnedServiceClientMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply> getCountRegisteredServiceMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"CountRegisteredService\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest,\n      com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply> getCountRegisteredServiceMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply> getCountRegisteredServiceMethod;\n    if ((getCountRegisteredServiceMethod = ServiceLifecycleGrpc.getCountRegisteredServiceMethod) == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n       
 if ((getCountRegisteredServiceMethod = ServiceLifecycleGrpc.getCountRegisteredServiceMethod) == null) {\n          ServiceLifecycleGrpc.getCountRegisteredServiceMethod = getCountRegisteredServiceMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest, com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"CountRegisteredService\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceLifecycleMethodDescriptorSupplier(\"CountRegisteredService\"))\n              .build();\n        }\n      }\n    }\n    return getCountRegisteredServiceMethod;\n  }\n\n  /**\n   * Creates a new async stub that supports all call types for the service\n   */\n  public static ServiceLifecycleStub newStub(io.grpc.Channel channel) {\n    io.grpc.stub.AbstractStub.StubFactory<ServiceLifecycleStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ServiceLifecycleStub>() {\n        @java.lang.Override\n        public ServiceLifecycleStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ServiceLifecycleStub(channel, callOptions);\n        }\n      };\n    return ServiceLifecycleStub.newStub(factory, channel);\n  }\n\n  /**\n   * Creates a new blocking-style stub that supports unary and streaming output calls on the service\n   */\n  public static ServiceLifecycleBlockingStub newBlockingStub(\n      
io.grpc.Channel channel) {\n    io.grpc.stub.AbstractStub.StubFactory<ServiceLifecycleBlockingStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ServiceLifecycleBlockingStub>() {\n        @java.lang.Override\n        public ServiceLifecycleBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ServiceLifecycleBlockingStub(channel, callOptions);\n        }\n      };\n    return ServiceLifecycleBlockingStub.newStub(factory, channel);\n  }\n\n  /**\n   * Creates a new ListenableFuture-style stub that supports unary calls on the service\n   */\n  public static ServiceLifecycleFutureStub newFutureStub(\n      io.grpc.Channel channel) {\n    io.grpc.stub.AbstractStub.StubFactory<ServiceLifecycleFutureStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ServiceLifecycleFutureStub>() {\n        @java.lang.Override\n        public ServiceLifecycleFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ServiceLifecycleFutureStub(channel, callOptions);\n        }\n      };\n    return ServiceLifecycleFutureStub.newStub(factory, channel);\n  }\n\n  /**\n   */\n  public interface AsyncService {\n\n    /**\n     */\n    default void registerService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getRegisterServiceMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void createInstanceMeta(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreateInstanceMetaMethod(), 
responseObserver);\n    }\n\n    /**\n     */\n    default void deregisterServiceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDeregisterServiceByClientIdMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void deregisterServiceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDeregisterServiceByInstanceIdMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void hasOwnedServiceByServiceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getHasOwnedServiceByServiceIdMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void hasOwnedServiceInstanceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getHasOwnedServiceInstanceByClientIdMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void hasOwnedServiceInstanceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      
io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getHasOwnedServiceInstanceByInstanceIdMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void hasOwnedServiceClient(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getHasOwnedServiceClientMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void countRegisteredService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCountRegisteredServiceMethod(), responseObserver);\n    }\n  }\n\n  /**\n   * Base class for the server implementation of the service ServiceLifecycle.\n   */\n  public static abstract class ServiceLifecycleImplBase\n      implements io.grpc.BindableService, AsyncService {\n\n    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {\n      return ServiceLifecycleGrpc.bindService(this);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do asynchronous rpc calls to service ServiceLifecycle.\n   */\n  public static final class ServiceLifecycleStub\n      extends io.grpc.stub.AbstractAsyncStub<ServiceLifecycleStub> {\n    private ServiceLifecycleStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ServiceLifecycleStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ServiceLifecycleStub(channel, callOptions);\n    }\n\n    /**\n     */\n    public void registerService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest request,\n  
      io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getRegisterServiceMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void createInstanceMeta(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getCreateInstanceMetaMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void deregisterServiceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getDeregisterServiceByClientIdMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void deregisterServiceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getDeregisterServiceByInstanceIdMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void hasOwnedServiceByServiceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          
getChannel().newCall(getHasOwnedServiceByServiceIdMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void hasOwnedServiceInstanceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getHasOwnedServiceInstanceByClientIdMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void hasOwnedServiceInstanceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getHasOwnedServiceInstanceByInstanceIdMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void hasOwnedServiceClient(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getHasOwnedServiceClientMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void countRegisteredService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getCountRegisteredServiceMethod(), getCallOptions()), request, responseObserver);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do synchronous rpc calls to service 
ServiceLifecycle.\n   */\n  public static final class ServiceLifecycleBlockingStub\n      extends io.grpc.stub.AbstractBlockingStub<ServiceLifecycleBlockingStub> {\n    private ServiceLifecycleBlockingStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ServiceLifecycleBlockingStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ServiceLifecycleBlockingStub(channel, callOptions);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply registerService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getRegisterServiceMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply createInstanceMeta(com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getCreateInstanceMetaMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply deregisterServiceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getDeregisterServiceByClientIdMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply deregisterServiceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getDeregisterServiceByInstanceIdMethod(), 
getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply hasOwnedServiceByServiceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getHasOwnedServiceByServiceIdMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply hasOwnedServiceInstanceByClientId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getHasOwnedServiceInstanceByClientIdMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply hasOwnedServiceInstanceByInstanceId(com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getHasOwnedServiceInstanceByInstanceIdMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply hasOwnedServiceClient(com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getHasOwnedServiceClientMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply countRegisteredService(com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getCountRegisteredServiceMethod(), getCallOptions(), request);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do ListenableFuture-style rpc calls to service 
ServiceLifecycle.\n   */\n  public static final class ServiceLifecycleFutureStub\n      extends io.grpc.stub.AbstractFutureStub<ServiceLifecycleFutureStub> {\n    private ServiceLifecycleFutureStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ServiceLifecycleFutureStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ServiceLifecycleFutureStub(channel, callOptions);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply> registerService(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getRegisterServiceMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> createInstanceMeta(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getCreateInstanceMetaMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> deregisterServiceByClientId(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getDeregisterServiceByClientIdMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply> 
deregisterServiceByInstanceId(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getDeregisterServiceByInstanceIdMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> hasOwnedServiceByServiceId(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getHasOwnedServiceByServiceIdMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> hasOwnedServiceInstanceByClientId(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getHasOwnedServiceInstanceByClientIdMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> hasOwnedServiceInstanceByInstanceId(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getHasOwnedServiceInstanceByInstanceIdMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply> hasOwnedServiceClient(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          
getChannel().newCall(getHasOwnedServiceClientMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply> countRegisteredService(\n        com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getCountRegisteredServiceMethod(), getCallOptions()), request);\n    }\n  }\n\n  private static final int METHODID_REGISTER_SERVICE = 0;\n  private static final int METHODID_CREATE_INSTANCE_META = 1;\n  private static final int METHODID_DEREGISTER_SERVICE_BY_CLIENT_ID = 2;\n  private static final int METHODID_DEREGISTER_SERVICE_BY_INSTANCE_ID = 3;\n  private static final int METHODID_HAS_OWNED_SERVICE_BY_SERVICE_ID = 4;\n  private static final int METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_CLIENT_ID = 5;\n  private static final int METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_INSTANCE_ID = 6;\n  private static final int METHODID_HAS_OWNED_SERVICE_CLIENT = 7;\n  private static final int METHODID_COUNT_REGISTERED_SERVICE = 8;\n\n  private static final class MethodHandlers<Req, Resp> implements\n      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,\n      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,\n      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,\n      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {\n    private final AsyncService serviceImpl;\n    private final int methodId;\n\n    MethodHandlers(AsyncService serviceImpl, int methodId) {\n      this.serviceImpl = serviceImpl;\n      this.methodId = methodId;\n    }\n\n    @java.lang.Override\n    @java.lang.SuppressWarnings(\"unchecked\")\n    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {\n      switch (methodId) {\n        case METHODID_REGISTER_SERVICE:\n          
serviceImpl.registerService((com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply>) responseObserver);\n          break;\n        case METHODID_CREATE_INSTANCE_META:\n          serviceImpl.createInstanceMeta((com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>) responseObserver);\n          break;\n        case METHODID_DEREGISTER_SERVICE_BY_CLIENT_ID:\n          serviceImpl.deregisterServiceByClientId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>) responseObserver);\n          break;\n        case METHODID_DEREGISTER_SERVICE_BY_INSTANCE_ID:\n          serviceImpl.deregisterServiceByInstanceId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>) responseObserver);\n          break;\n        case METHODID_HAS_OWNED_SERVICE_BY_SERVICE_ID:\n          serviceImpl.hasOwnedServiceByServiceId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>) responseObserver);\n          break;\n        case METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_CLIENT_ID:\n          serviceImpl.hasOwnedServiceInstanceByClientId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>) responseObserver);\n        
  break;\n        case METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_INSTANCE_ID:\n          serviceImpl.hasOwnedServiceInstanceByInstanceId((com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>) responseObserver);\n          break;\n        case METHODID_HAS_OWNED_SERVICE_CLIENT:\n          serviceImpl.hasOwnedServiceClient((com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>) responseObserver);\n          break;\n        case METHODID_COUNT_REGISTERED_SERVICE:\n          serviceImpl.countRegisteredService((com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply>) responseObserver);\n          break;\n        default:\n          throw new AssertionError();\n      }\n    }\n\n    @java.lang.Override\n    @java.lang.SuppressWarnings(\"unchecked\")\n    public io.grpc.stub.StreamObserver<Req> invoke(\n        io.grpc.stub.StreamObserver<Resp> responseObserver) {\n      switch (methodId) {\n        default:\n          throw new AssertionError();\n      }\n    }\n  }\n\n  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {\n    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())\n        .addMethod(\n          getRegisterServiceMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.RegisterServiceReply>(\n                service, METHODID_REGISTER_SERVICE)))\n        .addMethod(\n          
getCreateInstanceMetaMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.CreateInstanceMetaRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>(\n                service, METHODID_CREATE_INSTANCE_META)))\n        .addMethod(\n          getDeregisterServiceByClientIdMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>(\n                service, METHODID_DEREGISTER_SERVICE_BY_CLIENT_ID)))\n        .addMethod(\n          getDeregisterServiceByInstanceIdMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyReply>(\n                service, METHODID_DEREGISTER_SERVICE_BY_INSTANCE_ID)))\n        .addMethod(\n          getHasOwnedServiceByServiceIdMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceIdRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>(\n                service, METHODID_HAS_OWNED_SERVICE_BY_SERVICE_ID)))\n        .addMethod(\n          getHasOwnedServiceInstanceByClientIdMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>(\n                service, METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_CLIENT_ID)))\n        
.addMethod(\n          getHasOwnedServiceInstanceByInstanceIdMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.InstanceIdRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>(\n                service, METHODID_HAS_OWNED_SERVICE_INSTANCE_BY_INSTANCE_ID)))\n        .addMethod(\n          getHasOwnedServiceClientMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.ClientIdRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.BoolReply>(\n                service, METHODID_HAS_OWNED_SERVICE_CLIENT)))\n        .addMethod(\n          getCountRegisteredServiceMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.EmptyRequest,\n              com.pinecone.hydra.service.registry.grpc.server.lifecycle.CountReply>(\n                service, METHODID_COUNT_REGISTERED_SERVICE)))\n        .build();\n  }\n\n  private static abstract class ServiceLifecycleBaseDescriptorSupplier\n      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {\n    ServiceLifecycleBaseDescriptorSupplier() {}\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.lifecycle.ServiceLifecycleProto.getDescriptor();\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {\n      return getFileDescriptor().findServiceByName(\"ServiceLifecycle\");\n    }\n  }\n\n  private static final class ServiceLifecycleFileDescriptorSupplier\n      extends 
ServiceLifecycleBaseDescriptorSupplier {\n    ServiceLifecycleFileDescriptorSupplier() {}\n  }\n\n  private static final class ServiceLifecycleMethodDescriptorSupplier\n      extends ServiceLifecycleBaseDescriptorSupplier\n      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {\n    private final java.lang.String methodName;\n\n    ServiceLifecycleMethodDescriptorSupplier(java.lang.String methodName) {\n      this.methodName = methodName;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {\n      return getServiceDescriptor().findMethodByName(methodName);\n    }\n  }\n\n  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;\n\n  public static io.grpc.ServiceDescriptor getServiceDescriptor() {\n    io.grpc.ServiceDescriptor result = serviceDescriptor;\n    if (result == null) {\n      synchronized (ServiceLifecycleGrpc.class) {\n        result = serviceDescriptor;\n        if (result == null) {\n          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)\n              .setSchemaDescriptor(new ServiceLifecycleFileDescriptorSupplier())\n              .addMethod(getRegisterServiceMethod())\n              .addMethod(getCreateInstanceMetaMethod())\n              .addMethod(getDeregisterServiceByClientIdMethod())\n              .addMethod(getDeregisterServiceByInstanceIdMethod())\n              .addMethod(getHasOwnedServiceByServiceIdMethod())\n              .addMethod(getHasOwnedServiceInstanceByClientIdMethod())\n              .addMethod(getHasOwnedServiceInstanceByInstanceIdMethod())\n              .addMethod(getHasOwnedServiceClientMethod())\n              .addMethod(getCountRegisteredServiceMethod())\n              .build();\n        }\n      }\n    }\n    return result;\n  }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/lifecycle/ServiceLifecycleProto.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_lifecycle.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.lifecycle;\n\npublic final class ServiceLifecycleProto {\n  private ServiceLifecycleProto() {}\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistryLite registry) {\n  }\n\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistry registry) {\n    registerAllExtensions(\n        (com.google.protobuf.ExtensionRegistryLite) registry);\n  }\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_RegisterServiceRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_RegisterServiceRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_RegisterServiceReply_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_RegisterServiceReply_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_CreateInstanceMetaRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_CreateInstanceMetaRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_ClientIdRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_ClientIdRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_InstanceIdRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_InstanceIdRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_ServiceIdRequest_descriptor;\n  static final \n    
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_ServiceIdRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_BoolReply_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_BoolReply_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_CountReply_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_CountReply_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_EmptyRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_EmptyRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_EmptyReply_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_EmptyReply_fieldAccessorTable;\n\n  public static com.google.protobuf.Descriptors.FileDescriptor\n      getDescriptor() {\n    return descriptor;\n  }\n  private static  com.google.protobuf.Descriptors.FileDescriptor\n      descriptor;\n  static {\n    java.lang.String[] descriptorData = {\n      \"\\n\\027service_lifecycle.proto\\\"O\\n\\026RegisterSer\" +\n      \"viceRequest\\022\\020\\n\\010clientId\\030\\001 \\001(\\003\\022\\021\\n\\tservice\" +\n      \"Id\\030\\002 \\001(\\t\\022\\020\\n\\010deployId\\030\\003 \\001(\\t\\\"*\\n\\024RegisterSe\" +\n      \"rviceReply\\022\\022\\n\\ninstanceId\\030\\001 \\001(\\t\\\"1\\n\\031Create\" +\n      \"InstanceMetaRequest\\022\\024\\n\\014instanceGuid\\030\\001 \\001(\" +\n      \"\\t\\\"#\\n\\017ClientIdRequest\\022\\020\\n\\010clientId\\030\\001 \\001(\\003\\\"\\'\" +\n      \"\\n\\021InstanceIdRequest\\022\\022\\n\\ninstanceId\\030\\001 \\001(\\t\\\"\" +\n      
\"%\\n\\020ServiceIdRequest\\022\\021\\n\\tserviceId\\030\\001 \\001(\\t\\\"\\032\" +\n      \"\\n\\tBoolReply\\022\\r\\n\\005value\\030\\001 \\001(\\010\\\"\\033\\n\\nCountReply\" +\n      \"\\022\\r\\n\\005value\\030\\001 \\001(\\005\\\"\\016\\n\\014EmptyRequest\\\"\\014\\n\\nEmpty\" +\n      \"Reply2\\307\\004\\n\\020ServiceLifecycle\\022A\\n\\017RegisterSe\" +\n      \"rvice\\022\\027.RegisterServiceRequest\\032\\025.Registe\" +\n      \"rServiceReply\\022<\\n\\022CreateInstanceMeta\\022\\032.Cr\" +\n      \"eateInstanceMetaRequest\\032\\n.BoolReply\\022<\\n\\033D\" +\n      \"eregisterServiceByClientId\\022\\020.ClientIdReq\" +\n      \"uest\\032\\013.EmptyReply\\022@\\n\\035DeregisterServiceBy\" +\n      \"InstanceId\\022\\022.InstanceIdRequest\\032\\013.EmptyRe\" +\n      \"ply\\022;\\n\\032HasOwnedServiceByServiceId\\022\\021.Serv\" +\n      \"iceIdRequest\\032\\n.BoolReply\\022A\\n!HasOwnedServ\" +\n      \"iceInstanceByClientId\\022\\020.ClientIdRequest\\032\" +\n      \"\\n.BoolReply\\022E\\n#HasOwnedServiceInstanceBy\" +\n      \"InstanceId\\022\\022.InstanceIdRequest\\032\\n.BoolRep\" +\n      \"ly\\0225\\n\\025HasOwnedServiceClient\\022\\020.ClientIdRe\" +\n      \"quest\\032\\n.BoolReply\\0224\\n\\026CountRegisteredServ\" +\n      \"ice\\022\\r.EmptyRequest\\032\\013.CountReplyBT\\n9com.p\" +\n      \"inecone.hydra.service.registry.grpc.serv\" +\n      \"er.lifecycleB\\025ServiceLifecycleProtoP\\001b\\006p\" +\n      \"roto3\"\n    };\n    descriptor = com.google.protobuf.Descriptors.FileDescriptor\n      .internalBuildGeneratedFileFrom(descriptorData,\n        new com.google.protobuf.Descriptors.FileDescriptor[] {\n        });\n    internal_static_RegisterServiceRequest_descriptor =\n      getDescriptor().getMessageTypes().get(0);\n    internal_static_RegisterServiceRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_RegisterServiceRequest_descriptor,\n        new 
java.lang.String[] { \"ClientId\", \"ServiceId\", \"DeployId\", });\n    internal_static_RegisterServiceReply_descriptor =\n      getDescriptor().getMessageTypes().get(1);\n    internal_static_RegisterServiceReply_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_RegisterServiceReply_descriptor,\n        new java.lang.String[] { \"InstanceId\", });\n    internal_static_CreateInstanceMetaRequest_descriptor =\n      getDescriptor().getMessageTypes().get(2);\n    internal_static_CreateInstanceMetaRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_CreateInstanceMetaRequest_descriptor,\n        new java.lang.String[] { \"InstanceGuid\", });\n    internal_static_ClientIdRequest_descriptor =\n      getDescriptor().getMessageTypes().get(3);\n    internal_static_ClientIdRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_ClientIdRequest_descriptor,\n        new java.lang.String[] { \"ClientId\", });\n    internal_static_InstanceIdRequest_descriptor =\n      getDescriptor().getMessageTypes().get(4);\n    internal_static_InstanceIdRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_InstanceIdRequest_descriptor,\n        new java.lang.String[] { \"InstanceId\", });\n    internal_static_ServiceIdRequest_descriptor =\n      getDescriptor().getMessageTypes().get(5);\n    internal_static_ServiceIdRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_ServiceIdRequest_descriptor,\n        new java.lang.String[] { \"ServiceId\", });\n    internal_static_BoolReply_descriptor =\n      getDescriptor().getMessageTypes().get(6);\n    internal_static_BoolReply_fieldAccessorTable = new\n      
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_BoolReply_descriptor,\n        new java.lang.String[] { \"Value\", });\n    internal_static_CountReply_descriptor =\n      getDescriptor().getMessageTypes().get(7);\n    internal_static_CountReply_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_CountReply_descriptor,\n        new java.lang.String[] { \"Value\", });\n    internal_static_EmptyRequest_descriptor =\n      getDescriptor().getMessageTypes().get(8);\n    internal_static_EmptyRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_EmptyRequest_descriptor,\n        new java.lang.String[] { });\n    internal_static_EmptyReply_descriptor =\n      getDescriptor().getMessageTypes().get(9);\n    internal_static_EmptyReply_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_EmptyReply_descriptor,\n        new java.lang.String[] { });\n  }\n\n  // @@protoc_insertion_point(outer_class_scope)\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ClientIdRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.ClientIdRequest}\n */\npublic final class ClientIdRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.ClientIdRequest)\n    ClientIdRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use ClientIdRequest.newBuilder() to construct.\n  private ClientIdRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private ClientIdRequest() {\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new ClientIdRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.Builder.class);\n  }\n\n  public static final int CLIENTID_FIELD_NUMBER = 1;\n  private long clientId_ = 0L;\n  /**\n   * <code>int64 clientId = 1;</code>\n   * @return The clientId.\n   */\n  @java.lang.Override\n  public long getClientId() {\n    return clientId_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n   
 byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (clientId_ != 0L) {\n      output.writeInt64(1, clientId_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (clientId_ != 0L) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeInt64Size(1, clientId_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest) obj;\n\n    if (getClientId()\n        != other.getClientId()) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + CLIENTID_FIELD_NUMBER;\n    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(\n        getClientId());\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(\n      
java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(\n      java.io.InputStream input,\n      
com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n       
 ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.ClientIdRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.ClientIdRequest)\n      com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      clientId_ = 0L;\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ClientIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.clientId_ = clientId_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return 
super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.getDefaultInstance()) return this;\n      if (other.getClientId() != 0L) {\n        setClientId(other.getClientId());\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 8: {\n              clientId_ = input.readInt64();\n 
             bitField0_ |= 0x00000001;\n              break;\n            } // case 8\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private long clientId_ ;\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @return The clientId.\n     */\n    @java.lang.Override\n    public long getClientId() {\n      return clientId_;\n    }\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @param value The clientId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setClientId(long value) {\n\n      clientId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>int64 clientId = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearClientId() {\n      bitField0_ = (bitField0_ & ~0x00000001);\n      clientId_ = 0L;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.ClientIdRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.ClientIdRequest)\n  private static final 
com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<ClientIdRequest>\n      PARSER = new com.google.protobuf.AbstractParser<ClientIdRequest>() {\n    @java.lang.Override\n    public ClientIdRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<ClientIdRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<ClientIdRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ClientIdRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface ClientIdRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.ClientIdRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>int64 clientId = 1;</code>\n   * @return The clientId.\n   */\n  long getClientId();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/CreateNewServiceRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.CreateNewServiceRequest}\n */\npublic final class CreateNewServiceRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.CreateNewServiceRequest)\n    CreateNewServiceRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use CreateNewServiceRequest.newBuilder() to construct.\n  private CreateNewServiceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private CreateNewServiceRequest() {\n    parentAppPath_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new CreateNewServiceRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.Builder.class);\n  }\n\n  private int bitField0_;\n  public static final int PARENTAPPPATH_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object parentAppPath_ = \"\";\n  /**\n   * <code>string parentAppPath = 1;</code>\n   * 
@return The parentAppPath.\n   */\n  @java.lang.Override\n  public java.lang.String getParentAppPath() {\n    java.lang.Object ref = parentAppPath_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      parentAppPath_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string parentAppPath = 1;</code>\n   * @return The bytes for parentAppPath.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getParentAppPathBytes() {\n    java.lang.Object ref = parentAppPath_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      parentAppPath_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int META_FIELD_NUMBER = 2;\n  private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO meta_;\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n   * @return Whether the meta field is set.\n   */\n  @java.lang.Override\n  public boolean hasMeta() {\n    return ((bitField0_ & 0x00000001) != 0);\n  }\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n   * @return The meta.\n   */\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta() {\n    return meta_ == null ? com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;\n  }\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n   */\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder() {\n    return meta_ == null ? 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parentAppPath_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parentAppPath_);\n    }\n    if (((bitField0_ & 0x00000001) != 0)) {\n      output.writeMessage(2, getMeta());\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parentAppPath_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parentAppPath_);\n    }\n    if (((bitField0_ & 0x00000001) != 0)) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeMessageSize(2, getMeta());\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest) obj;\n\n    if (!getParentAppPath()\n        .equals(other.getParentAppPath())) return false;\n    if (hasMeta() 
!= other.hasMeta()) return false;\n    if (hasMeta()) {\n      if (!getMeta()\n          .equals(other.getMeta())) return false;\n    }\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + PARENTAPPPATH_FIELD_NUMBER;\n    hash = (53 * hash) + getParentAppPath().hashCode();\n    if (hasMeta()) {\n      hash = (37 * hash) + META_FIELD_NUMBER;\n      hash = (53 * hash) + getMeta().hashCode();\n    }\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static 
com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n     
 throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.CreateNewServiceRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.CreateNewServiceRequest)\n      com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        
internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.newBuilder()\n    private Builder() {\n      maybeForceBuilderInitialization();\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n      maybeForceBuilderInitialization();\n    }\n    private void maybeForceBuilderInitialization() {\n      if (com.google.protobuf.GeneratedMessageV3\n              .alwaysUseFieldBuilders) {\n        getMetaFieldBuilder();\n      }\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      parentAppPath_ = \"\";\n      meta_ = null;\n      if (metaBuilder_ != null) {\n        metaBuilder_.dispose();\n        metaBuilder_ = null;\n      }\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_CreateNewServiceRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest build() {\n      
com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.parentAppPath_ = parentAppPath_;\n      }\n      int to_bitField0_ = 0;\n      if (((from_bitField0_ & 0x00000002) != 0)) {\n        result.meta_ = metaBuilder_ == null\n            ? 
meta_\n            : metaBuilder_.build();\n        to_bitField0_ |= 0x00000001;\n      }\n      result.bitField0_ |= to_bitField0_;\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.getDefaultInstance()) return this;\n      if (!other.getParentAppPath().isEmpty()) {\n        parentAppPath_ = other.parentAppPath_;\n        
bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      if (other.hasMeta()) {\n        mergeMeta(other.getMeta());\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              parentAppPath_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            case 18: {\n              input.readMessage(\n                  getMetaFieldBuilder().getBuilder(),\n                  extensionRegistry);\n              bitField0_ |= 0x00000002;\n              break;\n            } // case 18\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object parentAppPath_ = \"\";\n    /**\n     * <code>string parentAppPath = 1;</code>\n     * @return The parentAppPath.\n     */\n    public java.lang.String getParentAppPath() {\n      
java.lang.Object ref = parentAppPath_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        parentAppPath_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string parentAppPath = 1;</code>\n     * @return The bytes for parentAppPath.\n     */\n    public com.google.protobuf.ByteString\n        getParentAppPathBytes() {\n      java.lang.Object ref = parentAppPath_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        parentAppPath_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string parentAppPath = 1;</code>\n     * @param value The parentAppPath to set.\n     * @return This builder for chaining.\n     */\n    public Builder setParentAppPath(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      parentAppPath_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string parentAppPath = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearParentAppPath() {\n      parentAppPath_ = getDefaultInstance().getParentAppPath();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string parentAppPath = 1;</code>\n     * @param value The bytes for parentAppPath to set.\n     * @return This builder for chaining.\n     */\n    public Builder setParentAppPathBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      parentAppPath_ = 
value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n\n    private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO meta_;\n    private com.google.protobuf.SingleFieldBuilderV3<\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> metaBuilder_;\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     * @return Whether the meta field is set.\n     */\n    public boolean hasMeta() {\n      return ((bitField0_ & 0x00000002) != 0);\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     * @return The meta.\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta() {\n      if (metaBuilder_ == null) {\n        return meta_ == null ? com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;\n      } else {\n        return metaBuilder_.getMessage();\n      }\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     */\n    public Builder setMeta(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) {\n      if (metaBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        meta_ = value;\n      } else {\n        metaBuilder_.setMessage(value);\n      }\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     */\n    public Builder setMeta(\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) {\n      if (metaBuilder_ == null) {\n        meta_ = builderForValue.build();\n      } else {\n        metaBuilder_.setMessage(builderForValue.build());\n      }\n      bitField0_ |= 
0x00000002;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     */\n    public Builder mergeMeta(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) {\n      if (metaBuilder_ == null) {\n        if (((bitField0_ & 0x00000002) != 0) &&\n          meta_ != null &&\n          meta_ != com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance()) {\n          getMetaBuilder().mergeFrom(value);\n        } else {\n          meta_ = value;\n        }\n      } else {\n        metaBuilder_.mergeFrom(value);\n      }\n      if (meta_ != null) {\n        bitField0_ |= 0x00000002;\n        onChanged();\n      }\n      return this;\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     */\n    public Builder clearMeta() {\n      bitField0_ = (bitField0_ & ~0x00000002);\n      meta_ = null;\n      if (metaBuilder_ != null) {\n        metaBuilder_.dispose();\n        metaBuilder_ = null;\n      }\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder getMetaBuilder() {\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return getMetaFieldBuilder().getBuilder();\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder() {\n      if (metaBuilder_ != null) {\n        return metaBuilder_.getMessageOrBuilder();\n      } else {\n        return meta_ == null ?\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;\n      }\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n     */\n    private com.google.protobuf.SingleFieldBuilderV3<\n        
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> \n        getMetaFieldBuilder() {\n      if (metaBuilder_ == null) {\n        metaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder>(\n                getMeta(),\n                getParentForChildren(),\n                isClean());\n        meta_ = null;\n      }\n      return metaBuilder_;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.CreateNewServiceRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.CreateNewServiceRequest)\n  private static final com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<CreateNewServiceRequest>\n      PARSER = new com.google.protobuf.AbstractParser<CreateNewServiceRequest>() {\n    @java.lang.Override\n    public CreateNewServiceRequest parsePartialFrom(\n        
com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<CreateNewServiceRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<CreateNewServiceRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/CreateNewServiceRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface CreateNewServiceRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.CreateNewServiceRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string parentAppPath = 1;</code>\n   * @return The parentAppPath.\n   */\n  java.lang.String getParentAppPath();\n  /**\n   * <code>string parentAppPath = 1;</code>\n   * @return The bytes for parentAppPath.\n   */\n  com.google.protobuf.ByteString\n      getParentAppPathBytes();\n\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n   * @return Whether the meta field is set.\n   */\n  boolean hasMeta();\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n   * @return The meta.\n   */\n  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta();\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 2;</code>\n   */\n  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/EvalRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.EvalRequest}\n */\npublic final class EvalRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.EvalRequest)\n    EvalRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use EvalRequest.newBuilder() to construct.\n  private EvalRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private EvalRequest() {\n    jsonStatement_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new EvalRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.Builder.class);\n  }\n\n  public static final int JSONSTATEMENT_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object jsonStatement_ = \"\";\n  /**\n   * <code>string jsonStatement = 1;</code>\n   * @return The jsonStatement.\n   */\n  @java.lang.Override\n  public java.lang.String getJsonStatement() {\n    java.lang.Object ref = jsonStatement_;\n    if (ref instanceof 
java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      jsonStatement_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string jsonStatement = 1;</code>\n   * @return The bytes for jsonStatement.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getJsonStatementBytes() {\n    java.lang.Object ref = jsonStatement_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      jsonStatement_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(jsonStatement_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, jsonStatement_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(jsonStatement_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, jsonStatement_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if 
(obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest) obj;\n\n    if (!getJsonStatement()\n        .equals(other.getJsonStatement())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + JSONSTATEMENT_FIELD_NUMBER;\n    hash = (53 * hash) + getJsonStatement().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws 
com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(\n      
com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.EvalRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.EvalRequest)\n      com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      jsonStatement_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_EvalRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.jsonStatement_ = jsonStatement_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    
}\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.getDefaultInstance()) return this;\n      if (!other.getJsonStatement().isEmpty()) {\n        jsonStatement_ = other.jsonStatement_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              
jsonStatement_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object jsonStatement_ = \"\";\n    /**\n     * <code>string jsonStatement = 1;</code>\n     * @return The jsonStatement.\n     */\n    public java.lang.String getJsonStatement() {\n      java.lang.Object ref = jsonStatement_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        jsonStatement_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string jsonStatement = 1;</code>\n     * @return The bytes for jsonStatement.\n     */\n    public com.google.protobuf.ByteString\n        getJsonStatementBytes() {\n      java.lang.Object ref = jsonStatement_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        jsonStatement_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string jsonStatement = 1;</code>\n     * @param value The jsonStatement to set.\n     * @return This builder for chaining.\n     */\n    public Builder setJsonStatement(\n        java.lang.String value) {\n      if 
(value == null) { throw new NullPointerException(); }\n      jsonStatement_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string jsonStatement = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearJsonStatement() {\n      jsonStatement_ = getDefaultInstance().getJsonStatement();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string jsonStatement = 1;</code>\n     * @param value The bytes for jsonStatement to set.\n     * @return This builder for chaining.\n     */\n    public Builder setJsonStatementBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      jsonStatement_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.EvalRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.EvalRequest)\n  private static final com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<EvalRequest>\n      PARSER = new com.google.protobuf.AbstractParser<EvalRequest>() 
{\n    @java.lang.Override\n    public EvalRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<EvalRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<EvalRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/EvalRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface EvalRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.EvalRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string jsonStatement = 1;</code>\n   * @return The jsonStatement.\n   */\n  java.lang.String getJsonStatement();\n  /**\n   * <code>string jsonStatement = 1;</code>\n   * @return The bytes for jsonStatement.\n   */\n  com.google.protobuf.ByteString\n      getJsonStatementBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/GuidRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.GuidRequest}\n */\npublic final class GuidRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.GuidRequest)\n    GuidRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use GuidRequest.newBuilder() to construct.\n  private GuidRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private GuidRequest() {\n    guid_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new GuidRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.Builder.class);\n  }\n\n  public static final int GUID_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object guid_ = \"\";\n  /**\n   * <code>string guid = 1;</code>\n   * @return The guid.\n   */\n  @java.lang.Override\n  public java.lang.String getGuid() {\n    java.lang.Object ref = guid_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    
} else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      guid_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string guid = 1;</code>\n   * @return The bytes for guid.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getGuidBytes() {\n    java.lang.Object ref = guid_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      guid_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(guid_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, guid_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(guid_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, guid_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest)) {\n      return 
super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest) obj;\n\n    if (!getGuid()\n        .equals(other.getGuid())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + GUID_FIELD_NUMBER;\n    hash = (53 * hash) + getGuid().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(byte[] data)\n      
throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static 
com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.GuidRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.GuidRequest)\n      com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n     
         com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      guid_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_GuidRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 
0x00000001) != 0)) {\n        result.guid_ = guid_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.getDefaultInstance()) return this;\n      if (!other.getGuid().isEmpty()) {\n        guid_ = other.guid_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return 
this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              guid_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object guid_ = \"\";\n    /**\n     * <code>string guid = 1;</code>\n     * @return The guid.\n     */\n    public java.lang.String getGuid() {\n      java.lang.Object ref = guid_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        guid_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string guid = 1;</code>\n     * @return The bytes for guid.\n     */\n    public com.google.protobuf.ByteString\n        getGuidBytes() {\n      
java.lang.Object ref = guid_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        guid_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string guid = 1;</code>\n     * @param value The guid to set.\n     * @return This builder for chaining.\n     */\n    public Builder setGuid(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      guid_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string guid = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearGuid() {\n      guid_ = getDefaultInstance().getGuid();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string guid = 1;</code>\n     * @param value The bytes for guid to set.\n     * @return This builder for chaining.\n     */\n    public Builder setGuidBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      guid_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.GuidRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.GuidRequest)\n  private static final 
com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<GuidRequest>\n      PARSER = new com.google.protobuf.AbstractParser<GuidRequest>() {\n    @java.lang.Override\n    public GuidRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<GuidRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<GuidRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/GuidRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface GuidRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.GuidRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string guid = 1;</code>\n   * @return The guid.\n   */\n  java.lang.String getGuid();\n  /**\n   * <code>string guid = 1;</code>\n   * @return The bytes for guid.\n   */\n  com.google.protobuf.ByteString\n      getGuidBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/PathRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.PathRequest}\n */\npublic final class PathRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.PathRequest)\n    PathRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use PathRequest.newBuilder() to construct.\n  private PathRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private PathRequest() {\n    path_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new PathRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.Builder.class);\n  }\n\n  public static final int PATH_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object path_ = \"\";\n  /**\n   * <code>string path = 1;</code>\n   * @return The path.\n   */\n  @java.lang.Override\n  public java.lang.String getPath() {\n    java.lang.Object ref = path_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    
} else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      path_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string path = 1;</code>\n   * @return The bytes for path.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getPathBytes() {\n    java.lang.Object ref = path_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      path_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, path_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, path_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest)) {\n      return 
super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest) obj;\n\n    if (!getPath()\n        .equals(other.getPath())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + PATH_FIELD_NUMBER;\n    hash = (53 * hash) + getPath().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(byte[] data)\n      
throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static 
com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.PathRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.PathRequest)\n      com.pinecone.hydra.service.registry.grpc.server.meta.PathRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n     
         com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      path_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_PathRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 
0x00000001) != 0)) {\n        result.path_ = path_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.getDefaultInstance()) return this;\n      if (!other.getPath().isEmpty()) {\n        path_ = other.path_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return 
this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              path_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object path_ = \"\";\n    /**\n     * <code>string path = 1;</code>\n     * @return The path.\n     */\n    public java.lang.String getPath() {\n      java.lang.Object ref = path_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        path_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string path = 1;</code>\n     * @return The bytes for path.\n     */\n    public com.google.protobuf.ByteString\n        getPathBytes() {\n      
java.lang.Object ref = path_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        path_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string path = 1;</code>\n     * @param value The path to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPath(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      path_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string path = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearPath() {\n      path_ = getDefaultInstance().getPath();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string path = 1;</code>\n     * @param value The bytes for path to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPathBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      path_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.PathRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.PathRequest)\n  private static final 
com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<PathRequest>\n      PARSER = new com.google.protobuf.AbstractParser<PathRequest>() {\n    @java.lang.Override\n    public PathRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<PathRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<PathRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/PathRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface PathRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.PathRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string path = 1;</code>\n   * @return The path.\n   */\n  java.lang.String getPath();\n  /**\n   * <code>string path = 1;</code>\n   * @return The bytes for path.\n   */\n  com.google.protobuf.ByteString\n      getPathBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceIdRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.ServiceIdRequest}\n */\npublic final class ServiceIdRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.ServiceIdRequest)\n    ServiceIdRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use ServiceIdRequest.newBuilder() to construct.\n  private ServiceIdRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private ServiceIdRequest() {\n    serviceId_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new ServiceIdRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.Builder.class);\n  }\n\n  public static final int SERVICEID_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object serviceId_ = \"\";\n  /**\n   * <code>string serviceId = 1;</code>\n   * @return The serviceId.\n   */\n  @java.lang.Override\n  public java.lang.String getServiceId() {\n    java.lang.Object ref = 
serviceId_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      serviceId_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string serviceId = 1;</code>\n   * @return The bytes for serviceId.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getServiceIdBytes() {\n    java.lang.Object ref = serviceId_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      serviceId_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, serviceId_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(serviceId_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, serviceId_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj 
== this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest other = (com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest) obj;\n\n    if (!getServiceId()\n        .equals(other.getServiceId())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + SERVICEID_FIELD_NUMBER;\n    hash = (53 * hash) + getServiceId().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws 
com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.ServiceIdRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.ServiceIdRequest)\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      serviceId_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceIdRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest result = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.serviceId_ = serviceId_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      
return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.getDefaultInstance()) return this;\n      if (!other.getServiceId().isEmpty()) {\n        serviceId_ = other.serviceId_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              
break;\n            case 10: {\n              serviceId_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object serviceId_ = \"\";\n    /**\n     * <code>string serviceId = 1;</code>\n     * @return The serviceId.\n     */\n    public java.lang.String getServiceId() {\n      java.lang.Object ref = serviceId_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        serviceId_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string serviceId = 1;</code>\n     * @return The bytes for serviceId.\n     */\n    public com.google.protobuf.ByteString\n        getServiceIdBytes() {\n      java.lang.Object ref = serviceId_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        serviceId_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string serviceId = 1;</code>\n     * @param value The serviceId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setServiceId(\n        java.lang.String value) {\n      if (value == null) { 
throw new NullPointerException(); }\n      serviceId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string serviceId = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearServiceId() {\n      serviceId_ = getDefaultInstance().getServiceId();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string serviceId = 1;</code>\n     * @param value The bytes for serviceId to set.\n     * @return This builder for chaining.\n     */\n    public Builder setServiceIdBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      serviceId_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.ServiceIdRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.ServiceIdRequest)\n  private static final com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<ServiceIdRequest>\n      PARSER = new com.google.protobuf.AbstractParser<ServiceIdRequest>() {\n    
@java.lang.Override\n    public ServiceIdRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<ServiceIdRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<ServiceIdRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceIdRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface ServiceIdRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.ServiceIdRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string serviceId = 1;</code>\n   * @return The serviceId.\n   */\n  java.lang.String getServiceId();\n  /**\n   * <code>string serviceId = 1;</code>\n   * @return The bytes for serviceId.\n   */\n  com.google.protobuf.ByteString\n      getServiceIdBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTO.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.ServiceMetaDTO}\n */\npublic final class ServiceMetaDTO extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.ServiceMetaDTO)\n    ServiceMetaDTOOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use ServiceMetaDTO.newBuilder() to construct.\n  private ServiceMetaDTO(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private ServiceMetaDTO() {\n    guid_ = \"\";\n    name_ = \"\";\n    type_ = \"\";\n    displayName_ = \"\";\n    description_ = \"\";\n    fullName_ = \"\";\n    groupNamespace_ = \"\";\n    groupName_ = \"\";\n    scenario_ = \"\";\n    primaryImplLang_ = \"\";\n    extraInformation_ = \"\";\n    level_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new ServiceMetaDTO();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder.class);\n  }\n\n  public static final int GUID_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  
private volatile java.lang.Object guid_ = \"\";\n  /**\n   * <code>string guid = 1;</code>\n   * @return The guid.\n   */\n  @java.lang.Override\n  public java.lang.String getGuid() {\n    java.lang.Object ref = guid_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      guid_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string guid = 1;</code>\n   * @return The bytes for guid.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getGuidBytes() {\n    java.lang.Object ref = guid_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      guid_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int NAME_FIELD_NUMBER = 2;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object name_ = \"\";\n  /**\n   * <code>string name = 2;</code>\n   * @return The name.\n   */\n  @java.lang.Override\n  public java.lang.String getName() {\n    java.lang.Object ref = name_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      name_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string name = 2;</code>\n   * @return The bytes for name.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getNameBytes() {\n    java.lang.Object ref = name_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      name_ = 
b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int TYPE_FIELD_NUMBER = 3;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object type_ = \"\";\n  /**\n   * <code>string type = 3;</code>\n   * @return The type.\n   */\n  @java.lang.Override\n  public java.lang.String getType() {\n    java.lang.Object ref = type_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      type_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string type = 3;</code>\n   * @return The bytes for type.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getTypeBytes() {\n    java.lang.Object ref = type_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      type_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int DISPLAYNAME_FIELD_NUMBER = 4;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object displayName_ = \"\";\n  /**\n   * <code>string displayName = 4;</code>\n   * @return The displayName.\n   */\n  @java.lang.Override\n  public java.lang.String getDisplayName() {\n    java.lang.Object ref = displayName_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      displayName_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string displayName = 4;</code>\n   * @return The bytes for displayName.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      
getDisplayNameBytes() {\n    java.lang.Object ref = displayName_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      displayName_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int DESCRIPTION_FIELD_NUMBER = 5;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object description_ = \"\";\n  /**\n   * <code>string description = 5;</code>\n   * @return The description.\n   */\n  @java.lang.Override\n  public java.lang.String getDescription() {\n    java.lang.Object ref = description_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      description_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string description = 5;</code>\n   * @return The bytes for description.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getDescriptionBytes() {\n    java.lang.Object ref = description_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      description_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int FULLNAME_FIELD_NUMBER = 6;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object fullName_ = \"\";\n  /**\n   * <code>string fullName = 6;</code>\n   * @return The fullName.\n   */\n  @java.lang.Override\n  public java.lang.String getFullName() {\n    java.lang.Object ref = fullName_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      
com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      fullName_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string fullName = 6;</code>\n   * @return The bytes for fullName.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getFullNameBytes() {\n    java.lang.Object ref = fullName_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      fullName_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int GROUPNAMESPACE_FIELD_NUMBER = 7;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object groupNamespace_ = \"\";\n  /**\n   * <code>string groupNamespace = 7;</code>\n   * @return The groupNamespace.\n   */\n  @java.lang.Override\n  public java.lang.String getGroupNamespace() {\n    java.lang.Object ref = groupNamespace_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      groupNamespace_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string groupNamespace = 7;</code>\n   * @return The bytes for groupNamespace.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getGroupNamespaceBytes() {\n    java.lang.Object ref = groupNamespace_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      groupNamespace_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int GROUPNAME_FIELD_NUMBER = 8;\n  
@SuppressWarnings(\"serial\")\n  private volatile java.lang.Object groupName_ = \"\";\n  /**\n   * <code>string groupName = 8;</code>\n   * @return The groupName.\n   */\n  @java.lang.Override\n  public java.lang.String getGroupName() {\n    java.lang.Object ref = groupName_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      groupName_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string groupName = 8;</code>\n   * @return The bytes for groupName.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getGroupNameBytes() {\n    java.lang.Object ref = groupName_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      groupName_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int SCENARIO_FIELD_NUMBER = 9;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object scenario_ = \"\";\n  /**\n   * <code>string scenario = 9;</code>\n   * @return The scenario.\n   */\n  @java.lang.Override\n  public java.lang.String getScenario() {\n    java.lang.Object ref = scenario_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      scenario_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string scenario = 9;</code>\n   * @return The bytes for scenario.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getScenarioBytes() {\n    java.lang.Object ref = scenario_;\n    if (ref instanceof java.lang.String) {\n      
com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      scenario_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int PRIMARYIMPLLANG_FIELD_NUMBER = 10;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object primaryImplLang_ = \"\";\n  /**\n   * <code>string primaryImplLang = 10;</code>\n   * @return The primaryImplLang.\n   */\n  @java.lang.Override\n  public java.lang.String getPrimaryImplLang() {\n    java.lang.Object ref = primaryImplLang_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      primaryImplLang_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string primaryImplLang = 10;</code>\n   * @return The bytes for primaryImplLang.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getPrimaryImplLangBytes() {\n    java.lang.Object ref = primaryImplLang_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      primaryImplLang_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int EXTRAINFORMATION_FIELD_NUMBER = 11;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object extraInformation_ = \"\";\n  /**\n   * <code>string extraInformation = 11;</code>\n   * @return The extraInformation.\n   */\n  @java.lang.Override\n  public java.lang.String getExtraInformation() {\n    java.lang.Object ref = extraInformation_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n 
         (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      extraInformation_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string extraInformation = 11;</code>\n   * @return The bytes for extraInformation.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getExtraInformationBytes() {\n    java.lang.Object ref = extraInformation_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      extraInformation_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int LEVEL_FIELD_NUMBER = 12;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object level_ = \"\";\n  /**\n   * <code>string level = 12;</code>\n   * @return The level.\n   */\n  @java.lang.Override\n  public java.lang.String getLevel() {\n    java.lang.Object ref = level_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      level_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string level = 12;</code>\n   * @return The bytes for level.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getLevelBytes() {\n    java.lang.Object ref = level_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      level_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if 
(isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(guid_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, guid_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, type_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, displayName_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, description_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fullName_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 6, fullName_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupNamespace_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 7, groupNamespace_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupName_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 8, groupName_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(scenario_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 9, scenario_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(primaryImplLang_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 10, primaryImplLang_);\n    }\n    if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(extraInformation_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 11, extraInformation_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(level_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 12, level_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(guid_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, guid_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, type_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, displayName_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, description_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(fullName_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(6, fullName_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupNamespace_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(7, groupNamespace_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(groupName_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(8, groupName_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(scenario_)) {\n      size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(9, scenario_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(primaryImplLang_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(10, primaryImplLang_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(extraInformation_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(11, extraInformation_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(level_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(12, level_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO other = (com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO) obj;\n\n    if (!getGuid()\n        .equals(other.getGuid())) return false;\n    if (!getName()\n        .equals(other.getName())) return false;\n    if (!getType()\n        .equals(other.getType())) return false;\n    if (!getDisplayName()\n        .equals(other.getDisplayName())) return false;\n    if (!getDescription()\n        .equals(other.getDescription())) return false;\n    if (!getFullName()\n        .equals(other.getFullName())) return false;\n    if (!getGroupNamespace()\n        .equals(other.getGroupNamespace())) return false;\n    if (!getGroupName()\n        .equals(other.getGroupName())) return false;\n    if (!getScenario()\n        .equals(other.getScenario())) return false;\n    if (!getPrimaryImplLang()\n        .equals(other.getPrimaryImplLang())) return false;\n    if (!getExtraInformation()\n        
.equals(other.getExtraInformation())) return false;\n    if (!getLevel()\n        .equals(other.getLevel())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + GUID_FIELD_NUMBER;\n    hash = (53 * hash) + getGuid().hashCode();\n    hash = (37 * hash) + NAME_FIELD_NUMBER;\n    hash = (53 * hash) + getName().hashCode();\n    hash = (37 * hash) + TYPE_FIELD_NUMBER;\n    hash = (53 * hash) + getType().hashCode();\n    hash = (37 * hash) + DISPLAYNAME_FIELD_NUMBER;\n    hash = (53 * hash) + getDisplayName().hashCode();\n    hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;\n    hash = (53 * hash) + getDescription().hashCode();\n    hash = (37 * hash) + FULLNAME_FIELD_NUMBER;\n    hash = (53 * hash) + getFullName().hashCode();\n    hash = (37 * hash) + GROUPNAMESPACE_FIELD_NUMBER;\n    hash = (53 * hash) + getGroupNamespace().hashCode();\n    hash = (37 * hash) + GROUPNAME_FIELD_NUMBER;\n    hash = (53 * hash) + getGroupName().hashCode();\n    hash = (37 * hash) + SCENARIO_FIELD_NUMBER;\n    hash = (53 * hash) + getScenario().hashCode();\n    hash = (37 * hash) + PRIMARYIMPLLANG_FIELD_NUMBER;\n    hash = (53 * hash) + getPrimaryImplLang().hashCode();\n    hash = (37 * hash) + EXTRAINFORMATION_FIELD_NUMBER;\n    hash = (53 * hash) + getExtraInformation().hashCode();\n    hash = (37 * hash) + LEVEL_FIELD_NUMBER;\n    hash = (53 * hash) + getLevel().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return 
PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return 
com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.ServiceMetaDTO}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.ServiceMetaDTO)\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      guid_ = \"\";\n      name_ = \"\";\n      type_ = \"\";\n      displayName_ = \"\";\n      description_ = \"\";\n      fullName_ = \"\";\n      groupNamespace_ = \"\";\n      groupName_ = \"\";\n      scenario_ = \"\";\n      primaryImplLang_ = 
\"\";\n      extraInformation_ = \"\";\n      level_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTO_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO result = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.guid_ = guid_;\n      }\n      if (((from_bitField0_ & 0x00000002) != 0)) {\n        result.name_ = name_;\n      }\n      if (((from_bitField0_ & 0x00000004) != 0)) {\n        result.type_ = type_;\n      }\n      if (((from_bitField0_ & 0x00000008) != 0)) {\n        result.displayName_ = displayName_;\n      }\n      if (((from_bitField0_ & 0x00000010) != 0)) {\n        result.description_ = description_;\n      }\n      if 
(((from_bitField0_ & 0x00000020) != 0)) {\n        result.fullName_ = fullName_;\n      }\n      if (((from_bitField0_ & 0x00000040) != 0)) {\n        result.groupNamespace_ = groupNamespace_;\n      }\n      if (((from_bitField0_ & 0x00000080) != 0)) {\n        result.groupName_ = groupName_;\n      }\n      if (((from_bitField0_ & 0x00000100) != 0)) {\n        result.scenario_ = scenario_;\n      }\n      if (((from_bitField0_ & 0x00000200) != 0)) {\n        result.primaryImplLang_ = primaryImplLang_;\n      }\n      if (((from_bitField0_ & 0x00000400) != 0)) {\n        result.extraInformation_ = extraInformation_;\n      }\n      if (((from_bitField0_ & 0x00000800) != 0)) {\n        result.level_ = level_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance()) return this;\n      if (!other.getGuid().isEmpty()) {\n        guid_ = other.guid_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      if (!other.getName().isEmpty()) {\n        name_ = other.name_;\n        bitField0_ |= 0x00000002;\n        onChanged();\n      }\n      if (!other.getType().isEmpty()) {\n        type_ = other.type_;\n        bitField0_ |= 0x00000004;\n        onChanged();\n      }\n      if (!other.getDisplayName().isEmpty()) {\n        displayName_ = other.displayName_;\n        bitField0_ |= 0x00000008;\n        onChanged();\n      }\n      if (!other.getDescription().isEmpty()) {\n        description_ = other.description_;\n        bitField0_ |= 0x00000010;\n        onChanged();\n      }\n      if (!other.getFullName().isEmpty()) {\n        fullName_ = other.fullName_;\n        bitField0_ |= 0x00000020;\n        onChanged();\n      }\n      if (!other.getGroupNamespace().isEmpty()) {\n        groupNamespace_ = other.groupNamespace_;\n        bitField0_ |= 0x00000040;\n        onChanged();\n      }\n      if (!other.getGroupName().isEmpty()) {\n        groupName_ = other.groupName_;\n        bitField0_ |= 0x00000080;\n        onChanged();\n      }\n      if (!other.getScenario().isEmpty()) {\n        scenario_ = other.scenario_;\n        bitField0_ |= 0x00000100;\n        onChanged();\n      }\n      if (!other.getPrimaryImplLang().isEmpty()) {\n        primaryImplLang_ = other.primaryImplLang_;\n        bitField0_ |= 0x00000200;\n        onChanged();\n 
     }\n      if (!other.getExtraInformation().isEmpty()) {\n        extraInformation_ = other.extraInformation_;\n        bitField0_ |= 0x00000400;\n        onChanged();\n      }\n      if (!other.getLevel().isEmpty()) {\n        level_ = other.level_;\n        bitField0_ |= 0x00000800;\n        onChanged();\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              guid_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            case 18: {\n              name_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000002;\n              break;\n            } // case 18\n            case 26: {\n              type_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000004;\n              break;\n            } // case 26\n            case 34: {\n              displayName_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000008;\n              break;\n            } // case 34\n            case 42: {\n              description_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000010;\n              break;\n            } // case 42\n            case 50: {\n              fullName_ = input.readStringRequireUtf8();\n              
bitField0_ |= 0x00000020;\n              break;\n            } // case 50\n            case 58: {\n              groupNamespace_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000040;\n              break;\n            } // case 58\n            case 66: {\n              groupName_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000080;\n              break;\n            } // case 66\n            case 74: {\n              scenario_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000100;\n              break;\n            } // case 74\n            case 82: {\n              primaryImplLang_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000200;\n              break;\n            } // case 82\n            case 90: {\n              extraInformation_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000400;\n              break;\n            } // case 90\n            case 98: {\n              level_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000800;\n              break;\n            } // case 98\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object guid_ = \"\";\n    /**\n     * <code>string guid = 1;</code>\n     * @return The guid.\n     */\n    public java.lang.String getGuid() {\n      java.lang.Object ref = guid_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        
java.lang.String s = bs.toStringUtf8();\n        guid_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string guid = 1;</code>\n     * @return The bytes for guid.\n     */\n    public com.google.protobuf.ByteString\n        getGuidBytes() {\n      java.lang.Object ref = guid_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        guid_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string guid = 1;</code>\n     * @param value The guid to set.\n     * @return This builder for chaining.\n     */\n    public Builder setGuid(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      guid_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string guid = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearGuid() {\n      guid_ = getDefaultInstance().getGuid();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string guid = 1;</code>\n     * @param value The bytes for guid to set.\n     * @return This builder for chaining.\n     */\n    public Builder setGuidBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      guid_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object name_ = \"\";\n    /**\n     * <code>string name = 2;</code>\n     * @return The name.\n     */\n    public java.lang.String getName() {\n      java.lang.Object ref = name_;\n      if (!(ref instanceof java.lang.String)) {\n        
com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        name_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string name = 2;</code>\n     * @return The bytes for name.\n     */\n    public com.google.protobuf.ByteString\n        getNameBytes() {\n      java.lang.Object ref = name_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        name_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string name = 2;</code>\n     * @param value The name to set.\n     * @return This builder for chaining.\n     */\n    public Builder setName(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      name_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string name = 2;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearName() {\n      name_ = getDefaultInstance().getName();\n      bitField0_ = (bitField0_ & ~0x00000002);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string name = 2;</code>\n     * @param value The bytes for name to set.\n     * @return This builder for chaining.\n     */\n    public Builder setNameBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      name_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object type_ = \"\";\n    /**\n     * <code>string type = 3;</code>\n     * @return The type.\n     */\n    public java.lang.String getType() {\n    
  java.lang.Object ref = type_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        type_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string type = 3;</code>\n     * @return The bytes for type.\n     */\n    public com.google.protobuf.ByteString\n        getTypeBytes() {\n      java.lang.Object ref = type_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        type_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string type = 3;</code>\n     * @param value The type to set.\n     * @return This builder for chaining.\n     */\n    public Builder setType(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      type_ = value;\n      bitField0_ |= 0x00000004;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string type = 3;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearType() {\n      type_ = getDefaultInstance().getType();\n      bitField0_ = (bitField0_ & ~0x00000004);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string type = 3;</code>\n     * @param value The bytes for type to set.\n     * @return This builder for chaining.\n     */\n    public Builder setTypeBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      type_ = value;\n      bitField0_ |= 0x00000004;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object displayName_ = \"\";\n    /**\n     * <code>string 
displayName = 4;</code>\n     * @return The displayName.\n     */\n    public java.lang.String getDisplayName() {\n      java.lang.Object ref = displayName_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        displayName_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string displayName = 4;</code>\n     * @return The bytes for displayName.\n     */\n    public com.google.protobuf.ByteString\n        getDisplayNameBytes() {\n      java.lang.Object ref = displayName_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        displayName_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string displayName = 4;</code>\n     * @param value The displayName to set.\n     * @return This builder for chaining.\n     */\n    public Builder setDisplayName(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      displayName_ = value;\n      bitField0_ |= 0x00000008;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string displayName = 4;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearDisplayName() {\n      displayName_ = getDefaultInstance().getDisplayName();\n      bitField0_ = (bitField0_ & ~0x00000008);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string displayName = 4;</code>\n     * @param value The bytes for displayName to set.\n     * @return This builder for chaining.\n     */\n    public Builder setDisplayNameBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new 
NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      displayName_ = value;\n      bitField0_ |= 0x00000008;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object description_ = \"\";\n    /**\n     * <code>string description = 5;</code>\n     * @return The description.\n     */\n    public java.lang.String getDescription() {\n      java.lang.Object ref = description_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        description_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string description = 5;</code>\n     * @return The bytes for description.\n     */\n    public com.google.protobuf.ByteString\n        getDescriptionBytes() {\n      java.lang.Object ref = description_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        description_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string description = 5;</code>\n     * @param value The description to set.\n     * @return This builder for chaining.\n     */\n    public Builder setDescription(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      description_ = value;\n      bitField0_ |= 0x00000010;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string description = 5;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearDescription() {\n      description_ = getDefaultInstance().getDescription();\n      bitField0_ = (bitField0_ & ~0x00000010);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string 
description = 5;</code>\n     * @param value The bytes for description to set.\n     * @return This builder for chaining.\n     */\n    public Builder setDescriptionBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      description_ = value;\n      bitField0_ |= 0x00000010;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object fullName_ = \"\";\n    /**\n     * <code>string fullName = 6;</code>\n     * @return The fullName.\n     */\n    public java.lang.String getFullName() {\n      java.lang.Object ref = fullName_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        fullName_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string fullName = 6;</code>\n     * @return The bytes for fullName.\n     */\n    public com.google.protobuf.ByteString\n        getFullNameBytes() {\n      java.lang.Object ref = fullName_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        fullName_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string fullName = 6;</code>\n     * @param value The fullName to set.\n     * @return This builder for chaining.\n     */\n    public Builder setFullName(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      fullName_ = value;\n      bitField0_ |= 0x00000020;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string fullName = 6;</code>\n     * @return This builder for chaining.\n     */\n    public Builder 
clearFullName() {\n      fullName_ = getDefaultInstance().getFullName();\n      bitField0_ = (bitField0_ & ~0x00000020);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string fullName = 6;</code>\n     * @param value The bytes for fullName to set.\n     * @return This builder for chaining.\n     */\n    public Builder setFullNameBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      fullName_ = value;\n      bitField0_ |= 0x00000020;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object groupNamespace_ = \"\";\n    /**\n     * <code>string groupNamespace = 7;</code>\n     * @return The groupNamespace.\n     */\n    public java.lang.String getGroupNamespace() {\n      java.lang.Object ref = groupNamespace_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        groupNamespace_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string groupNamespace = 7;</code>\n     * @return The bytes for groupNamespace.\n     */\n    public com.google.protobuf.ByteString\n        getGroupNamespaceBytes() {\n      java.lang.Object ref = groupNamespace_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        groupNamespace_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string groupNamespace = 7;</code>\n     * @param value The groupNamespace to set.\n     * @return This builder for chaining.\n     */\n    public Builder setGroupNamespace(\n        java.lang.String value) {\n      if (value == null) 
{ throw new NullPointerException(); }\n      groupNamespace_ = value;\n      bitField0_ |= 0x00000040;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string groupNamespace = 7;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearGroupNamespace() {\n      groupNamespace_ = getDefaultInstance().getGroupNamespace();\n      bitField0_ = (bitField0_ & ~0x00000040);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string groupNamespace = 7;</code>\n     * @param value The bytes for groupNamespace to set.\n     * @return This builder for chaining.\n     */\n    public Builder setGroupNamespaceBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      groupNamespace_ = value;\n      bitField0_ |= 0x00000040;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object groupName_ = \"\";\n    /**\n     * <code>string groupName = 8;</code>\n     * @return The groupName.\n     */\n    public java.lang.String getGroupName() {\n      java.lang.Object ref = groupName_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        groupName_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string groupName = 8;</code>\n     * @return The bytes for groupName.\n     */\n    public com.google.protobuf.ByteString\n        getGroupNameBytes() {\n      java.lang.Object ref = groupName_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        groupName_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      
}\n    }\n    /**\n     * <code>string groupName = 8;</code>\n     * @param value The groupName to set.\n     * @return This builder for chaining.\n     */\n    public Builder setGroupName(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      groupName_ = value;\n      bitField0_ |= 0x00000080;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string groupName = 8;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearGroupName() {\n      groupName_ = getDefaultInstance().getGroupName();\n      bitField0_ = (bitField0_ & ~0x00000080);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string groupName = 8;</code>\n     * @param value The bytes for groupName to set.\n     * @return This builder for chaining.\n     */\n    public Builder setGroupNameBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      groupName_ = value;\n      bitField0_ |= 0x00000080;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object scenario_ = \"\";\n    /**\n     * <code>string scenario = 9;</code>\n     * @return The scenario.\n     */\n    public java.lang.String getScenario() {\n      java.lang.Object ref = scenario_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        scenario_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string scenario = 9;</code>\n     * @return The bytes for scenario.\n     */\n    public com.google.protobuf.ByteString\n        getScenarioBytes() {\n      java.lang.Object ref = scenario_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            
com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        scenario_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string scenario = 9;</code>\n     * @param value The scenario to set.\n     * @return This builder for chaining.\n     */\n    public Builder setScenario(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      scenario_ = value;\n      bitField0_ |= 0x00000100;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string scenario = 9;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearScenario() {\n      scenario_ = getDefaultInstance().getScenario();\n      bitField0_ = (bitField0_ & ~0x00000100);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string scenario = 9;</code>\n     * @param value The bytes for scenario to set.\n     * @return This builder for chaining.\n     */\n    public Builder setScenarioBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      scenario_ = value;\n      bitField0_ |= 0x00000100;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object primaryImplLang_ = \"\";\n    /**\n     * <code>string primaryImplLang = 10;</code>\n     * @return The primaryImplLang.\n     */\n    public java.lang.String getPrimaryImplLang() {\n      java.lang.Object ref = primaryImplLang_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        primaryImplLang_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string primaryImplLang = 10;</code>\n     * @return The 
bytes for primaryImplLang.\n     */\n    public com.google.protobuf.ByteString\n        getPrimaryImplLangBytes() {\n      java.lang.Object ref = primaryImplLang_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        primaryImplLang_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string primaryImplLang = 10;</code>\n     * @param value The primaryImplLang to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPrimaryImplLang(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      primaryImplLang_ = value;\n      bitField0_ |= 0x00000200;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string primaryImplLang = 10;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearPrimaryImplLang() {\n      primaryImplLang_ = getDefaultInstance().getPrimaryImplLang();\n      bitField0_ = (bitField0_ & ~0x00000200);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string primaryImplLang = 10;</code>\n     * @param value The bytes for primaryImplLang to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPrimaryImplLangBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      primaryImplLang_ = value;\n      bitField0_ |= 0x00000200;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object extraInformation_ = \"\";\n    /**\n     * <code>string extraInformation = 11;</code>\n     * @return The extraInformation.\n     */\n    public java.lang.String getExtraInformation() {\n      java.lang.Object ref = extraInformation_;\n      if (!(ref instanceof 
java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        extraInformation_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string extraInformation = 11;</code>\n     * @return The bytes for extraInformation.\n     */\n    public com.google.protobuf.ByteString\n        getExtraInformationBytes() {\n      java.lang.Object ref = extraInformation_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        extraInformation_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string extraInformation = 11;</code>\n     * @param value The extraInformation to set.\n     * @return This builder for chaining.\n     */\n    public Builder setExtraInformation(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      extraInformation_ = value;\n      bitField0_ |= 0x00000400;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string extraInformation = 11;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearExtraInformation() {\n      extraInformation_ = getDefaultInstance().getExtraInformation();\n      bitField0_ = (bitField0_ & ~0x00000400);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string extraInformation = 11;</code>\n     * @param value The bytes for extraInformation to set.\n     * @return This builder for chaining.\n     */\n    public Builder setExtraInformationBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      extraInformation_ = value;\n   
   bitField0_ |= 0x00000400;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object level_ = \"\";\n    /**\n     * <code>string level = 12;</code>\n     * @return The level.\n     */\n    public java.lang.String getLevel() {\n      java.lang.Object ref = level_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        level_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string level = 12;</code>\n     * @return The bytes for level.\n     */\n    public com.google.protobuf.ByteString\n        getLevelBytes() {\n      java.lang.Object ref = level_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        level_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string level = 12;</code>\n     * @param value The level to set.\n     * @return This builder for chaining.\n     */\n    public Builder setLevel(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      level_ = value;\n      bitField0_ |= 0x00000800;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string level = 12;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearLevel() {\n      level_ = getDefaultInstance().getLevel();\n      bitField0_ = (bitField0_ & ~0x00000800);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string level = 12;</code>\n     * @param value The bytes for level to set.\n     * @return This builder for chaining.\n     */\n    public Builder setLevelBytes(\n        com.google.protobuf.ByteString value) {\n      if 
(value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      level_ = value;\n      bitField0_ |= 0x00000800;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.ServiceMetaDTO)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.ServiceMetaDTO)\n  private static final com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<ServiceMetaDTO>\n      PARSER = new com.google.protobuf.AbstractParser<ServiceMetaDTO>() {\n    @java.lang.Override\n    public ServiceMetaDTO parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw 
new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<ServiceMetaDTO> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<ServiceMetaDTO> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOListReply.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.ServiceMetaDTOListReply}\n */\npublic final class ServiceMetaDTOListReply extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.ServiceMetaDTOListReply)\n    ServiceMetaDTOListReplyOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use ServiceMetaDTOListReply.newBuilder() to construct.\n  private ServiceMetaDTOListReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private ServiceMetaDTOListReply() {\n    metas_ = java.util.Collections.emptyList();\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new ServiceMetaDTOListReply();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.Builder.class);\n  }\n\n  public static final int METAS_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> metas_;\n  /**\n   * <code>repeated 
.pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  @java.lang.Override\n  public java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> getMetasList() {\n    return metas_;\n  }\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  @java.lang.Override\n  public java.util.List<? extends com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> \n      getMetasOrBuilderList() {\n    return metas_;\n  }\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  @java.lang.Override\n  public int getMetasCount() {\n    return metas_.size();\n  }\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMetas(int index) {\n    return metas_.get(index);\n  }\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetasOrBuilder(\n      int index) {\n    return metas_.get(index);\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    for (int i = 0; i < metas_.size(); i++) {\n      output.writeMessage(1, metas_.get(i));\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    for (int i = 0; i < metas_.size(); i++) {\n      size += 
com.google.protobuf.CodedOutputStream\n        .computeMessageSize(1, metas_.get(i));\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply)) {\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply other = (com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply) obj;\n\n    if (!getMetasList()\n        .equals(other.getMetasList())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    if (getMetasCount() > 0) {\n      hash = (37 * hash) + METAS_FIELD_NUMBER;\n      hash = (53 * hash) + getMetasList().hashCode();\n    }\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(\n      
com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.ServiceMetaDTOListReply}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.ServiceMetaDTOListReply)\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReplyOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      if (metasBuilder_ == null) {\n        metas_ = java.util.Collections.emptyList();\n      } else {\n        metas_ = null;\n        metasBuilder_.clear();\n      }\n      
bitField0_ = (bitField0_ & ~0x00000001);\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply result = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply(this);\n      buildPartialRepeatedFields(result);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartialRepeatedFields(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply result) {\n      if (metasBuilder_ == null) {\n        if (((bitField0_ & 0x00000001) != 0)) {\n          metas_ = java.util.Collections.unmodifiableList(metas_);\n          bitField0_ = (bitField0_ & ~0x00000001);\n        }\n        result.metas_ = metas_;\n      } else {\n        result.metas_ = metasBuilder_.build();\n      }\n    }\n\n    private void 
buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply result) {\n      int from_bitField0_ = bitField0_;\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.getDefaultInstance()) return this;\n      if (metasBuilder_ == null) {\n        if (!other.metas_.isEmpty()) {\n          if 
(metas_.isEmpty()) {\n            metas_ = other.metas_;\n            bitField0_ = (bitField0_ & ~0x00000001);\n          } else {\n            ensureMetasIsMutable();\n            metas_.addAll(other.metas_);\n          }\n          onChanged();\n        }\n      } else {\n        if (!other.metas_.isEmpty()) {\n          if (metasBuilder_.isEmpty()) {\n            metasBuilder_.dispose();\n            metasBuilder_ = null;\n            metas_ = other.metas_;\n            bitField0_ = (bitField0_ & ~0x00000001);\n            metasBuilder_ = \n              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?\n                 getMetasFieldBuilder() : null;\n          } else {\n            metasBuilder_.addAllMessages(other.metas_);\n          }\n        }\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO m =\n                  input.readMessage(\n                      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.parser(),\n                      extensionRegistry);\n              if (metasBuilder_ == null) {\n                ensureMetasIsMutable();\n                metas_.add(m);\n              } else {\n                metasBuilder_.addMessage(m);\n    
          }\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> metas_ =\n      java.util.Collections.emptyList();\n    private void ensureMetasIsMutable() {\n      if (!((bitField0_ & 0x00000001) != 0)) {\n        metas_ = new java.util.ArrayList<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO>(metas_);\n        bitField0_ |= 0x00000001;\n       }\n    }\n\n    private com.google.protobuf.RepeatedFieldBuilderV3<\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> metasBuilder_;\n\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> getMetasList() {\n      if (metasBuilder_ == null) {\n        return java.util.Collections.unmodifiableList(metas_);\n      } else {\n        return metasBuilder_.getMessageList();\n      }\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public int getMetasCount() {\n      if (metasBuilder_ == null) {\n        return metas_.size();\n      } else {\n        return metasBuilder_.getCount();\n      }\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO 
metas = 1;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMetas(int index) {\n      if (metasBuilder_ == null) {\n        return metas_.get(index);\n      } else {\n        return metasBuilder_.getMessage(index);\n      }\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder setMetas(\n        int index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) {\n      if (metasBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        ensureMetasIsMutable();\n        metas_.set(index, value);\n        onChanged();\n      } else {\n        metasBuilder_.setMessage(index, value);\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder setMetas(\n        int index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) {\n      if (metasBuilder_ == null) {\n        ensureMetasIsMutable();\n        metas_.set(index, builderForValue.build());\n        onChanged();\n      } else {\n        metasBuilder_.setMessage(index, builderForValue.build());\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder addMetas(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) {\n      if (metasBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        ensureMetasIsMutable();\n        metas_.add(value);\n        onChanged();\n      } else {\n        metasBuilder_.addMessage(value);\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder addMetas(\n        int index, 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) {\n      if (metasBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        ensureMetasIsMutable();\n        metas_.add(index, value);\n        onChanged();\n      } else {\n        metasBuilder_.addMessage(index, value);\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder addMetas(\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) {\n      if (metasBuilder_ == null) {\n        ensureMetasIsMutable();\n        metas_.add(builderForValue.build());\n        onChanged();\n      } else {\n        metasBuilder_.addMessage(builderForValue.build());\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder addMetas(\n        int index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) {\n      if (metasBuilder_ == null) {\n        ensureMetasIsMutable();\n        metas_.add(index, builderForValue.build());\n        onChanged();\n      } else {\n        metasBuilder_.addMessage(index, builderForValue.build());\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder addAllMetas(\n        java.lang.Iterable<? 
extends com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> values) {\n      if (metasBuilder_ == null) {\n        ensureMetasIsMutable();\n        com.google.protobuf.AbstractMessageLite.Builder.addAll(\n            values, metas_);\n        onChanged();\n      } else {\n        metasBuilder_.addAllMessages(values);\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder clearMetas() {\n      if (metasBuilder_ == null) {\n        metas_ = java.util.Collections.emptyList();\n        bitField0_ = (bitField0_ & ~0x00000001);\n        onChanged();\n      } else {\n        metasBuilder_.clear();\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public Builder removeMetas(int index) {\n      if (metasBuilder_ == null) {\n        ensureMetasIsMutable();\n        metas_.remove(index);\n        onChanged();\n      } else {\n        metasBuilder_.remove(index);\n      }\n      return this;\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder getMetasBuilder(\n        int index) {\n      return getMetasFieldBuilder().getBuilder(index);\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetasOrBuilder(\n        int index) {\n      if (metasBuilder_ == null) {\n        return metas_.get(index);  } else {\n        return metasBuilder_.getMessageOrBuilder(index);\n      }\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public java.util.List<? 
extends com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> \n         getMetasOrBuilderList() {\n      if (metasBuilder_ != null) {\n        return metasBuilder_.getMessageOrBuilderList();\n      } else {\n        return java.util.Collections.unmodifiableList(metas_);\n      }\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder addMetasBuilder() {\n      return getMetasFieldBuilder().addBuilder(\n          com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance());\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder addMetasBuilder(\n        int index) {\n      return getMetasFieldBuilder().addBuilder(\n          index, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance());\n    }\n    /**\n     * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n     */\n    public java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder> \n         getMetasBuilderList() {\n      return getMetasFieldBuilder().getBuilderList();\n    }\n    private com.google.protobuf.RepeatedFieldBuilderV3<\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> \n        getMetasFieldBuilder() {\n      if (metasBuilder_ == null) {\n        metasBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, 
com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder>(\n                metas_,\n                ((bitField0_ & 0x00000001) != 0),\n                getParentForChildren(),\n                isClean());\n        metas_ = null;\n      }\n      return metasBuilder_;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.ServiceMetaDTOListReply)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.ServiceMetaDTOListReply)\n  private static final com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<ServiceMetaDTOListReply>\n      PARSER = new com.google.protobuf.AbstractParser<ServiceMetaDTOListReply>() {\n    @java.lang.Override\n    public ServiceMetaDTOListReply parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch 
(com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<ServiceMetaDTOListReply> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<ServiceMetaDTOListReply> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOListReplyOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface ServiceMetaDTOListReplyOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.ServiceMetaDTOListReply)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  java.util.List<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO> \n      getMetasList();\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMetas(int index);\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  int getMetasCount();\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  java.util.List<? extends com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> \n      getMetasOrBuilderList();\n  /**\n   * <code>repeated .pinecone.meta.ServiceMetaDTO metas = 1;</code>\n   */\n  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetasOrBuilder(\n      int index);\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface ServiceMetaDTOOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.ServiceMetaDTO)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string guid = 1;</code>\n   * @return The guid.\n   */\n  java.lang.String getGuid();\n  /**\n   * <code>string guid = 1;</code>\n   * @return The bytes for guid.\n   */\n  com.google.protobuf.ByteString\n      getGuidBytes();\n\n  /**\n   * <code>string name = 2;</code>\n   * @return The name.\n   */\n  java.lang.String getName();\n  /**\n   * <code>string name = 2;</code>\n   * @return The bytes for name.\n   */\n  com.google.protobuf.ByteString\n      getNameBytes();\n\n  /**\n   * <code>string type = 3;</code>\n   * @return The type.\n   */\n  java.lang.String getType();\n  /**\n   * <code>string type = 3;</code>\n   * @return The bytes for type.\n   */\n  com.google.protobuf.ByteString\n      getTypeBytes();\n\n  /**\n   * <code>string displayName = 4;</code>\n   * @return The displayName.\n   */\n  java.lang.String getDisplayName();\n  /**\n   * <code>string displayName = 4;</code>\n   * @return The bytes for displayName.\n   */\n  com.google.protobuf.ByteString\n      getDisplayNameBytes();\n\n  /**\n   * <code>string description = 5;</code>\n   * @return The description.\n   */\n  java.lang.String getDescription();\n  /**\n   * <code>string description = 5;</code>\n   * @return The bytes for description.\n   */\n  com.google.protobuf.ByteString\n      getDescriptionBytes();\n\n  /**\n   * <code>string fullName = 6;</code>\n   * @return The fullName.\n   */\n  java.lang.String getFullName();\n  /**\n   * <code>string fullName = 6;</code>\n   * @return The bytes for fullName.\n   */\n  com.google.protobuf.ByteString\n      getFullNameBytes();\n\n  /**\n   * <code>string groupNamespace = 
7;</code>\n   * @return The groupNamespace.\n   */\n  java.lang.String getGroupNamespace();\n  /**\n   * <code>string groupNamespace = 7;</code>\n   * @return The bytes for groupNamespace.\n   */\n  com.google.protobuf.ByteString\n      getGroupNamespaceBytes();\n\n  /**\n   * <code>string groupName = 8;</code>\n   * @return The groupName.\n   */\n  java.lang.String getGroupName();\n  /**\n   * <code>string groupName = 8;</code>\n   * @return The bytes for groupName.\n   */\n  com.google.protobuf.ByteString\n      getGroupNameBytes();\n\n  /**\n   * <code>string scenario = 9;</code>\n   * @return The scenario.\n   */\n  java.lang.String getScenario();\n  /**\n   * <code>string scenario = 9;</code>\n   * @return The bytes for scenario.\n   */\n  com.google.protobuf.ByteString\n      getScenarioBytes();\n\n  /**\n   * <code>string primaryImplLang = 10;</code>\n   * @return The primaryImplLang.\n   */\n  java.lang.String getPrimaryImplLang();\n  /**\n   * <code>string primaryImplLang = 10;</code>\n   * @return The bytes for primaryImplLang.\n   */\n  com.google.protobuf.ByteString\n      getPrimaryImplLangBytes();\n\n  /**\n   * <code>string extraInformation = 11;</code>\n   * @return The extraInformation.\n   */\n  java.lang.String getExtraInformation();\n  /**\n   * <code>string extraInformation = 11;</code>\n   * @return The bytes for extraInformation.\n   */\n  com.google.protobuf.ByteString\n      getExtraInformationBytes();\n\n  /**\n   * <code>string level = 12;</code>\n   * @return The level.\n   */\n  java.lang.String getLevel();\n  /**\n   * <code>string level = 12;</code>\n   * @return The bytes for level.\n   */\n  com.google.protobuf.ByteString\n      getLevelBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOReply.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.ServiceMetaDTOReply}\n */\npublic final class ServiceMetaDTOReply extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.ServiceMetaDTOReply)\n    ServiceMetaDTOReplyOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use ServiceMetaDTOReply.newBuilder() to construct.\n  private ServiceMetaDTOReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private ServiceMetaDTOReply() {\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new ServiceMetaDTOReply();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.Builder.class);\n  }\n\n  private int bitField0_;\n  public static final int META_FIELD_NUMBER = 1;\n  private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO meta_;\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n   * @return Whether the meta field is set.\n   */\n  @java.lang.Override\n  public 
boolean hasMeta() {\n    return ((bitField0_ & 0x00000001) != 0);\n  }\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n   * @return The meta.\n   */\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta() {\n    return meta_ == null ? com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;\n  }\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n   */\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder() {\n    return meta_ == null ? com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (((bitField0_ & 0x00000001) != 0)) {\n      output.writeMessage(1, getMeta());\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (((bitField0_ & 0x00000001) != 0)) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeMessageSize(1, getMeta());\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply)) {\n      return super.equals(obj);\n    
}\n    com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply other = (com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply) obj;\n\n    if (hasMeta() != other.hasMeta()) return false;\n    if (hasMeta()) {\n      if (!getMeta()\n          .equals(other.getMeta())) return false;\n    }\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    if (hasMeta()) {\n      hash = (37 * hash) + META_FIELD_NUMBER;\n      hash = (53 * hash) + getMeta().hashCode();\n    }\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return 
PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(\n      
com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.ServiceMetaDTOReply}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.ServiceMetaDTOReply)\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReplyOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.newBuilder()\n    private Builder() {\n      maybeForceBuilderInitialization();\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n      maybeForceBuilderInitialization();\n    }\n    private void maybeForceBuilderInitialization() {\n      if (com.google.protobuf.GeneratedMessageV3\n              .alwaysUseFieldBuilders) {\n        getMetaFieldBuilder();\n      }\n    }\n    @java.lang.Override\n    public 
Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      meta_ = null;\n      if (metaBuilder_ != null) {\n        metaBuilder_.dispose();\n        metaBuilder_ = null;\n      }\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply result = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply result) {\n      int from_bitField0_ = bitField0_;\n      int to_bitField0_ = 0;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.meta_ = metaBuilder_ == null\n            ? 
meta_\n            : metaBuilder_.build();\n        to_bitField0_ |= 0x00000001;\n      }\n      result.bitField0_ |= to_bitField0_;\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.getDefaultInstance()) return this;\n      if (other.hasMeta()) {\n        mergeMeta(other.getMeta());\n      }\n      
this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              input.readMessage(\n                  getMetaFieldBuilder().getBuilder(),\n                  extensionRegistry);\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO meta_;\n    private com.google.protobuf.SingleFieldBuilderV3<\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> metaBuilder_;\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     * @return Whether the meta field is set.\n     */\n    public 
boolean hasMeta() {\n      return ((bitField0_ & 0x00000001) != 0);\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     * @return The meta.\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta() {\n      if (metaBuilder_ == null) {\n        return meta_ == null ? com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;\n      } else {\n        return metaBuilder_.getMessage();\n      }\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     */\n    public Builder setMeta(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) {\n      if (metaBuilder_ == null) {\n        if (value == null) {\n          throw new NullPointerException();\n        }\n        meta_ = value;\n      } else {\n        metaBuilder_.setMessage(value);\n      }\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     */\n    public Builder setMeta(\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder builderForValue) {\n      if (metaBuilder_ == null) {\n        meta_ = builderForValue.build();\n      } else {\n        metaBuilder_.setMessage(builderForValue.build());\n      }\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     */\n    public Builder mergeMeta(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO value) {\n      if (metaBuilder_ == null) {\n        if (((bitField0_ & 0x00000001) != 0) &&\n          meta_ != null &&\n          meta_ != com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance()) {\n          getMetaBuilder().mergeFrom(value);\n        } else {\n          meta_ = value;\n        }\n      } else {\n        
metaBuilder_.mergeFrom(value);\n      }\n      if (meta_ != null) {\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      return this;\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     */\n    public Builder clearMeta() {\n      bitField0_ = (bitField0_ & ~0x00000001);\n      meta_ = null;\n      if (metaBuilder_ != null) {\n        metaBuilder_.dispose();\n        metaBuilder_ = null;\n      }\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder getMetaBuilder() {\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return getMetaFieldBuilder().getBuilder();\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder() {\n      if (metaBuilder_ != null) {\n        return metaBuilder_.getMessageOrBuilder();\n      } else {\n        return meta_ == null ?\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.getDefaultInstance() : meta_;\n      }\n    }\n    /**\n     * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n     */\n    private com.google.protobuf.SingleFieldBuilderV3<\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder> \n        getMetaFieldBuilder() {\n      if (metaBuilder_ == null) {\n        metaBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<\n            com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO.Builder, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder>(\n           
     getMeta(),\n                getParentForChildren(),\n                isClean());\n        meta_ = null;\n      }\n      return metaBuilder_;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.ServiceMetaDTOReply)\n  }\n\n  // @@protoc_insertion_point(class_scope:pinecone.meta.ServiceMetaDTOReply)\n  private static final com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<ServiceMetaDTOReply>\n      PARSER = new com.google.protobuf.AbstractParser<ServiceMetaDTOReply>() {\n    @java.lang.Override\n    public ServiceMetaDTOReply parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw 
new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<ServiceMetaDTOReply> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<ServiceMetaDTOReply> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaDTOReplyOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface ServiceMetaDTOReplyOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.ServiceMetaDTOReply)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n   * @return Whether the meta field is set.\n   */\n  boolean hasMeta();\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n   * @return The meta.\n   */\n  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTO getMeta();\n  /**\n   * <code>.pinecone.meta.ServiceMetaDTO meta = 1;</code>\n   */\n  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOOrBuilder getMetaOrBuilder();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaGrpc.java",
    "content": "package com.pinecone.hydra.service.registry.grpc.server.meta;\n\nimport static io.grpc.MethodDescriptor.generateFullMethodName;\n\n/**\n */\n@javax.annotation.Generated(\n    value = \"by gRPC proto compiler (version 1.62.2)\",\n    comments = \"Source: service_meta.proto\")\n@io.grpc.stub.annotations.GrpcGenerated\npublic final class ServiceMetaGrpc {\n\n  private ServiceMetaGrpc() {}\n\n  public static final java.lang.String SERVICE_NAME = \"pinecone.meta.ServiceMeta\";\n\n  // Static method descriptors that strictly reflect the proto.\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByClientIdMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"FetchServiceInsMetaByClientId\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByClientIdMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByClientIdMethod;\n    if ((getFetchServiceInsMetaByClientIdMethod = ServiceMetaGrpc.getFetchServiceInsMetaByClientIdMethod) == null) {\n      synchronized (ServiceMetaGrpc.class) {\n        if ((getFetchServiceInsMetaByClientIdMethod = ServiceMetaGrpc.getFetchServiceInsMetaByClientIdMethod) == null) {\n          
ServiceMetaGrpc.getFetchServiceInsMetaByClientIdMethod = getFetchServiceInsMetaByClientIdMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"FetchServiceInsMetaByClientId\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier(\"FetchServiceInsMetaByClientId\"))\n              .build();\n        }\n      }\n    }\n    return getFetchServiceInsMetaByClientIdMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByServiceIdMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"FetchServiceInsMetaByServiceId\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> 
getFetchServiceInsMetaByServiceIdMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> getFetchServiceInsMetaByServiceIdMethod;\n    if ((getFetchServiceInsMetaByServiceIdMethod = ServiceMetaGrpc.getFetchServiceInsMetaByServiceIdMethod) == null) {\n      synchronized (ServiceMetaGrpc.class) {\n        if ((getFetchServiceInsMetaByServiceIdMethod = ServiceMetaGrpc.getFetchServiceInsMetaByServiceIdMethod) == null) {\n          ServiceMetaGrpc.getFetchServiceInsMetaByServiceIdMethod = getFetchServiceInsMetaByServiceIdMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"FetchServiceInsMetaByServiceId\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier(\"FetchServiceInsMetaByServiceId\"))\n              .build();\n        }\n      }\n    }\n    return getFetchServiceInsMetaByServiceIdMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> getQueryServiceMetaByPathMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = 
SERVICE_NAME + '/' + \"QueryServiceMetaByPath\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> getQueryServiceMetaByPathMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> getQueryServiceMetaByPathMethod;\n    if ((getQueryServiceMetaByPathMethod = ServiceMetaGrpc.getQueryServiceMetaByPathMethod) == null) {\n      synchronized (ServiceMetaGrpc.class) {\n        if ((getQueryServiceMetaByPathMethod = ServiceMetaGrpc.getQueryServiceMetaByPathMethod) == null) {\n          ServiceMetaGrpc.getQueryServiceMetaByPathMethod = getQueryServiceMetaByPathMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"QueryServiceMetaByPath\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier(\"QueryServiceMetaByPath\"))\n              
.build();\n        }\n      }\n    }\n    return getQueryServiceMetaByPathMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> getQueryServiceMetaByGuidMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"QueryServiceMetaByGuid\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> getQueryServiceMetaByGuidMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> getQueryServiceMetaByGuidMethod;\n    if ((getQueryServiceMetaByGuidMethod = ServiceMetaGrpc.getQueryServiceMetaByGuidMethod) == null) {\n      synchronized (ServiceMetaGrpc.class) {\n        if ((getQueryServiceMetaByGuidMethod = ServiceMetaGrpc.getQueryServiceMetaByGuidMethod) == null) {\n          ServiceMetaGrpc.getQueryServiceMetaByGuidMethod = getQueryServiceMetaByGuidMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest, com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"QueryServiceMetaByGuid\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  
com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier(\"QueryServiceMetaByGuid\"))\n              .build();\n        }\n      }\n    }\n    return getQueryServiceMetaByGuidMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> getEvalCreationStatementMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"EvalCreationStatement\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> getEvalCreationStatementMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> getEvalCreationStatementMethod;\n    if ((getEvalCreationStatementMethod = ServiceMetaGrpc.getEvalCreationStatementMethod) == null) {\n      synchronized (ServiceMetaGrpc.class) {\n        if ((getEvalCreationStatementMethod = ServiceMetaGrpc.getEvalCreationStatementMethod) == null) {\n          ServiceMetaGrpc.getEvalCreationStatementMethod = getEvalCreationStatementMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest, 
com.pinecone.hydra.service.registry.grpc.server.meta.StringReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"EvalCreationStatement\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier(\"EvalCreationStatement\"))\n              .build();\n        }\n      }\n    }\n    return getEvalCreationStatementMethod;\n  }\n\n  private static volatile io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> getCreateNewServiceMethod;\n\n  @io.grpc.stub.annotations.RpcMethod(\n      fullMethodName = SERVICE_NAME + '/' + \"CreateNewService\",\n      requestType = com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.class,\n      responseType = com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.class,\n      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)\n  public static io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest,\n      com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> getCreateNewServiceMethod() {\n    io.grpc.MethodDescriptor<com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> getCreateNewServiceMethod;\n    if ((getCreateNewServiceMethod = ServiceMetaGrpc.getCreateNewServiceMethod) == null) {\n      
synchronized (ServiceMetaGrpc.class) {\n        if ((getCreateNewServiceMethod = ServiceMetaGrpc.getCreateNewServiceMethod) == null) {\n          ServiceMetaGrpc.getCreateNewServiceMethod = getCreateNewServiceMethod =\n              io.grpc.MethodDescriptor.<com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply>newBuilder()\n              .setType(io.grpc.MethodDescriptor.MethodType.UNARY)\n              .setFullMethodName(generateFullMethodName(SERVICE_NAME, \"CreateNewService\"))\n              .setSampledToLocalTracing(true)\n              .setRequestMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest.getDefaultInstance()))\n              .setResponseMarshaller(io.grpc.protobuf.ProtoUtils.marshaller(\n                  com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.getDefaultInstance()))\n              .setSchemaDescriptor(new ServiceMetaMethodDescriptorSupplier(\"CreateNewService\"))\n              .build();\n        }\n      }\n    }\n    return getCreateNewServiceMethod;\n  }\n\n  /**\n   * Creates a new async stub that supports all call types for the service\n   */\n  public static ServiceMetaStub newStub(io.grpc.Channel channel) {\n    io.grpc.stub.AbstractStub.StubFactory<ServiceMetaStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ServiceMetaStub>() {\n        @java.lang.Override\n        public ServiceMetaStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ServiceMetaStub(channel, callOptions);\n        }\n      };\n    return ServiceMetaStub.newStub(factory, channel);\n  }\n\n  /**\n   * Creates a new blocking-style stub that supports unary and streaming output calls on the service\n   */\n  public static ServiceMetaBlockingStub newBlockingStub(\n      io.grpc.Channel channel) {\n    
io.grpc.stub.AbstractStub.StubFactory<ServiceMetaBlockingStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ServiceMetaBlockingStub>() {\n        @java.lang.Override\n        public ServiceMetaBlockingStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ServiceMetaBlockingStub(channel, callOptions);\n        }\n      };\n    return ServiceMetaBlockingStub.newStub(factory, channel);\n  }\n\n  /**\n   * Creates a new ListenableFuture-style stub that supports unary calls on the service\n   */\n  public static ServiceMetaFutureStub newFutureStub(\n      io.grpc.Channel channel) {\n    io.grpc.stub.AbstractStub.StubFactory<ServiceMetaFutureStub> factory =\n      new io.grpc.stub.AbstractStub.StubFactory<ServiceMetaFutureStub>() {\n        @java.lang.Override\n        public ServiceMetaFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n          return new ServiceMetaFutureStub(channel, callOptions);\n        }\n      };\n    return ServiceMetaFutureStub.newStub(factory, channel);\n  }\n\n  /**\n   */\n  public interface AsyncService {\n\n    /**\n     */\n    default void fetchServiceInsMetaByClientId(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getFetchServiceInsMetaByClientIdMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void fetchServiceInsMetaByServiceId(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getFetchServiceInsMetaByServiceIdMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void 
queryServiceMetaByPath(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getQueryServiceMetaByPathMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void queryServiceMetaByGuid(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getQueryServiceMetaByGuidMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void evalCreationStatement(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getEvalCreationStatementMethod(), responseObserver);\n    }\n\n    /**\n     */\n    default void createNewService(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> responseObserver) {\n      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreateNewServiceMethod(), responseObserver);\n    }\n  }\n\n  /**\n   * Base class for the server implementation of the service ServiceMeta.\n   */\n  public static abstract class ServiceMetaImplBase\n      implements io.grpc.BindableService, AsyncService {\n\n    @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() {\n      return ServiceMetaGrpc.bindService(this);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do asynchronous rpc calls to service ServiceMeta.\n   */\n  public static final class ServiceMetaStub\n      
extends io.grpc.stub.AbstractAsyncStub<ServiceMetaStub> {\n    private ServiceMetaStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ServiceMetaStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ServiceMetaStub(channel, callOptions);\n    }\n\n    /**\n     */\n    public void fetchServiceInsMetaByClientId(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getFetchServiceInsMetaByClientIdMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void fetchServiceInsMetaByServiceId(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getFetchServiceInsMetaByServiceIdMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void queryServiceMetaByPath(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getQueryServiceMetaByPathMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void queryServiceMetaByGuid(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> 
responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getQueryServiceMetaByGuidMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void evalCreationStatement(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getEvalCreationStatementMethod(), getCallOptions()), request, responseObserver);\n    }\n\n    /**\n     */\n    public void createNewService(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest request,\n        io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> responseObserver) {\n      io.grpc.stub.ClientCalls.asyncUnaryCall(\n          getChannel().newCall(getCreateNewServiceMethod(), getCallOptions()), request, responseObserver);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do synchronous rpc calls to service ServiceMeta.\n   */\n  public static final class ServiceMetaBlockingStub\n      extends io.grpc.stub.AbstractBlockingStub<ServiceMetaBlockingStub> {\n    private ServiceMetaBlockingStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ServiceMetaBlockingStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ServiceMetaBlockingStub(channel, callOptions);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply fetchServiceInsMetaByClientId(com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getFetchServiceInsMetaByClientIdMethod(), getCallOptions(), 
request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply fetchServiceInsMetaByServiceId(com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getFetchServiceInsMetaByServiceIdMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply queryServiceMetaByPath(com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getQueryServiceMetaByPathMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply queryServiceMetaByGuid(com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getQueryServiceMetaByGuidMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply evalCreationStatement(com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getEvalCreationStatementMethod(), getCallOptions(), request);\n    }\n\n    /**\n     */\n    public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply createNewService(com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest request) {\n      return io.grpc.stub.ClientCalls.blockingUnaryCall(\n          getChannel(), getCreateNewServiceMethod(), getCallOptions(), request);\n    }\n  }\n\n  /**\n   * A stub to allow clients to do ListenableFuture-style rpc calls to service ServiceMeta.\n   */\n  public static final class ServiceMetaFutureStub\n      extends 
io.grpc.stub.AbstractFutureStub<ServiceMetaFutureStub> {\n    private ServiceMetaFutureStub(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      super(channel, callOptions);\n    }\n\n    @java.lang.Override\n    protected ServiceMetaFutureStub build(\n        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {\n      return new ServiceMetaFutureStub(channel, callOptions);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> fetchServiceInsMetaByClientId(\n        com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getFetchServiceInsMetaByClientIdMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply> fetchServiceInsMetaByServiceId(\n        com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getFetchServiceInsMetaByServiceIdMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> queryServiceMetaByPath(\n        com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getQueryServiceMetaByPathMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply> queryServiceMetaByGuid(\n        com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest 
request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getQueryServiceMetaByGuidMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> evalCreationStatement(\n        com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getEvalCreationStatementMethod(), getCallOptions()), request);\n    }\n\n    /**\n     */\n    public com.google.common.util.concurrent.ListenableFuture<com.pinecone.hydra.service.registry.grpc.server.meta.StringReply> createNewService(\n        com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest request) {\n      return io.grpc.stub.ClientCalls.futureUnaryCall(\n          getChannel().newCall(getCreateNewServiceMethod(), getCallOptions()), request);\n    }\n  }\n\n  private static final int METHODID_FETCH_SERVICE_INS_META_BY_CLIENT_ID = 0;\n  private static final int METHODID_FETCH_SERVICE_INS_META_BY_SERVICE_ID = 1;\n  private static final int METHODID_QUERY_SERVICE_META_BY_PATH = 2;\n  private static final int METHODID_QUERY_SERVICE_META_BY_GUID = 3;\n  private static final int METHODID_EVAL_CREATION_STATEMENT = 4;\n  private static final int METHODID_CREATE_NEW_SERVICE = 5;\n\n  private static final class MethodHandlers<Req, Resp> implements\n      io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,\n      io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,\n      io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,\n      io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {\n    private final AsyncService serviceImpl;\n    private final int methodId;\n\n    MethodHandlers(AsyncService serviceImpl, int methodId) {\n      this.serviceImpl = serviceImpl;\n      this.methodId = methodId;\n    }\n\n    
@java.lang.Override\n    @java.lang.SuppressWarnings(\"unchecked\")\n    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {\n      switch (methodId) {\n        case METHODID_FETCH_SERVICE_INS_META_BY_CLIENT_ID:\n          serviceImpl.fetchServiceInsMetaByClientId((com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>) responseObserver);\n          break;\n        case METHODID_FETCH_SERVICE_INS_META_BY_SERVICE_ID:\n          serviceImpl.fetchServiceInsMetaByServiceId((com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>) responseObserver);\n          break;\n        case METHODID_QUERY_SERVICE_META_BY_PATH:\n          serviceImpl.queryServiceMetaByPath((com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply>) responseObserver);\n          break;\n        case METHODID_QUERY_SERVICE_META_BY_GUID:\n          serviceImpl.queryServiceMetaByGuid((com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply>) responseObserver);\n          break;\n        case METHODID_EVAL_CREATION_STATEMENT:\n          serviceImpl.evalCreationStatement((com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.StringReply>) responseObserver);\n          break;\n        case METHODID_CREATE_NEW_SERVICE:\n          
serviceImpl.createNewService((com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest) request,\n              (io.grpc.stub.StreamObserver<com.pinecone.hydra.service.registry.grpc.server.meta.StringReply>) responseObserver);\n          break;\n        default:\n          throw new AssertionError();\n      }\n    }\n\n    @java.lang.Override\n    @java.lang.SuppressWarnings(\"unchecked\")\n    public io.grpc.stub.StreamObserver<Req> invoke(\n        io.grpc.stub.StreamObserver<Resp> responseObserver) {\n      switch (methodId) {\n        default:\n          throw new AssertionError();\n      }\n    }\n  }\n\n  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {\n    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())\n        .addMethod(\n          getFetchServiceInsMetaByClientIdMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.meta.ClientIdRequest,\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>(\n                service, METHODID_FETCH_SERVICE_INS_META_BY_CLIENT_ID)))\n        .addMethod(\n          getFetchServiceInsMetaByServiceIdMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceIdRequest,\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOListReply>(\n                service, METHODID_FETCH_SERVICE_INS_META_BY_SERVICE_ID)))\n        .addMethod(\n          getQueryServiceMetaByPathMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.meta.PathRequest,\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply>(\n                service, 
METHODID_QUERY_SERVICE_META_BY_PATH)))\n        .addMethod(\n          getQueryServiceMetaByGuidMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.meta.GuidRequest,\n              com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaDTOReply>(\n                service, METHODID_QUERY_SERVICE_META_BY_GUID)))\n        .addMethod(\n          getEvalCreationStatementMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.meta.EvalRequest,\n              com.pinecone.hydra.service.registry.grpc.server.meta.StringReply>(\n                service, METHODID_EVAL_CREATION_STATEMENT)))\n        .addMethod(\n          getCreateNewServiceMethod(),\n          io.grpc.stub.ServerCalls.asyncUnaryCall(\n            new MethodHandlers<\n              com.pinecone.hydra.service.registry.grpc.server.meta.CreateNewServiceRequest,\n              com.pinecone.hydra.service.registry.grpc.server.meta.StringReply>(\n                service, METHODID_CREATE_NEW_SERVICE)))\n        .build();\n  }\n\n  private static abstract class ServiceMetaBaseDescriptorSupplier\n      implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {\n    ServiceMetaBaseDescriptorSupplier() {}\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.getDescriptor();\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {\n      return getFileDescriptor().findServiceByName(\"ServiceMeta\");\n    }\n  }\n\n  private static final class ServiceMetaFileDescriptorSupplier\n      extends ServiceMetaBaseDescriptorSupplier {\n    
ServiceMetaFileDescriptorSupplier() {}\n  }\n\n  private static final class ServiceMetaMethodDescriptorSupplier\n      extends ServiceMetaBaseDescriptorSupplier\n      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {\n    private final java.lang.String methodName;\n\n    ServiceMetaMethodDescriptorSupplier(java.lang.String methodName) {\n      this.methodName = methodName;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {\n      return getServiceDescriptor().findMethodByName(methodName);\n    }\n  }\n\n  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;\n\n  public static io.grpc.ServiceDescriptor getServiceDescriptor() {\n    io.grpc.ServiceDescriptor result = serviceDescriptor;\n    if (result == null) {\n      synchronized (ServiceMetaGrpc.class) {\n        result = serviceDescriptor;\n        if (result == null) {\n          serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)\n              .setSchemaDescriptor(new ServiceMetaFileDescriptorSupplier())\n              .addMethod(getFetchServiceInsMetaByClientIdMethod())\n              .addMethod(getFetchServiceInsMetaByServiceIdMethod())\n              .addMethod(getQueryServiceMetaByPathMethod())\n              .addMethod(getQueryServiceMetaByGuidMethod())\n              .addMethod(getEvalCreationStatementMethod())\n              .addMethod(getCreateNewServiceMethod())\n              .build();\n        }\n      }\n    }\n    return result;\n  }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/ServiceMetaProto.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic final class ServiceMetaProto {\n  private ServiceMetaProto() {}\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistryLite registry) {\n  }\n\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistry registry) {\n    registerAllExtensions(\n        (com.google.protobuf.ExtensionRegistryLite) registry);\n  }\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_ServiceMetaDTO_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_ServiceMetaDTO_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_ClientIdRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_ClientIdRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_ServiceIdRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_ServiceIdRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_PathRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_PathRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_GuidRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_GuidRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    
internal_static_pinecone_meta_EvalRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_EvalRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_CreateNewServiceRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_CreateNewServiceRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_ServiceMetaDTOReply_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_ServiceMetaDTOListReply_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_pinecone_meta_StringReply_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_pinecone_meta_StringReply_fieldAccessorTable;\n\n  public static com.google.protobuf.Descriptors.FileDescriptor\n      getDescriptor() {\n    return descriptor;\n  }\n  private static  com.google.protobuf.Descriptors.FileDescriptor\n      descriptor;\n  static {\n    java.lang.String[] descriptorData = {\n      \"\\n\\022service_meta.proto\\022\\rpinecone.meta\\\"\\365\\001\\n\\016\" +\n      \"ServiceMetaDTO\\022\\014\\n\\004guid\\030\\001 \\001(\\t\\022\\014\\n\\004name\\030\\002 \\001\" +\n      \"(\\t\\022\\014\\n\\004type\\030\\003 \\001(\\t\\022\\023\\n\\013displayName\\030\\004 \\001(\\t\\022\\023\\n\" +\n      \"\\013description\\030\\005 \\001(\\t\\022\\020\\n\\010fullName\\030\\006 
\\001(\\t\\022\\026\\n\\016\" +\n      \"groupNamespace\\030\\007 \\001(\\t\\022\\021\\n\\tgroupName\\030\\010 \\001(\\t\\022\" +\n      \"\\020\\n\\010scenario\\030\\t \\001(\\t\\022\\027\\n\\017primaryImplLang\\030\\n \\001\" +\n      \"(\\t\\022\\030\\n\\020extraInformation\\030\\013 \\001(\\t\\022\\r\\n\\005level\\030\\014 \" +\n      \"\\001(\\t\\\"#\\n\\017ClientIdRequest\\022\\020\\n\\010clientId\\030\\001 \\001(\\003\" +\n      \"\\\"%\\n\\020ServiceIdRequest\\022\\021\\n\\tserviceId\\030\\001 \\001(\\t\\\"\" +\n      \"\\033\\n\\013PathRequest\\022\\014\\n\\004path\\030\\001 \\001(\\t\\\"\\033\\n\\013GuidRequ\" +\n      \"est\\022\\014\\n\\004guid\\030\\001 \\001(\\t\\\"$\\n\\013EvalRequest\\022\\025\\n\\rjson\" +\n      \"Statement\\030\\001 \\001(\\t\\\"]\\n\\027CreateNewServiceReque\" +\n      \"st\\022\\025\\n\\rparentAppPath\\030\\001 \\001(\\t\\022+\\n\\004meta\\030\\002 \\001(\\0132\" +\n      \"\\035.pinecone.meta.ServiceMetaDTO\\\"B\\n\\023Servic\" +\n      \"eMetaDTOReply\\022+\\n\\004meta\\030\\001 \\001(\\0132\\035.pinecone.m\" +\n      \"eta.ServiceMetaDTO\\\"G\\n\\027ServiceMetaDTOList\" +\n      \"Reply\\022,\\n\\005metas\\030\\001 \\003(\\0132\\035.pinecone.meta.Ser\" +\n      \"viceMetaDTO\\\"\\034\\n\\013StringReply\\022\\r\\n\\005value\\030\\001 \\001(\" +\n      \"\\t2\\276\\004\\n\\013ServiceMeta\\022g\\n\\035FetchServiceInsMeta\" +\n      \"ByClientId\\022\\036.pinecone.meta.ClientIdReque\" +\n      \"st\\032&.pinecone.meta.ServiceMetaDTOListRep\" +\n      \"ly\\022i\\n\\036FetchServiceInsMetaByServiceId\\022\\037.p\" +\n      \"inecone.meta.ServiceIdRequest\\032&.pinecone\" +\n      \".meta.ServiceMetaDTOListReply\\022X\\n\\026QuerySe\" +\n      \"rviceMetaByPath\\022\\032.pinecone.meta.PathRequ\" +\n      \"est\\032\\\".pinecone.meta.ServiceMetaDTOReply\\022\" +\n      \"X\\n\\026QueryServiceMetaByGuid\\022\\032.pinecone.met\" +\n      \"a.GuidRequest\\032\\\".pinecone.meta.ServiceMet\" +\n      
\"aDTOReply\\022O\\n\\025EvalCreationStatement\\022\\032.pin\" +\n      \"econe.meta.EvalRequest\\032\\032.pinecone.meta.S\" +\n      \"tringReply\\022V\\n\\020CreateNewService\\022&.pinecon\" +\n      \"e.meta.CreateNewServiceRequest\\032\\032.pinecon\" +\n      \"e.meta.StringReplyBJ\\n4com.pinecone.hydra\" +\n      \".service.registry.grpc.server.metaB\\020Serv\" +\n      \"iceMetaProtoP\\001b\\006proto3\"\n    };\n    descriptor = com.google.protobuf.Descriptors.FileDescriptor\n      .internalBuildGeneratedFileFrom(descriptorData,\n        new com.google.protobuf.Descriptors.FileDescriptor[] {\n        });\n    internal_static_pinecone_meta_ServiceMetaDTO_descriptor =\n      getDescriptor().getMessageTypes().get(0);\n    internal_static_pinecone_meta_ServiceMetaDTO_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_ServiceMetaDTO_descriptor,\n        new java.lang.String[] { \"Guid\", \"Name\", \"Type\", \"DisplayName\", \"Description\", \"FullName\", \"GroupNamespace\", \"GroupName\", \"Scenario\", \"PrimaryImplLang\", \"ExtraInformation\", \"Level\", });\n    internal_static_pinecone_meta_ClientIdRequest_descriptor =\n      getDescriptor().getMessageTypes().get(1);\n    internal_static_pinecone_meta_ClientIdRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_ClientIdRequest_descriptor,\n        new java.lang.String[] { \"ClientId\", });\n    internal_static_pinecone_meta_ServiceIdRequest_descriptor =\n      getDescriptor().getMessageTypes().get(2);\n    internal_static_pinecone_meta_ServiceIdRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_ServiceIdRequest_descriptor,\n        new java.lang.String[] { \"ServiceId\", });\n    internal_static_pinecone_meta_PathRequest_descriptor =\n      
getDescriptor().getMessageTypes().get(3);\n    internal_static_pinecone_meta_PathRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_PathRequest_descriptor,\n        new java.lang.String[] { \"Path\", });\n    internal_static_pinecone_meta_GuidRequest_descriptor =\n      getDescriptor().getMessageTypes().get(4);\n    internal_static_pinecone_meta_GuidRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_GuidRequest_descriptor,\n        new java.lang.String[] { \"Guid\", });\n    internal_static_pinecone_meta_EvalRequest_descriptor =\n      getDescriptor().getMessageTypes().get(5);\n    internal_static_pinecone_meta_EvalRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_EvalRequest_descriptor,\n        new java.lang.String[] { \"JsonStatement\", });\n    internal_static_pinecone_meta_CreateNewServiceRequest_descriptor =\n      getDescriptor().getMessageTypes().get(6);\n    internal_static_pinecone_meta_CreateNewServiceRequest_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_CreateNewServiceRequest_descriptor,\n        new java.lang.String[] { \"ParentAppPath\", \"Meta\", });\n    internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor =\n      getDescriptor().getMessageTypes().get(7);\n    internal_static_pinecone_meta_ServiceMetaDTOReply_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_ServiceMetaDTOReply_descriptor,\n        new java.lang.String[] { \"Meta\", });\n    internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor =\n      getDescriptor().getMessageTypes().get(8);\n    
internal_static_pinecone_meta_ServiceMetaDTOListReply_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_ServiceMetaDTOListReply_descriptor,\n        new java.lang.String[] { \"Metas\", });\n    internal_static_pinecone_meta_StringReply_descriptor =\n      getDescriptor().getMessageTypes().get(9);\n    internal_static_pinecone_meta_StringReply_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_pinecone_meta_StringReply_descriptor,\n        new java.lang.String[] { \"Value\", });\n  }\n\n  // @@protoc_insertion_point(outer_class_scope)\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/StringReply.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\n/**\n * Protobuf type {@code pinecone.meta.StringReply}\n */\npublic final class StringReply extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:pinecone.meta.StringReply)\n    StringReplyOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use StringReply.newBuilder() to construct.\n  private StringReply(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private StringReply() {\n    value_ = \"\";\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new StringReply();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.Builder.class);\n  }\n\n  public static final int VALUE_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object value_ = \"\";\n  /**\n   * <code>string value = 1;</code>\n   * @return The value.\n   */\n  @java.lang.Override\n  public java.lang.String getValue() {\n    java.lang.Object ref = value_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) 
ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      value_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string value = 1;</code>\n   * @return The bytes for value.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getValueBytes() {\n    java.lang.Object ref = value_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      value_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, value_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, value_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.pinecone.hydra.service.registry.grpc.server.meta.StringReply)) 
{\n      return super.equals(obj);\n    }\n    com.pinecone.hydra.service.registry.grpc.server.meta.StringReply other = (com.pinecone.hydra.service.registry.grpc.server.meta.StringReply) obj;\n\n    if (!getValue()\n        .equals(other.getValue())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + VALUE_FIELD_NUMBER;\n    hash = (53 * hash) + getValue().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply 
parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public 
static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.pinecone.hydra.service.registry.grpc.server.meta.StringReply prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code pinecone.meta.StringReply}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:pinecone.meta.StringReply)\n      com.pinecone.hydra.service.registry.grpc.server.meta.StringReplyOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_fieldAccessorTable\n          
.ensureFieldAccessorsInitialized(\n              com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.class, com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.Builder.class);\n    }\n\n    // Construct using com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      value_ = \"\";\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.ServiceMetaProto.internal_static_pinecone_meta_StringReply_descriptor;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply getDefaultInstanceForType() {\n      return com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply build() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.StringReply result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply buildPartial() {\n      com.pinecone.hydra.service.registry.grpc.server.meta.StringReply result = new com.pinecone.hydra.service.registry.grpc.server.meta.StringReply(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.pinecone.hydra.service.registry.grpc.server.meta.StringReply result) {\n      int from_bitField0_ = 
bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.value_ = value_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.pinecone.hydra.service.registry.grpc.server.meta.StringReply) {\n        return mergeFrom((com.pinecone.hydra.service.registry.grpc.server.meta.StringReply)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.pinecone.hydra.service.registry.grpc.server.meta.StringReply other) {\n      if (other == com.pinecone.hydra.service.registry.grpc.server.meta.StringReply.getDefaultInstance()) return this;\n      if (!other.getValue().isEmpty()) {\n        value_ = other.value_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      
this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              value_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 10\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object value_ = \"\";\n    /**\n     * <code>string value = 1;</code>\n     * @return The value.\n     */\n    public java.lang.String getValue() {\n      java.lang.Object ref = value_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        value_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string value = 1;</code>\n     * @return The bytes for value.\n   
  */\n    public com.google.protobuf.ByteString\n        getValueBytes() {\n      java.lang.Object ref = value_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        value_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string value = 1;</code>\n     * @param value The value to set.\n     * @return This builder for chaining.\n     */\n    public Builder setValue(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      value_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string value = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearValue() {\n      value_ = getDefaultInstance().getValue();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string value = 1;</code>\n     * @param value The bytes for value to set.\n     * @return This builder for chaining.\n     */\n    public Builder setValueBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      value_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:pinecone.meta.StringReply)\n  }\n\n  // 
@@protoc_insertion_point(class_scope:pinecone.meta.StringReply)\n  private static final com.pinecone.hydra.service.registry.grpc.server.meta.StringReply DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.pinecone.hydra.service.registry.grpc.server.meta.StringReply();\n  }\n\n  public static com.pinecone.hydra.service.registry.grpc.server.meta.StringReply getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<StringReply>\n      PARSER = new com.google.protobuf.AbstractParser<StringReply>() {\n    @java.lang.Override\n    public StringReply parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<StringReply> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<StringReply> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.pinecone.hydra.service.registry.grpc.server.meta.StringReply getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/java/com/pinecone/hydra/service/registry/grpc/server/meta/StringReplyOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: service_meta.proto\n\npackage com.pinecone.hydra.service.registry.grpc.server.meta;\n\npublic interface StringReplyOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:pinecone.meta.StringReply)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string value = 1;</code>\n   * @return The value.\n   */\n  java.lang.String getValue();\n  /**\n   * <code>string value = 1;</code>\n   * @return The bytes for value.\n   */\n  com.google.protobuf.ByteString\n      getValueBytes();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/proto/control_stream.proto",
    "content": "syntax = \"proto3\";\n\noption java_multiple_files = true;\noption java_package = \"com.pinecone.hydra.service.registry.grpc.server.cs\";\n\nservice ControlStream {\n  rpc Connect(stream ControlMessage) returns (stream ControlMessage);\n}\n\nmessage ControlMessage {\n  int64 clientId = 1;\n  string payload = 2;\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/proto/service_lifecycle.proto",
    "content": "syntax = \"proto3\";\n\noption java_multiple_files = true;\noption java_package = \"com.pinecone.hydra.service.registry.grpc.server.lifecycle\";\noption java_outer_classname = \"ServiceLifecycleProto\";\n\nservice ServiceLifecycle {\n\n  rpc RegisterService (RegisterServiceRequest) returns (RegisterServiceReply);\n\n  rpc CreateInstanceMeta (CreateInstanceMetaRequest) returns (BoolReply);\n\n  rpc DeregisterServiceByClientId (ClientIdRequest) returns (EmptyReply);\n\n  rpc DeregisterServiceByInstanceId (InstanceIdRequest) returns (EmptyReply);\n\n  rpc HasOwnedServiceByServiceId (ServiceIdRequest) returns (BoolReply);\n\n  rpc HasOwnedServiceInstanceByClientId (ClientIdRequest) returns (BoolReply);\n\n  rpc HasOwnedServiceInstanceByInstanceId (InstanceIdRequest) returns (BoolReply);\n\n  rpc HasOwnedServiceClient (ClientIdRequest) returns (BoolReply);\n\n  rpc CountRegisteredService (EmptyRequest) returns (CountReply);\n}\n\nmessage RegisterServiceRequest {\n  int64 clientId = 1;\n  string serviceId = 2;\n  string deployId = 3;\n}\n\nmessage RegisterServiceReply {\n  string instanceId = 1;\n}\n\nmessage CreateInstanceMetaRequest {\n  string instanceGuid = 1;\n}\n\nmessage ClientIdRequest {\n  int64 clientId = 1;\n}\n\nmessage InstanceIdRequest {\n  string instanceId = 1;\n}\n\nmessage ServiceIdRequest {\n  string serviceId = 1;\n}\n\nmessage BoolReply {\n  bool value = 1;\n}\n\nmessage CountReply {\n  int32 value = 1;\n}\n\nmessage EmptyRequest {}\nmessage EmptyReply {}\n"
  },
  {
    "path": "Hydra/hydra-lib-grpc-service-sdk/src/main/proto/service_meta.proto",
    "content": "syntax = \"proto3\";\n\noption java_multiple_files = true;\noption java_package = \"com.pinecone.hydra.service.registry.grpc.server.meta\";\noption java_outer_classname = \"ServiceMetaProto\";\n\npackage pinecone.meta;\n\nservice ServiceMeta {\n\n  rpc FetchServiceInsMetaByClientId (ClientIdRequest) returns (ServiceMetaDTOListReply);\n\n  rpc FetchServiceInsMetaByServiceId (ServiceIdRequest) returns (ServiceMetaDTOListReply);\n\n  rpc QueryServiceMetaByPath (PathRequest) returns (ServiceMetaDTOReply);\n\n  rpc QueryServiceMetaByGuid (GuidRequest) returns (ServiceMetaDTOReply);\n\n  rpc EvalCreationStatement (EvalRequest) returns (StringReply);\n\n  rpc CreateNewService (CreateNewServiceRequest) returns (StringReply);\n}\n\n/* ================= DTO ================= */\n\nmessage ServiceMetaDTO {\n\n  string guid = 1;\n\n  string name = 2;\n\n  string type = 3;\n\n  string displayName = 4;\n\n  string description = 5;\n\n  string fullName = 6;\n\n  string groupNamespace = 7;\n\n  string groupName = 8;\n\n  string scenario = 9;\n\n  string primaryImplLang = 10;\n\n  string extraInformation = 11;\n\n  string level = 12;\n}\n\n/* ================= Request / Reply ================= */\n\nmessage ClientIdRequest {\n  int64 clientId = 1;\n}\n\nmessage ServiceIdRequest {\n  string serviceId = 1;\n}\n\nmessage PathRequest {\n  string path = 1;\n}\n\nmessage GuidRequest {\n  string guid = 1;\n}\n\nmessage EvalRequest {\n  string jsonStatement = 1;\n}\n\nmessage CreateNewServiceRequest {\n  string parentAppPath = 1;\n  ServiceMetaDTO meta = 2;\n}\n\nmessage ServiceMetaDTOReply {\n  ServiceMetaDTO meta = 1;\n}\n\nmessage ServiceMetaDTOListReply {\n  repeated ServiceMetaDTO metas = 1;\n}\n\nmessage StringReply {\n  string value = 1;\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.sdk.thrift</groupId>\n    <artifactId>hydra-lib-thrift-sdk</artifactId>\n    <version>1.2.1</version>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>junit</groupId>\n            <artifactId>junit</artifactId>\n            <version>3.8.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.thrift</groupId>\n            <artifactId>libthrift</artifactId>\n            <version>0.18.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/GenericThriftServiceRegistry.java",
    "content": "package com.pinecone.hydra.thrift;\n\npublic class GenericThriftServiceRegistry implements ThriftServiceRegistry{\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/MCConnectionArguments.java",
    "content": "package com.pinecone.hydra.thrift;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MCConnectionArguments extends Pinenut {\n    String getHost();\n\n    void setHost(String host);\n\n    short getPort();\n\n    void setPort( short port );\n\n    int getKeepAliveTimeout();\n\n    void setKeepAliveTimeout( int keepAliveTimeout );\n\n    int getSocketTimeout();\n\n    void setSocketTimeout( int socketTimeout );\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/SharedConnectionArguments.java",
    "content": "package com.pinecone.hydra.thrift;\n\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic abstract class SharedConnectionArguments implements MCConnectionArguments {\n    protected String         mszHost;\n    protected short          mnPort;\n    protected int            mnKeepAliveTimeout;\n    protected int            mnSocketTimeout;\n\n\n    public SharedConnectionArguments( JSONObject args ) {\n        this.mszHost             = args.optString( \"host\", null );\n        this.mnPort              = (short) args.optInt( \"port\", -1 );\n        this.mnKeepAliveTimeout  = args.optInt( \"KeepAliveTimeout\" );\n        this.mnSocketTimeout     = args.optInt( \"SocketTimeout\", 800 );\n    }\n\n    @Override\n    public String getHost() {\n        return this.mszHost;\n    }\n\n    @Override\n    public void setHost( String host ) {\n        this.mszHost = host;\n    }\n\n    @Override\n    public short getPort() {\n        return this.mnPort;\n    }\n\n    @Override\n    public void setPort( short port ) {\n        this.mnPort = port;\n    }\n\n    @Override\n    public int getKeepAliveTimeout() {\n        return this.mnKeepAliveTimeout;\n    }\n\n    @Override\n    public void setKeepAliveTimeout( int keepAliveTimeout ) {\n        this.mnKeepAliveTimeout = keepAliveTimeout;\n    }\n\n    @Override\n    public int getSocketTimeout() {\n        return this.mnSocketTimeout;\n    }\n\n    @Override\n    public void setSocketTimeout( int socketTimeout ) {\n        this.mnSocketTimeout = socketTimeout;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/ThriftServiceRegistry.java",
    "content": "package com.pinecone.hydra.thrift;\n\npublic interface ThriftServiceRegistry {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/client/GenericMultiplexedThriftClient.java",
    "content": "package com.pinecone.hydra.thrift.client;\n\nimport org.apache.thrift.TException;\nimport org.apache.thrift.TServiceClient;\nimport org.apache.thrift.protocol.TBinaryProtocol;\nimport org.apache.thrift.protocol.TMultiplexedProtocol;\nimport org.apache.thrift.protocol.TProtocol;\nimport org.apache.thrift.transport.TSocket;\nimport org.apache.thrift.transport.TTransport;\nimport org.apache.thrift.transport.TTransportException;\n\nimport java.lang.reflect.Constructor;\n\npublic class GenericMultiplexedThriftClient implements MultiplexedThriftClient{\n    private String              host;\n\n    private int                 port;\n\n    private TTransport          transport;\n\n    private TBinaryProtocol     protocol;\n\n    public GenericMultiplexedThriftClient( String host, int port ) throws TTransportException {\n        this.host = host;\n        this.port = port;\n        this.transport = new TSocket(this.host,this.port);\n        this.transport.open();\n        this.protocol = new TBinaryProtocol(this.transport);\n\n    }\n    @Override\n    public <T extends TServiceClient> T getClient(String serviceName, Class<T> clientClass) throws TException {\n        // 创建多路复用协议\n        TMultiplexedProtocol multiplexedProtocol = new TMultiplexedProtocol(this.protocol, serviceName);\n\n        try {\n            // 获取 Client 类的构造方法\n            Constructor<T> constructor = clientClass.getConstructor(TProtocol.class);\n\n            // 使用构造方法创建 Client 对象\n            return constructor.newInstance(multiplexedProtocol);\n        } catch (Exception e) {\n            throw new TException(\"Failed to create client for service: \" + serviceName, e);\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/client/GenericThriftClient.java",
    "content": "package com.pinecone.hydra.thrift.client;\n\nimport org.apache.thrift.TException;\nimport org.apache.thrift.TServiceClient;\nimport org.apache.thrift.protocol.TBinaryProtocol;\nimport org.apache.thrift.protocol.TProtocol;\nimport org.apache.thrift.transport.TSocket;\nimport org.apache.thrift.transport.TTransport;\n\npublic class GenericThriftClient<T extends TServiceClient> implements ThriftClient<T> {\n    private String          host;\n\n    private int             port;\n\n    private int             outTime;\n\n    private TTransport      transport;\n\n    private T               client;\n\n\n    public GenericThriftClient( String host, int port, int outTime, Class<T> clientClass ){\n        this.host = host;\n        this.port = port;\n        this.outTime = outTime;\n        try {\n            // 创建传输层和协议\n            this.transport = new TSocket(this.host, this.port, this.outTime);\n            TProtocol protocol = new TBinaryProtocol(this.transport);\n\n            // 使用反射创建客户端实例\n            this.client = clientClass.getConstructor(TProtocol.class).newInstance(protocol);\n        } catch (Exception e) {\n            throw new RuntimeException(\"Failed to initialize the client\", e);\n        }\n    }\n\n    @Override\n    public T getClient() throws TException {\n        if (!transport.isOpen()) {\n            transport.open();\n        }\n        return client;\n    }\n\n    @Override\n    public void close() {\n        if (transport != null && transport.isOpen()) {\n            transport.close();\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/client/MultiplexedThriftClient.java",
    "content": "package com.pinecone.hydra.thrift.client;\n\nimport org.apache.thrift.TException;\nimport org.apache.thrift.TServiceClient;\n\npublic interface MultiplexedThriftClient {\n    <T extends TServiceClient> T getClient(String serviceName, Class<T> clientClass) throws TException;\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/client/ThriftClient.java",
    "content": "package com.pinecone.hydra.thrift.client;\n\nimport org.apache.thrift.TException;\nimport org.apache.thrift.TServiceClient;\n\npublic interface ThriftClient<T extends TServiceClient> {\n    T getClient() throws TException;\n    void close();\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/GenericThriftServer.java",
    "content": "package com.pinecone.hydra.thrift.server;\n\nimport org.apache.thrift.TProcessor;\nimport org.apache.thrift.protocol.TBinaryProtocol;\nimport org.apache.thrift.server.TServer;\nimport org.apache.thrift.server.TSimpleServer;\nimport org.apache.thrift.transport.TServerSocket;\nimport org.apache.thrift.transport.TTransportException;\n\nimport java.util.Map;\n\npublic class GenericThriftServer <T extends TProcessor> implements ThriftServer{\n    private final T processor;\n    private final int port;\n\n\n    public GenericThriftServer(T processor, int port) {\n        this.processor = processor;\n        this.port = port;\n    }\n\n    @Override\n    public void start() {\n        try {\n            System.out.println(\"服务端开启....\");\n\n            // 创建服务传输层\n            TServerSocket serverTransport = new TServerSocket(port);\n\n            // 构造服务参数\n            TSimpleServer.Args tArgs = new TSimpleServer.Args(serverTransport);\n            tArgs.processor(processor);\n            tArgs.protocolFactory(new TBinaryProtocol.Factory());\n\n            // 创建并启动服务\n            TServer server = new TSimpleServer(tArgs);;\n            server.serve();\n        }\n        catch (TTransportException e) {\n            e.printStackTrace();\n        }\n    }\n\n    @Override\n    public void close() {\n\n    }\n\n    @Override\n    public ServerConnectArguments getConnectionArguments() {\n        return null;\n    }\n\n    @Override\n    public ThriftServer apply( Map<String, Object> conf ) {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/MultiplexedServer.java",
    "content": "package com.pinecone.hydra.thrift.server;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\n\nimport org.apache.thrift.TMultiplexedProcessor;\nimport org.apache.thrift.TProcessor;\nimport org.apache.thrift.protocol.TBinaryProtocol;\nimport org.apache.thrift.server.TServer;\nimport org.apache.thrift.server.TSimpleServer;\nimport org.apache.thrift.transport.TServerSocket;\nimport org.apache.thrift.transport.TTransportException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.net.InetSocketAddress;\nimport java.util.Map;\n\npublic class MultiplexedServer implements ThriftServer {\n\n    protected Logger logger = LoggerFactory.getLogger( MultiplexedServer.class );\n\n    protected JSONObject                  mjoSectionConf;\n\n    protected ServerConnectArguments      connectionArguments ;\n\n    protected InetSocketAddress           primaryBindAddress  ;\n\n    protected final TMultiplexedProcessor multiplexedProcessor;\n\n    protected TServer server;\n\n    public MultiplexedServer( Map<String, Object> conf ){\n        this( conf, null );\n    }\n\n    public MultiplexedServer( Map<String, Object> conf, ServerConnectArguments arguments ){\n        this.multiplexedProcessor = new TMultiplexedProcessor();\n        this.mjoSectionConf       = MultiplexedServer.asConfig( conf );\n        this.connectionArguments  = arguments;\n\n        if ( this.connectionArguments == null ) {\n            this.connectionArguments = new ServerConnectionArguments( this.mjoSectionConf );\n        }\n    }\n\n    protected static JSONObject asConfig( Map<String, Object> joConf ) {\n        if( joConf instanceof JSONObject ) {\n            return (JSONObject) joConf;\n        }\n        else {\n            return new JSONMaptron( joConf, true );\n        }\n    }\n\n    @Override\n    public ThriftServer apply( Map<String, Object> conf ) {\n 
       this.mjoSectionConf = MultiplexedServer.asConfig( conf );\n        JSONObject joConf = this.getSectionConf();\n\n        this.connectionArguments = new ServerConnectionArguments( joConf );\n//        this.mChannelPool         = new PassiveRegisterChannelPool<>(\n//                this, new UlfIdleFirstBalanceStrategy(), joConf.optInt( \"MaximumConnections\", (int)1e7 )\n//        );\n\n        return this;\n    }\n\n\n    public void registerProcessor( TProcessor processor ) {\n        String name = processor.getClass().getName();\n        String[] parts = name.split(\"[.$]\");\n        name = parts[parts.length - 2];\n\n        this.registerProcessor( name, processor );\n    }\n\n    public void registerProcessor( String serviceName, TProcessor processor ) {\n        this.multiplexedProcessor.registerProcessor( serviceName, processor );\n    }\n\n    @Override\n    public void start() {\n        try {\n            String szHost           = this.getConnectionArguments().getHost();\n            short  nPort            = this.getConnectionArguments().getPort();\n            if( StringUtils.isEmpty( szHost ) ) {\n                this.primaryBindAddress = new InetSocketAddress( nPort );\n            }\n            else {\n                this.primaryBindAddress = new InetSocketAddress( szHost, nPort );\n            }\n\n            TServerSocket serverTransport = new TServerSocket( this.primaryBindAddress );\n\n            TSimpleServer.Args tArgs = new TSimpleServer.Args(serverTransport);\n            tArgs.processor( this.multiplexedProcessor );\n            tArgs.protocolFactory( new TBinaryProtocol.Factory() );\n\n            this.server = new TSimpleServer(tArgs);\n            this.server.serve();\n\n            this.logger.info( \"ThriftServer started at \" + this.primaryBindAddress );\n        }\n        catch ( TTransportException e ) {\n            e.printStackTrace();\n        }\n    }\n\n\n    public JSONObject getSectionConf() {\n        return 
this.mjoSectionConf;\n    }\n\n    @Override\n    public ServerConnectArguments getConnectionArguments() {\n        return this.connectionArguments;\n    }\n\n    @Override\n    public void close() {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/ServerConnectArguments.java",
    "content": "package com.pinecone.hydra.thrift.server;\n\nimport com.pinecone.hydra.thrift.MCConnectionArguments;\n\npublic interface ServerConnectArguments extends MCConnectionArguments {\n    int getMaximumClients() ;\n\n    void setMaximumClients( int mnMaximumClients );\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/ServerConnectionArguments.java",
    "content": "package com.pinecone.hydra.thrift.server;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.thrift.SharedConnectionArguments;\n\npublic class ServerConnectionArguments extends SharedConnectionArguments implements ServerConnectArguments {\n    protected int mnMaximumClients; // 0 <= for unlimited clients\n\n    public ServerConnectionArguments( JSONObject args ) {\n        super( args );\n        this.mnMaximumClients  = args.optInt( \"MaximumClients\", 0 );\n    }\n\n    @Override\n    public int getMaximumClients() {\n        return this.mnMaximumClients;\n    }\n\n    @Override\n    public void setMaximumClients( int mnMaximumClients ) {\n        this.mnMaximumClients = mnMaximumClients;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/server/ThriftServer.java",
    "content": "package com.pinecone.hydra.thrift.server;\n\nimport java.util.Map;\n\npublic interface ThriftServer {\n    void start();\n\n    void close();\n\n    ServerConnectArguments getConnectionArguments();\n\n    ThriftServer apply( Map<String, Object> conf );\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/service/HelloWorldService.java",
    "content": "/**\n * Autogenerated by Thrift Compiler (0.18.0)\n *\n * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n *  @generated\n */\npackage com.pinecone.hydra.thrift.service;\n\nimport org.apache.thrift.async.TAsyncMethodCall;\nimport org.apache.tomcat.jni.Proc;\n\n@javax.annotation.Generated(value = \"Autogenerated by Thrift Compiler (0.18.0)\", date = \"2025-01-24\")\n@SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\npublic class HelloWorldService {\n\n  public interface Iface {\n\n    public java.lang.String sayHello(java.lang.String name) throws org.apache.thrift.TException;\n\n  }\n\n  public interface AsyncIface {\n\n    public void sayHello(java.lang.String name, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException;\n\n  }\n\n  public static class Client extends org.apache.thrift.TServiceClient implements Iface {\n    public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {\n      public Factory() {}\n      @Override\n      public Client getClient(org.apache.thrift.protocol.TProtocol prot) {\n        return new Client(prot);\n      }\n      @Override\n      public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {\n        return new Client(iprot, oprot);\n      }\n    }\n\n    public Client(org.apache.thrift.protocol.TProtocol prot)\n    {\n      super(prot, prot);\n    }\n\n    public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {\n      super(iprot, oprot);\n    }\n\n    @Override\n    public java.lang.String sayHello(java.lang.String name) throws org.apache.thrift.TException\n    {\n      send_sayHello(name);\n      return recv_sayHello();\n    }\n\n    public void send_sayHello(java.lang.String name) throws org.apache.thrift.TException\n    {\n      sayHello_args args = new sayHello_args();\n      
args.setName(name);\n      sendBase(\"sayHello\", args);\n    }\n\n    public java.lang.String recv_sayHello() throws org.apache.thrift.TException\n    {\n      sayHello_result result = new sayHello_result();\n      receiveBase(result, \"sayHello\");\n      if (result.isSetSuccess()) {\n        return result.success;\n      }\n      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, \"sayHello failed: unknown result\");\n    }\n\n  }\n  public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {\n    public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {\n      private org.apache.thrift.async.TAsyncClientManager clientManager;\n      private org.apache.thrift.protocol.TProtocolFactory protocolFactory;\n      public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {\n        this.clientManager = clientManager;\n        this.protocolFactory = protocolFactory;\n      }\n      @Override\n      public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {\n        return new AsyncClient(protocolFactory, clientManager, transport);\n      }\n    }\n\n    public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {\n      super(protocolFactory, clientManager, transport);\n    }\n\n    @Override\n    public void sayHello(java.lang.String name, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {\n      checkReady();\n      sayHello_call method_call = new sayHello_call(name, resultHandler, this, ___protocolFactory, ___transport);\n      this.___currentMethod = method_call;\n      ___manager.call(method_call);\n    }\n\n    
public static class sayHello_call extends TAsyncMethodCall<String> {\n      private java.lang.String name;\n      public sayHello_call(java.lang.String name, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {\n        super(client, protocolFactory, transport, resultHandler, false);\n        this.name = name;\n      }\n\n      @Override\n      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {\n        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage(\"sayHello\", org.apache.thrift.protocol.TMessageType.CALL, 0));\n        sayHello_args args = new sayHello_args();\n        args.setName(name);\n        args.write(prot);\n        prot.writeMessageEnd();\n      }\n\n      @Override\n      public java.lang.String getResult() throws org.apache.thrift.TException {\n        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {\n          throw new java.lang.IllegalStateException(\"Method call not finished!\");\n        }\n        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());\n        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);\n        return (new Client(prot)).recv_sayHello();\n      }\n    }\n\n  }\n\n  public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {\n    private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(Processor.class.getName());\n    public Processor(I iface) {\n      super(iface, getProcessMap(new java.util.HashMap<java.lang.String, org.apache.thrift.ProcessFunction<I, ? 
extends org.apache.thrift.TBase>>()));\n    }\n\n    protected Processor(I iface, java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {\n      super(iface, getProcessMap(processMap));\n    }\n\n    private static <I extends Iface> java.util.Map<java.lang.String,  org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {\n      processMap.put(\"sayHello\", new sayHello());\n      return processMap;\n    }\n\n    public static class sayHello<I extends Iface> extends org.apache.thrift.ProcessFunction<I, sayHello_args> {\n      public sayHello() {\n        super(\"sayHello\");\n      }\n\n      @Override\n      public sayHello_args getEmptyArgsInstance() {\n        return new sayHello_args();\n      }\n\n      @Override\n      protected boolean isOneway() {\n        return false;\n      }\n\n      @Override\n      protected boolean rethrowUnhandledExceptions() {\n        return false;\n      }\n\n      @Override\n      public sayHello_result getResult(I iface, sayHello_args args) throws org.apache.thrift.TException {\n        sayHello_result result = new sayHello_result();\n        result.success = iface.sayHello(args.name);\n        return result;\n      }\n    }\n\n  }\n\n  public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {\n    private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(AsyncProcessor.class.getName());\n    public AsyncProcessor(I iface) {\n      super(iface, getProcessMap(new java.util.HashMap<java.lang.String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));\n    }\n\n    protected AsyncProcessor(I iface, java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? 
extends  org.apache.thrift.TBase, ?>> processMap) {\n      super(iface, getProcessMap(processMap));\n    }\n\n    private static <I extends AsyncIface> java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase,?>> getProcessMap(java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase, ?>> processMap) {\n      processMap.put(\"sayHello\", new sayHello());\n      return processMap;\n    }\n\n    public static class sayHello<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, sayHello_args, java.lang.String> {\n      public sayHello() {\n        super(\"sayHello\");\n      }\n\n      @Override\n      public sayHello_args getEmptyArgsInstance() {\n        return new sayHello_args();\n      }\n\n      @Override\n      public org.apache.thrift.async.AsyncMethodCallback<java.lang.String> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {\n        final org.apache.thrift.AsyncProcessFunction fcall = this;\n        return new org.apache.thrift.async.AsyncMethodCallback<java.lang.String>() {\n          @Override\n          public void onComplete(java.lang.String o) {\n            sayHello_result result = new sayHello_result();\n            result.success = o;\n            try {\n              fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);\n            } catch (org.apache.thrift.transport.TTransportException e) {\n              _LOGGER.error(\"TTransportException writing to internal frame buffer\", e);\n              fb.close();\n            } catch (java.lang.Exception e) {\n              _LOGGER.error(\"Exception writing to internal frame buffer\", e);\n              onError(e);\n            }\n          }\n          @Override\n          public void onError(java.lang.Exception e) {\n            byte msgType = 
org.apache.thrift.protocol.TMessageType.REPLY;\n            org.apache.thrift.TSerializable msg;\n            sayHello_result result = new sayHello_result();\n            if (e instanceof org.apache.thrift.transport.TTransportException) {\n              _LOGGER.error(\"TTransportException inside handler\", e);\n              fb.close();\n              return;\n            } else if (e instanceof org.apache.thrift.TApplicationException) {\n              _LOGGER.error(\"TApplicationException inside handler\", e);\n              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;\n              msg = (org.apache.thrift.TApplicationException)e;\n            } else {\n              _LOGGER.error(\"Exception inside handler\", e);\n              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;\n              msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());\n            }\n            try {\n              fcall.sendResponse(fb,msg,msgType,seqid);\n            } catch (java.lang.Exception ex) {\n              _LOGGER.error(\"Exception writing to internal frame buffer\", ex);\n              fb.close();\n            }\n          }\n        };\n      }\n\n      @Override\n      protected boolean isOneway() {\n        return false;\n      }\n\n      @Override\n      public void start(I iface, sayHello_args args, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {\n        iface.sayHello(args.name,resultHandler);\n      }\n    }\n\n  }\n\n  @SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\n  public static class sayHello_args implements org.apache.thrift.TBase<sayHello_args, sayHello_args._Fields>, java.io.Serializable, Cloneable, Comparable<sayHello_args>   {\n    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct(\"sayHello_args\");\n\n    private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField(\"name\", org.apache.thrift.protocol.TType.STRING, (short)1);\n\n    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new sayHello_argsStandardSchemeFactory();\n    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new sayHello_argsTupleSchemeFactory();\n\n    public @org.apache.thrift.annotation.Nullable java.lang.String name; // required\n\n    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */\n    public enum _Fields implements org.apache.thrift.TFieldIdEnum {\n      NAME((short)1, \"name\");\n\n      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();\n\n      static {\n        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {\n          byName.put(field.getFieldName(), field);\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByThriftId(int fieldId) {\n        switch(fieldId) {\n          case 1: // NAME\n            return NAME;\n          default:\n            return null;\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, throwing an exception\n       * if it is not found.\n       */\n      public static _Fields findByThriftIdOrThrow(int fieldId) {\n        _Fields fields = findByThriftId(fieldId);\n        if (fields == null) throw new java.lang.IllegalArgumentException(\"Field \" + fieldId + \" doesn't exist!\");\n        return fields;\n      }\n\n      /**\n       * Find the _Fields constant that matches name, or null if its not found.\n       */\n      
@org.apache.thrift.annotation.Nullable\n      public static _Fields findByName(java.lang.String name) {\n        return byName.get(name);\n      }\n\n      private final short _thriftId;\n      private final java.lang.String _fieldName;\n\n      _Fields(short thriftId, java.lang.String fieldName) {\n        _thriftId = thriftId;\n        _fieldName = fieldName;\n      }\n\n      @Override\n      public short getThriftFieldId() {\n        return _thriftId;\n      }\n\n      @Override\n      public java.lang.String getFieldName() {\n        return _fieldName;\n      }\n    }\n\n    // isset id assignments\n    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;\n    static {\n      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);\n      tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData(\"name\", org.apache.thrift.TFieldRequirementType.DEFAULT,\n              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);\n      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(sayHello_args.class, metaDataMap);\n    }\n\n    public sayHello_args() {\n    }\n\n    public sayHello_args(\n            java.lang.String name)\n    {\n      this();\n      this.name = name;\n    }\n\n    /**\n     * Performs a deep copy on <i>other</i>.\n     */\n    public sayHello_args(sayHello_args other) {\n      if (other.isSetName()) {\n        this.name = other.name;\n      }\n    }\n\n    @Override\n    public sayHello_args deepCopy() {\n      return new sayHello_args(this);\n    }\n\n    @Override\n    public void clear() {\n      this.name = null;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    public java.lang.String getName() {\n      return this.name;\n    }\n\n    public 
sayHello_args setName(@org.apache.thrift.annotation.Nullable java.lang.String name) {\n      this.name = name;\n      return this;\n    }\n\n    public void unsetName() {\n      this.name = null;\n    }\n\n    /** Returns true if field name is set (has been assigned a value) and false otherwise */\n    public boolean isSetName() {\n      return this.name != null;\n    }\n\n    public void setNameIsSet(boolean value) {\n      if (!value) {\n        this.name = null;\n      }\n    }\n\n    @Override\n    public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {\n      switch (field) {\n        case NAME:\n          if (value == null) {\n            unsetName();\n          } else {\n            setName((java.lang.String)value);\n          }\n          break;\n\n      }\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public java.lang.Object getFieldValue(_Fields field) {\n      switch (field) {\n        case NAME:\n          return getName();\n\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */\n    @Override\n    public boolean isSet(_Fields field) {\n      if (field == null) {\n        throw new java.lang.IllegalArgumentException();\n      }\n\n      switch (field) {\n        case NAME:\n          return isSetName();\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    @Override\n    public boolean equals(java.lang.Object that) {\n      if (that instanceof sayHello_args)\n        return this.equals((sayHello_args)that);\n      return false;\n    }\n\n    public boolean equals(sayHello_args that) {\n      if (that == null)\n        return false;\n      if (this == that)\n        return true;\n\n      boolean this_present_name = true && this.isSetName();\n      boolean that_present_name = true && that.isSetName();\n      if (this_present_name || 
that_present_name) {\n        if (!(this_present_name && that_present_name))\n          return false;\n        if (!this.name.equals(that.name))\n          return false;\n      }\n\n      return true;\n    }\n\n    @Override\n    public int hashCode() {\n      int hashCode = 1;\n\n      hashCode = hashCode * 8191 + ((isSetName()) ? 131071 : 524287);\n      if (isSetName())\n        hashCode = hashCode * 8191 + name.hashCode();\n\n      return hashCode;\n    }\n\n    @Override\n    public int compareTo(sayHello_args other) {\n      if (!getClass().equals(other.getClass())) {\n        return getClass().getName().compareTo(other.getClass().getName());\n      }\n\n      int lastComparison = 0;\n\n      lastComparison = java.lang.Boolean.compare(isSetName(), other.isSetName());\n      if (lastComparison != 0) {\n        return lastComparison;\n      }\n      if (isSetName()) {\n        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);\n        if (lastComparison != 0) {\n          return lastComparison;\n        }\n      }\n      return 0;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public _Fields fieldForId(int fieldId) {\n      return _Fields.findByThriftId(fieldId);\n    }\n\n    @Override\n    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {\n      scheme(iprot).read(iprot, this);\n    }\n\n    @Override\n    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {\n      scheme(oprot).write(oprot, this);\n    }\n\n    @Override\n    public java.lang.String toString() {\n      java.lang.StringBuilder sb = new java.lang.StringBuilder(\"sayHello_args(\");\n      boolean first = true;\n\n      sb.append(\"name:\");\n      if (this.name == null) {\n        sb.append(\"null\");\n      } else {\n        sb.append(this.name);\n      }\n      first = false;\n      sb.append(\")\");\n      return sb.toString();\n    }\n\n    
public void validate() throws org.apache.thrift.TException {\n      // check for required fields\n      // check for sub-struct validity\n    }\n\n    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {\n      try {\n        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {\n      try {\n        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private static class sayHello_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public sayHello_argsStandardScheme getScheme() {\n        return new sayHello_argsStandardScheme();\n      }\n    }\n\n    private static class sayHello_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<sayHello_args> {\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol iprot, sayHello_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TField schemeField;\n        iprot.readStructBegin();\n        while (true)\n        {\n          schemeField = iprot.readFieldBegin();\n          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {\n            break;\n          }\n          switch (schemeField.id) {\n            case 1: // NAME\n              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n                struct.name = iprot.readString();\n                struct.setNameIsSet(true);\n              } else {\n                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, 
schemeField.type);\n              }\n              break;\n            default:\n              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n          }\n          iprot.readFieldEnd();\n        }\n        iprot.readStructEnd();\n\n        // check for required fields of primitive type, which can't be checked in the validate method\n        struct.validate();\n      }\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol oprot, sayHello_args struct) throws org.apache.thrift.TException {\n        struct.validate();\n\n        oprot.writeStructBegin(STRUCT_DESC);\n        if (struct.name != null) {\n          oprot.writeFieldBegin(NAME_FIELD_DESC);\n          oprot.writeString(struct.name);\n          oprot.writeFieldEnd();\n        }\n        oprot.writeFieldStop();\n        oprot.writeStructEnd();\n      }\n\n    }\n\n    private static class sayHello_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public sayHello_argsTupleScheme getScheme() {\n        return new sayHello_argsTupleScheme();\n      }\n    }\n\n    private static class sayHello_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<sayHello_args> {\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol prot, sayHello_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet optionals = new java.util.BitSet();\n        if (struct.isSetName()) {\n          optionals.set(0);\n        }\n        oprot.writeBitSet(optionals, 1);\n        if (struct.isSetName()) {\n          oprot.writeString(struct.name);\n        }\n      }\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol prot, sayHello_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol iprot = 
(org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet incoming = iprot.readBitSet(1);\n        if (incoming.get(0)) {\n          struct.name = iprot.readString();\n          struct.setNameIsSet(true);\n        }\n      }\n    }\n\n    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {\n      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();\n    }\n  }\n\n  @SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\n  public static class sayHello_result implements org.apache.thrift.TBase<sayHello_result, sayHello_result._Fields>, java.io.Serializable, Cloneable, Comparable<sayHello_result>   {\n    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(\"sayHello_result\");\n\n    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField(\"success\", org.apache.thrift.protocol.TType.STRING, (short)0);\n\n    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new sayHello_resultStandardSchemeFactory();\n    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new sayHello_resultTupleSchemeFactory();\n\n    public @org.apache.thrift.annotation.Nullable java.lang.String success; // required\n\n    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/\n    public enum _Fields implements org.apache.thrift.TFieldIdEnum {\n      SUCCESS((short)0, \"success\");\n\n      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();\n\n      static {\n        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {\n          byName.put(field.getFieldName(), field);\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByThriftId(int fieldId) {\n        switch(fieldId) {\n          case 0: // SUCCESS\n            return SUCCESS;\n          default:\n            return null;\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, throwing an exception\n       * if it is not found.\n       */\n      public static _Fields findByThriftIdOrThrow(int fieldId) {\n        _Fields fields = findByThriftId(fieldId);\n        if (fields == null) throw new java.lang.IllegalArgumentException(\"Field \" + fieldId + \" doesn't exist!\");\n        return fields;\n      }\n\n      /**\n       * Find the _Fields constant that matches name, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByName(java.lang.String name) {\n        return byName.get(name);\n      }\n\n      private final short _thriftId;\n      private final java.lang.String _fieldName;\n\n      _Fields(short thriftId, java.lang.String fieldName) {\n        _thriftId = thriftId;\n        _fieldName = fieldName;\n      }\n\n      @Override\n      public short getThriftFieldId() {\n        return _thriftId;\n      }\n\n      @Override\n      public java.lang.String getFieldName() {\n        return _fieldName;\n      }\n    }\n\n    // isset id assignments\n    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> 
metaDataMap;\n    static {\n      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);\n      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData(\"success\", org.apache.thrift.TFieldRequirementType.DEFAULT,\n              new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);\n      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(sayHello_result.class, metaDataMap);\n    }\n\n    public sayHello_result() {\n    }\n\n    public sayHello_result(\n            java.lang.String success)\n    {\n      this();\n      this.success = success;\n    }\n\n    /**\n     * Performs a deep copy on <i>other</i>.\n     */\n    public sayHello_result(sayHello_result other) {\n      if (other.isSetSuccess()) {\n        this.success = other.success;\n      }\n    }\n\n    @Override\n    public sayHello_result deepCopy() {\n      return new sayHello_result(this);\n    }\n\n    @Override\n    public void clear() {\n      this.success = null;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    public java.lang.String getSuccess() {\n      return this.success;\n    }\n\n    public sayHello_result setSuccess(@org.apache.thrift.annotation.Nullable java.lang.String success) {\n      this.success = success;\n      return this;\n    }\n\n    public void unsetSuccess() {\n      this.success = null;\n    }\n\n    /** Returns true if field success is set (has been assigned a value) and false otherwise */\n    public boolean isSetSuccess() {\n      return this.success != null;\n    }\n\n    public void setSuccessIsSet(boolean value) {\n      if (!value) {\n        this.success = null;\n      }\n    }\n\n    @Override\n    public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {\n      
switch (field) {\n        case SUCCESS:\n          if (value == null) {\n            unsetSuccess();\n          } else {\n            setSuccess((java.lang.String)value);\n          }\n          break;\n\n      }\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public java.lang.Object getFieldValue(_Fields field) {\n      switch (field) {\n        case SUCCESS:\n          return getSuccess();\n\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */\n    @Override\n    public boolean isSet(_Fields field) {\n      if (field == null) {\n        throw new java.lang.IllegalArgumentException();\n      }\n\n      switch (field) {\n        case SUCCESS:\n          return isSetSuccess();\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    @Override\n    public boolean equals(java.lang.Object that) {\n      if (that instanceof sayHello_result)\n        return this.equals((sayHello_result)that);\n      return false;\n    }\n\n    public boolean equals(sayHello_result that) {\n      if (that == null)\n        return false;\n      if (this == that)\n        return true;\n\n      boolean this_present_success = true && this.isSetSuccess();\n      boolean that_present_success = true && that.isSetSuccess();\n      if (this_present_success || that_present_success) {\n        if (!(this_present_success && that_present_success))\n          return false;\n        if (!this.success.equals(that.success))\n          return false;\n      }\n\n      return true;\n    }\n\n    @Override\n    public int hashCode() {\n      int hashCode = 1;\n\n      hashCode = hashCode * 8191 + ((isSetSuccess()) ? 
131071 : 524287);\n      if (isSetSuccess())\n        hashCode = hashCode * 8191 + success.hashCode();\n\n      return hashCode;\n    }\n\n    @Override\n    public int compareTo(sayHello_result other) {\n      if (!getClass().equals(other.getClass())) {\n        return getClass().getName().compareTo(other.getClass().getName());\n      }\n\n      int lastComparison = 0;\n\n      lastComparison = java.lang.Boolean.compare(isSetSuccess(), other.isSetSuccess());\n      if (lastComparison != 0) {\n        return lastComparison;\n      }\n      if (isSetSuccess()) {\n        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);\n        if (lastComparison != 0) {\n          return lastComparison;\n        }\n      }\n      return 0;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public _Fields fieldForId(int fieldId) {\n      return _Fields.findByThriftId(fieldId);\n    }\n\n    @Override\n    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {\n      scheme(iprot).read(iprot, this);\n    }\n\n    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {\n      scheme(oprot).write(oprot, this);\n    }\n\n    @Override\n    public java.lang.String toString() {\n      java.lang.StringBuilder sb = new java.lang.StringBuilder(\"sayHello_result(\");\n      boolean first = true;\n\n      sb.append(\"success:\");\n      if (this.success == null) {\n        sb.append(\"null\");\n      } else {\n        sb.append(this.success);\n      }\n      first = false;\n      sb.append(\")\");\n      return sb.toString();\n    }\n\n    public void validate() throws org.apache.thrift.TException {\n      // check for required fields\n      // check for sub-struct validity\n    }\n\n    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {\n      try {\n        write(new 
org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {\n      try {\n        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private static class sayHello_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public sayHello_resultStandardScheme getScheme() {\n        return new sayHello_resultStandardScheme();\n      }\n    }\n\n    private static class sayHello_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<sayHello_result> {\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol iprot, sayHello_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TField schemeField;\n        iprot.readStructBegin();\n        while (true)\n        {\n          schemeField = iprot.readFieldBegin();\n          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {\n            break;\n          }\n          switch (schemeField.id) {\n            case 0: // SUCCESS\n              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n                struct.success = iprot.readString();\n                struct.setSuccessIsSet(true);\n              } else {\n                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n              }\n              break;\n            default:\n              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n          }\n          iprot.readFieldEnd();\n        }\n        
iprot.readStructEnd();\n\n        // check for required fields of primitive type, which can't be checked in the validate method\n        struct.validate();\n      }\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol oprot, sayHello_result struct) throws org.apache.thrift.TException {\n        struct.validate();\n\n        oprot.writeStructBegin(STRUCT_DESC);\n        if (struct.success != null) {\n          oprot.writeFieldBegin(SUCCESS_FIELD_DESC);\n          oprot.writeString(struct.success);\n          oprot.writeFieldEnd();\n        }\n        oprot.writeFieldStop();\n        oprot.writeStructEnd();\n      }\n\n    }\n\n    private static class sayHello_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public sayHello_resultTupleScheme getScheme() {\n        return new sayHello_resultTupleScheme();\n      }\n    }\n\n    private static class sayHello_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<sayHello_result> {\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol prot, sayHello_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet optionals = new java.util.BitSet();\n        if (struct.isSetSuccess()) {\n          optionals.set(0);\n        }\n        oprot.writeBitSet(optionals, 1);\n        if (struct.isSetSuccess()) {\n          oprot.writeString(struct.success);\n        }\n      }\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol prot, sayHello_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet incoming = iprot.readBitSet(1);\n        if (incoming.get(0)) {\n          struct.success = iprot.readString();\n          
struct.setSuccessIsSet(true);\n        }\n      }\n    }\n\n    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {\n      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();\n    }\n  }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/java/com/pinecone/hydra/thrift/service/impl/HelloWorldServiceImpl.java",
    "content": "package com.pinecone.hydra.thrift.service.impl;\n\nimport com.pinecone.hydra.thrift.service.HelloWorldService;\nimport org.apache.thrift.TException;\n\n\npublic class HelloWorldServiceImpl implements HelloWorldService.Iface {\n    @Override\n    public String sayHello(String name) throws TException {\n        System.out.println(name);\n        try {\n            return \"Hello, \" + name;\n        } catch (Exception e) {\n            e.printStackTrace();\n            throw new TException(\"Error in sayHello: \" + e.getMessage());\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/main/resources/thrift/hellow.thrift",
    "content": "namespace java com.example.thrift\n\nservice HelloWorldService {\n    string sayHello(1: string name)\n}"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/test/java/com/thrift/TestThriftClient.java",
    "content": "package com.thrift;\n\nimport com.pinecone.hydra.thrift.client.GenericThriftClient;\nimport com.pinecone.hydra.thrift.service.HelloWorldService;\nimport org.apache.thrift.TException;\n\npublic class TestThriftClient {\n    public static void main(String[] args) throws TException {\n        GenericThriftClient<HelloWorldService.Client> client = new GenericThriftClient<>(\"localhost\", 8001, 30000, HelloWorldService.Client.class);\n        HelloWorldService.Client clientClient = client.getClient();\n        clientClient.sayHello(\"你好\");\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/test/java/com/thrift/TestThriftService.java",
    "content": "package com.thrift;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.thrift.client.GenericMultiplexedThriftClient;\nimport com.pinecone.hydra.thrift.server.MultiplexedServer;\nimport com.pinecone.hydra.thrift.service.HelloWorldService;\nimport com.pinecone.hydra.thrift.service.impl.HelloWorldServiceImpl;\n\n\npublic class TestThriftService {\n    public static void main(String[] args) throws Exception {\n        Thread thread = new Thread(()->{\n            HelloWorldService.Iface hello = new HelloWorldServiceImpl();\n//            GenericThriftServer<HelloWorldService.Processor<HelloWorldService.Iface>> server = new GenericThriftServer<>(new HelloWorldService.Processor<>(hello), 8001);\n//            server.start();\n            MultiplexedServer multiplexedServer = new MultiplexedServer(\n                    new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                    \"port: 16701, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\")\n            );\n            multiplexedServer.registerProcessor( new HelloWorldService.Processor<>(hello) );\n            multiplexedServer.start();\n        });\n        thread.start();\n\n\n        Thread.sleep( 1000 );\n\n//        GenericThriftClient<HelloWorldService.Client> client = new GenericThriftClient<>(\"localhost\", 8001, 30000, HelloWorldService.Client.class);\n//        HelloWorldService.Client clientClient = client.getClient();\n//        clientClient.sayHello(\"你好\");\n        GenericMultiplexedThriftClient thriftClient = new GenericMultiplexedThriftClient(\"localhost\", 16701);\n        HelloWorldService.Client hello = thriftClient.getClient(\"HelloWorldService\", HelloWorldService.Client.class);\n        hello.sayHello(\"你好\");\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-thrift-sdk/src/test/java/org/example/AppTest.java",
    "content": "package org.example;\n\nimport junit.framework.Test;\nimport junit.framework.TestCase;\nimport junit.framework.TestSuite;\n\n/**\n * Unit test for simple App.\n */\npublic class AppTest \n    extends TestCase\n{\n    /**\n     * Create the test case\n     *\n     * @param testName name of the test case\n     */\n    public AppTest( String testName )\n    {\n        super( testName );\n    }\n\n    /**\n     * @return the suite of tests being tested\n     */\n    public static Test suite()\n    {\n        return new TestSuite( AppTest.class );\n    }\n\n    /**\n     * Rigourous Test :-)\n     */\n    public void testApp()\n    {\n        assertTrue( true );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-uofs-cache/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.storage.uofs.cache</groupId>\n    <artifactId>hydra-lib-uofs-cache</artifactId>\n    <version>1.2.1</version>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-storage</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>junit</groupId>\n            <artifactId>junit</artifactId>\n            <version>3.8.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Hydra/hydra-lib-uofs-cache/src/main/java/com/pinecone/hydra/storage/file/UOFSCacheComponentor.java",
    "content": "package com.pinecone.hydra.storage.file;\n\nimport com.pinecone.hydra.storage.file.builder.Feature;\nimport com.pinecone.hydra.storage.file.builder.UOFSComponentor;\nimport com.pinecone.hydra.storage.file.cache.FileSystemCacheConfig;\nimport com.pinecone.slime.jelly.source.redis.GenericRedisMasterManipulator;\nimport com.pinecone.slime.map.indexable.IndexableMapQuerier;\nimport com.pinecone.slime.source.indexable.GenericIndexableTargetScopeMeta;\nimport com.pinecone.slime.source.indexable.IndexableIterableManipulator;\nimport com.pinecone.slime.source.indexable.IndexableTargetScopeMeta;\nimport redis.clients.jedis.Jedis;\nimport redis.clients.jedis.JedisPool;\nimport redis.clients.jedis.JedisPoolConfig;\n\npublic class UOFSCacheComponentor implements UOFSComponentor {\n    private FileSystemCacheConfig   cacheConfig;\n    private String                  redisHost;\n    private int                     redisPort;\n    private int                     redisTimeOut;\n    private String                  redisPassword;\n    private int                     redisDatabase;\n\n    public UOFSCacheComponentor(FileSystemCacheConfig cacheConfig){\n        this.cacheConfig = cacheConfig;\n        this.redisHost = this.cacheConfig.getRedisHost();\n        this.redisPort = this.cacheConfig.getRedisPort();\n        this.redisTimeOut = this.cacheConfig.getRedisTimeOut();\n        this.redisPassword = this.cacheConfig.getRedisPassword();\n        this.redisDatabase = this.cacheConfig.getRedisDatabase();\n    }\n\n    @Override\n    public Feature getFeature() {\n        return Feature.EnableGlobalCache;\n    }\n\n    @Override\n    public void apply( KOMFileSystem fs ) {\n        UniformObjectFileSystem uofs = (UniformObjectFileSystem) fs;\n\n        JedisPoolConfig poolConfig = new JedisPoolConfig();\n        JedisPool jedisPool = new JedisPool( poolConfig, this.redisHost, this.redisPort, this.redisTimeOut, this.redisPassword, this.redisDatabase );\n        Jedis 
jedis = jedisPool.getResource();\n        jedis.auth( this.redisPassword );\n        IndexableIterableManipulator<String, String > manipulator = new GenericRedisMasterManipulator<>( jedis );\n        IndexableTargetScopeMeta meta = new GenericIndexableTargetScopeMeta( \"0\", \"\", Object.class, manipulator );\n        IndexableMapQuerier<String, String > querier = new IndexableMapQuerier<>( meta, true );\n        uofs.apply( querier );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-lib-uofs-cache/src/test/java/org/example/AppTest.java",
    "content": "package org.example;\n\nimport junit.framework.Test;\nimport junit.framework.TestCase;\nimport junit.framework.TestSuite;\n\n/**\n * Unit test for simple App.\n */\npublic class AppTest \n    extends TestCase\n{\n    /**\n     * Create the test case\n     *\n     * @param testName name of the test case\n     */\n    public AppTest( String testName )\n    {\n        super( testName );\n    }\n\n    /**\n     * @return the suite of tests being tested\n     */\n    public static Test suite()\n    {\n        return new TestSuite( AppTest.class );\n    }\n\n    /**\n     * Rigourous Test :-)\n     */\n    public void testApp()\n    {\n        assertTrue( true );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-message-broadcast</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        
</dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n\n        <dependency>\n            <groupId>io.netty</groupId>\n            <artifactId>netty-all</artifactId>\n            <version>4.1.80.Final</version>\n        </dependency>\n\n        <dependency>\n            <groupId>com.rabbitmq</groupId>\n            <artifactId>amqp-client</artifactId>\n            <version>5.14.2</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.apache.rocketmq</groupId>\n            <artifactId>rocketmq-client</artifactId>\n            <version>4.9.1</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.apache.kafka</groupId>\n            <artifactId>kafka-clients</artifactId>\n            <version>3.9.1</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBBytesDecoder.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umc.msg.GenericEMCBytesDecoder;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\npublic class UMBBytesDecoder extends GenericEMCBytesDecoder {\n    @Override\n    public UMCHead decode( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException {\n        UMCHead head = super.decode( buf, extraHeadCoder );\n        if ( head != null ) {\n            return head;\n        }\n\n        if ( this.isQualified( buf, UMBPHeadV1.ProtocolSignature ) ) {\n            return UMBPHeadV1.decode( buf, UMBPHeadV1.ProtocolSignature, extraHeadCoder );\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBClientException.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport com.pinecone.hydra.umct.ServiceException;\n\npublic class UMBClientException extends ServiceException {\n    public UMBClientException() {\n        super();\n    }\n\n    public UMBClientException( String message ) {\n        super(message);\n    }\n\n    public UMBClientException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public UMBClientException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBHead.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport com.pinecone.hydra.umc.msg.UMCHead;\n\n/**\n *  Pinecone Ursus For Java UMB [ Uniform Message Broadcast Control Transmit ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Uniform Message Control Transmission Protocol - Broadcast [UMC-T-B]\n *  统一消息广播控制传输协议\n *  For: MQ / Kafka\n *  **********************************************************\n */\npublic interface UMBHead extends UMCHead {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBPHeadV1.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.nio.ByteOrder;\nimport java.util.Arrays;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.ObjectiveBean;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.datetime.compact.CompactTimeUnit;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.msg.AbstractUMCHead;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.Status;\nimport com.pinecone.hydra.umc.msg.StreamTerminateException;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.hydra.umc.msg.UMCHeadV1;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\n/**\n *  Pinecone Ursus For Java UMB [ Uniform Message Broadcast Control Transmit - Package ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Uniform Message Control Transmission Protocol - Broadcast Package [UMC-T-BP]\n *  统一消息广播控制传输协议-小包分协议\n *  For: Simplified Message Small-Package [最小压缩邮政小包]\n *  **********************************************************\n *  According to MQ traits, in practice, if a message is received, its status should be 'OK' in principle.\n *  根据MQ特性，实践中，若收到消息状态码原则上就应该是 `OK`\n *  **********************************************************\n */\npublic class UMBPHeadV1 extends AbstractUMCHead implements UMBHead {\n    public static final String     ProtocolVersion   = \"1.1\";\n    public static final String     ProtocolSignature = \"UMC-BP/\" + UMBPHeadV1.ProtocolVersion;\n    public static final int        StructBlockSize   = Integer.BYTES + Byte.BYTES;\n    public static final int        HeadBlockSize     
= UMBPHeadV1.ProtocolSignature.length() + UMBPHeadV1.StructBlockSize;\n    public static final ByteOrder  BinByteOrder      = UMCHeadV1.BinByteOrder;\n    public static final int        HeadFieldsSize    = 3;\n\n    protected String                 szSignature                                ; // :0\n    protected int                    nExtraHeadLength  = 2                      ; // :1 sizeof( int32 ) = 4\n    protected ExtraEncode            extraEncode       = ExtraEncode.Undefined  ; // :2 sizeof( ExtraEncode/byte ) = 1\n\n    protected byte[]                 extraHead         = {}                     ;\n    protected Object                 dyExtraHead                                ;\n    protected ExtraHeadCoder         extraHeadCoder                             ;\n\n\n    public UMBPHeadV1(  ) {\n        this.szSignature = UMBPHeadV1.ProtocolSignature;\n        this.dyExtraHead = new LinkedTreeMap<>();\n    }\n\n\n    @Override\n    public int sizeof() {\n        return UMBPHeadV1.HeadBlockSize;\n    }\n\n\n    @Override\n    public int fieldsSize() {\n        return UMBPHeadV1.HeadFieldsSize;\n    }\n\n\n\n    @Override\n    protected void setSignature            ( String signature                         ) {\n        this.szSignature = signature;\n    }\n\n    @Override\n    protected void setBodyLength           ( long length                              ) {\n\n    }\n\n    @Override\n    public void setKeepAlive               ( int nKeepAliveMills                      ) {\n\n    }\n\n    @Override\n    public void setKeepAlive               ( int nKeepAlive, CompactTimeUnit timeUnit ) {\n\n    }\n\n    @Override\n    protected void setMethod               ( UMCMethod umcMethod                      ) {\n\n    }\n\n    @Override\n    protected void setExtraEncode          ( ExtraEncode encode                       ) {\n        this.extraEncode = encode;\n    }\n\n\n\n    @Override\n    public void setControlBits   ( int controlBits       ) {\n\n    }\n\n    
@Override\n    public void setSessionId     ( long sessionId         ) {\n\n    }\n\n    @Override\n    public void setIdentityId    ( long identityId        ) {\n\n    }\n\n\n\n    @Override\n    protected void setExtraHead            ( JSONObject jo          ) {\n        this.dyExtraHead = jo.getMap();\n    }\n\n    @Override\n    protected void setExtraHead            ( Map<String,Object > jo ) {\n        this.dyExtraHead = jo;\n    }\n\n    @Override\n    protected void setExtraHead            ( Object o               ) {\n        this.dyExtraHead = o;\n        if( o == null ) {\n            this.nExtraHeadLength = 0;\n        }\n    }\n\n    @Override\n    protected void transApplyExHead        (                        ) {\n        if ( this.dyExtraHead != null ) {\n            this.extraHead         = this.extraHeadCoder.getEncoder().encode( this, this.dyExtraHead );\n            this.nExtraHeadLength  = this.extraHead.length;\n        }\n        else {\n            if( this.extraEncode == ExtraEncode.JSONString ) {\n                this.extraHead  = \"{}\".getBytes();\n            }\n            else if( this.extraEncode == ExtraEncode.Prototype ) {\n                this.extraHead         = null;\n                this.nExtraHeadLength  = 0;\n                return;\n            }\n            else if( this.extraEncode == ExtraEncode.Iussum ) {\n                this.extraHead         = new byte[ 0 ];\n                this.nExtraHeadLength  = 0;\n                return;\n            }\n            else {\n                this.dyExtraHead = this.extraHeadCoder.newExtraHead();\n                this.extraHead   = this.extraHeadCoder.getEncoder().encode( this, this.dyExtraHead );\n            }\n        }\n\n        this.nExtraHeadLength  = this.extraHead.length;\n    }\n\n    @Override\n    protected void applyExtraHeadCoder     ( ExtraHeadCoder coder   ) {\n        this.extraHeadCoder = coder;\n\n        if( this.extraEncode == ExtraEncode.Undefined ) {\n        
    this.extraEncode = coder.getDefaultEncode();\n        }\n    }\n\n\n\n    @Override\n    public void            setStatus ( Status status ) {\n\n    }\n\n    @Override\n    public ExtraHeadCoder  getExtraHeadCoder() {\n        return this.extraHeadCoder;\n    }\n\n    @Override\n    public String          getSignature() {\n        return this.szSignature;\n    }\n\n    @Override\n    public int             getSignatureLength() {\n        return this.getSignature().length();\n    }\n\n    @Override\n    public UMCMethod       getMethod() {\n        return UMCMethod.INFORM;\n    }\n\n    @Override\n    public int             getExtraHeadLength() {\n        return this.nExtraHeadLength;\n    }\n\n    @Override\n    public long            getBodyLength() {\n        return 0L;\n    }\n\n    @Override\n    public long            getKeepAlive() {\n        return -1L;\n    }\n\n    @Override\n    public int             getCompactKeepAlive() {\n        return -1;\n    }\n\n    @Override\n    public long            getSessionId() {\n        return -1L;\n    }\n\n    @Override\n    public Status          getStatus() {\n        return Status.OK;\n    }\n\n    @Override\n    public ExtraEncode     getExtraEncode() {\n        return this.extraEncode;\n    }\n\n    @Override\n    public int            getControlBits() {\n        return 0;\n    }\n\n    @Override\n    public long            getIdentityId() {\n        return 0;\n    }\n\n    @Override\n    public byte[]          getExtraHeadBytes() {\n        return this.extraHead ;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Map<String, Object > evalMapExtraHead() {\n        if( this.dyExtraHead instanceof Map ) {\n            return (Map) this.dyExtraHead;\n        }\n        return ( new ObjectiveBean( this.dyExtraHead ) ).toMap();\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Map<String, Object > getMapExtraHead() {\n        if( this.dyExtraHead instanceof Map ) 
{\n            return (Map) this.dyExtraHead;\n        }\n        return null;\n    }\n\n    @Override\n    public Object getExtraHead() {\n        return this.dyExtraHead;\n    }\n\n    @Override\n    public void putExHeaderVal( String key, Object val ) throws IllegalArgumentException {\n        if( this.dyExtraHead instanceof Map ) {\n            this.getMapExtraHead().put( key, val );\n        }\n        else {\n            ReflectionUtils.beanSet( this.dyExtraHead, key, val );\n        }\n    }\n\n    @Override\n    public Object getExHeaderVal( String key ) {\n        if( this.dyExtraHead instanceof Map ) {\n            return this.getMapExtraHead().get( key );\n        }\n        else {\n            return ReflectionUtils.beanGet( this.dyExtraHead, key );\n        }\n    }\n\n    @Override\n    protected UMCHead applyExHead( Map<String, Object > jo      ) {\n        if( !( this.dyExtraHead instanceof Map ) && this.dyExtraHead != null ) {\n            throw new IllegalArgumentException( \"Current extra headed is not dynamic.\" );\n        }\n\n        if( this.getMapExtraHead() == null || this.getMapExtraHead().size() == 0 ) {\n            this.setExtraHead( jo );\n        }\n        else {\n            if( jo.size() > this.getMapExtraHead().size() ) {\n                jo.putAll( this.getMapExtraHead() );\n                this.setExtraHead( jo );\n            }\n            else {\n                this.getMapExtraHead().putAll( jo );\n            }\n        }\n        return this;\n    }\n\n    public UMCHead receiveSet( Map<String, Object > joExtraHead ) {\n        this.dyExtraHead = joExtraHead;\n        return this;\n    }\n\n    @Override\n    public void release() {\n        // Help GC\n        this.dyExtraHead = null;\n    }\n\n    @Override\n    public EncodePair bytesEncode( ExtraHeadCoder extraHeadCoder ) {\n        return UMBPHeadV1.encode( this, extraHeadCoder );\n    }\n\n\n\n    public static EncodePair encode( UMCHead umcHead, ExtraHeadCoder 
extraHeadCoder ) {\n        UMBPHeadV1 head = (UMBPHeadV1) umcHead;\n        head.applyExtraHeadCoder( extraHeadCoder );\n        head.transApplyExHead();\n\n        ByteBuffer byteBuffer = ByteBuffer.allocate( UMCHeadV1.ReadBufferSize + head.getExtraHeadLength() );\n        byteBuffer.order( BinByteOrder );\n\n        byteBuffer.put( head.getSignature().getBytes() );\n\n        int nBufLength = head.getSignatureLength();\n        byteBuffer.putInt( head.nExtraHeadLength );\n        nBufLength += Integer.BYTES;\n\n        byteBuffer.put( head.extraEncode.getByteValue() );\n        nBufLength += Byte.BYTES;\n\n\n\n\n        if( head.extraHead == null ) {\n            byteBuffer.put( Bytes.Empty );\n        }\n        else {\n            byteBuffer.put( head.extraHead );\n        }\n        nBufLength += head.getExtraHeadLength();\n\n        return new EncodePair( byteBuffer, nBufLength );\n    }\n\n    public static UMCHead decode( byte[] buf, String szSignature, ExtraHeadCoder extraHeadCoder ) throws IOException {\n        int nBufSize = szSignature.length() + UMBPHeadV1.StructBlockSize;\n\n        if ( buf.length < nBufSize ) {\n            throw new StreamTerminateException( \"StreamEndException:[UMBPProtocol] Stream is ended.\" );\n        }\n\n        int nReadAt = szSignature.length();\n        if ( !Arrays.equals( buf, 0, szSignature.length(), szSignature.getBytes(), 0, szSignature.length() )  ) {\n            throw new IOException( \"[UMBPProtocol] Illegal protocol signature.\" );\n        }\n\n        UMBPHeadV1 head = new UMBPHeadV1();\n        head.applyExtraHeadCoder( extraHeadCoder );\n\n\n        head.nExtraHeadLength  = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( BinByteOrder ).getInt();\n        nReadAt += Integer.BYTES;\n\n        head.extraEncode       = ExtraEncode.asValue( ByteBuffer.wrap( buf, nReadAt, Byte.BYTES ).order( BinByteOrder ).get() );\n        nReadAt += Byte.BYTES;\n\n        return head;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMBServiceException.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.ServiceException;\n\npublic class UMBServiceException extends ServiceException implements Pinenut {\n    public UMBServiceException() {\n        super();\n    }\n\n    public UMBServiceException( String message ) {\n        super(message);\n    }\n\n    public UMBServiceException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public UMBServiceException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UMCPackageMessageEncoder.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic interface UMCPackageMessageEncoder extends Pinenut {\n    byte[] encode( UMCMessage message ) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UlfMBInformMessage.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport com.pinecone.hydra.umc.msg.ArchUMCMessage;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.InformMessage;\nimport com.pinecone.hydra.umc.msg.UMCCHeadV1;\nimport com.pinecone.hydra.umc.msg.UMCHead;\n\nimport java.util.Map;\n\npublic class UlfMBInformMessage extends ArchUMCMessage implements InformMessage {\n    public static UMCHead newUMCHead( Object exHead ) {\n        UMBPHeadV1 head = new UMBPHeadV1();\n        head.setExtraHead( exHead );\n        head.setExtraEncode( ExtraEncode.Prototype );\n        return head;\n    }\n\n    public static UMCHead newUMCHead( Map<String,Object > joExHead ) {\n        UMBPHeadV1 head = new UMBPHeadV1();\n        head.applyExHead( joExHead );\n        return head;\n    }\n\n    public static UMCHead newUMCHead( Object exHead, int controlBits ) {\n        UMCCHeadV1 head = UlfMBInformMessage.newUMCHead( controlBits );\n        head.setExtraHead( exHead );\n        head.setExtraEncode( ExtraEncode.Prototype );\n        return head;\n    }\n\n    public static UMCHead newUMCHead( Map<String,Object > joExHead, int controlBits ) {\n        UMCCHeadV1 head = UlfMBInformMessage.newUMCHead( controlBits );\n        head.applyExHead( joExHead );\n        return head;\n    }\n\n    public static UMCCHeadV1 newUMCHead( int controlBits ) {\n        UMCCHeadV1 head = new UMCCHeadV1();\n        head.setControlBits( controlBits );\n        return head;\n    }\n\n\n\n    public UlfMBInformMessage( UMCHead head ) {\n        super( head );\n    }\n\n    public UlfMBInformMessage( Map<String,Object > joExHead ) {\n        this( UlfMBInformMessage.newUMCHead( joExHead ) );\n    }\n\n    public UlfMBInformMessage( Object protoExHead ) {\n        this( UlfMBInformMessage.newUMCHead( protoExHead ) );\n    }\n\n\n    public UlfMBInformMessage( Map<String,Object > joExHead, int controlBits ) {\n        this( UlfMBInformMessage.newUMCHead( joExHead, controlBits ) 
);\n    }\n\n    public UlfMBInformMessage( Object protoExHead, int controlBits ) {\n        this( UlfMBInformMessage.newUMCHead( protoExHead, controlBits ) );\n    }\n\n\n    public UlfMBInformMessage( int controlBits ) {\n        this( UlfMBInformMessage.newUMCHead( controlBits ) );\n    }\n\n\n    @Override\n    public long        getMessageLength(){\n        if ( this.mHead instanceof UMBPHeadV1 ) {\n            return UMBPHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength();\n        }\n\n        return UMCCHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength();\n    }\n\n    @Override\n    public UMCHead getHead() {\n        return super.getHead();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UlfPackageMessageEncoder.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.util.UnitHelper;\nimport com.pinecone.hydra.umc.msg.ArchBytesTransferMessage;\nimport com.pinecone.hydra.umc.msg.ArchStreamTransferMessage;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\n\npublic class UlfPackageMessageEncoder implements UMCPackageMessageEncoder {\n    protected ExtraHeadCoder        mExtraHeadCoder     ;\n\n    public UlfPackageMessageEncoder ( ExtraHeadCoder extraHeadCoder ) {\n        this.mExtraHeadCoder = extraHeadCoder;\n    }\n\n    @Override\n    public byte[] encode( UMCMessage message ) throws IOException {\n        if ( message.evinceTransferMessage() != null ) {\n            UMCHead.EncodePair pair = message.getHead().bytesEncode( this.mExtraHeadCoder );\n            byte[] headBuf = pair.getBytes();\n\n            if ( message instanceof ArchStreamTransferMessage ) {\n                ArchStreamTransferMessage transferMessage = (ArchStreamTransferMessage) message;\n\n                byte[] bytes = transferMessage.getBody().readAllBytes();\n\n                return (byte[]) UnitHelper.mergeArr( headBuf, bytes );\n            }\n            else if ( message instanceof ArchBytesTransferMessage ) {\n                ArchBytesTransferMessage transferMessage = (ArchBytesTransferMessage) message;\n\n                byte[] bytes = transferMessage.getBody();\n\n                return (byte[]) UnitHelper.mergeArr( headBuf, bytes );\n            }\n        }\n        else if ( message.evinceInformMessage() != null ) {\n            UMCHead.EncodePair pair = message.getHead().bytesEncode( this.mExtraHeadCoder );\n            return pair.getBytes();\n        }\n\n        throw new IllegalArgumentException( \"Type of `UMCMessage` [ \" + message.getClass().getSimpleName() + \" ] is not supported.\" );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/UlfPackageMessageHandler.java",
    "content": "package com.pinecone.hydra.umb;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UlfPackageMessageHandler extends Pinenut {\n    default void onSuccessfulMsgReceived ( byte[] body, Object[] args ) throws Exception {\n\n    }\n\n    default void onErrorMsgReceived      ( byte[] body, Object[] args ) throws Exception {\n\n    }\n\n    default void onError                 ( Object data, Throwable cause ) {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/ArchUnidirectionalMCProtocol.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCHeadV1;\nimport com.pinecone.hydra.umc.msg.UMCProtocol;\n\npublic class ArchUnidirectionalMCProtocol implements UMCProtocol {\n\n    protected String        mszVersion     = UMCHeadV1.ProtocolVersion;\n\n    protected String        mszSignature   = UMCHeadV1.ProtocolSignature;\n\n    protected Medium        mMessageSource ;\n\n    public ArchUnidirectionalMCProtocol( Medium messageSource ) {\n        this.mMessageSource = messageSource;\n        this.applyMessageSource( messageSource );\n    }\n\n    @Override\n    public UMCProtocol applyMessageSource( Medium medium ) {\n        this.mMessageSource = medium;\n        return this;\n    }\n\n    @Override\n    public Medium getMessageSource() {\n        return this.mMessageSource;\n    }\n\n    @Override\n    public String getVersion(){\n        return this.mszVersion;\n    }\n\n    @Override\n    public String getSignature() {\n        return this.mszSignature;\n    }\n\n    @Override\n    public void release() {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastConsumer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\n\npublic interface BroadcastConsumer extends Pinenut {\n\n    void close();\n\n    void start( UlfPackageMessageHandler handler ) throws UMBServiceException;\n\n    boolean isClosed();\n\n    String topic();\n\n    String tag();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastControlAgent.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery;\n\npublic interface BroadcastControlAgent extends Pinenut {\n\n    MCTContextMachinery getMCTTransformer();\n\n    InterfacialCompiler getInterfacialCompiler();\n\n    ClassDigest queryClassDigest( String name );\n\n    MethodDigest queryMethodDigest( String name );\n\n    void addClassDigest( ClassDigest that );\n\n    void addMethodDigest( MethodDigest that );\n\n    ClassDigest compile( Class<? > clazz, boolean bAsIface );\n\n    BroadcastControlNode broadcastControlNode();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastControlConsumer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\npublic interface BroadcastControlConsumer extends BroadcastControlAgent {\n\n    void start() throws UMBServiceException;\n\n    void start( UMCTExpressHandler handler ) throws UMBServiceException;\n\n    void close();\n\n\n\n\n    void registerInstance( String deliverName, Object instance, Class<?> iface ) ;\n\n    void registerInstance( Object instance, Class<?> iface ) ;\n\n    void registerController( String deliverName, Object instance, Class<?> controllerType ) ;\n\n    void registerController( Object instance, Class<?> controllerType ) ;\n\n    default void registerController( Object instance ) {\n        this.registerController( instance, instance.getClass() );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastControlNode.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.UMCTNode;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery;\nimport com.pinecone.hydra.umct.husky.machinery.RouteDispatcher;\n\npublic interface BroadcastControlNode extends UMCBroadcastNode, UMCTNode {\n\n    UMCBroadcastNode getUMCBroadcastNode();\n\n\n\n    RouteDispatcher getRouteDispatcher();\n\n    MCTContextMachinery getMCTTransformer();\n\n    InterfacialCompiler getInterfacialCompiler();\n\n    ClassDigest queryClassDigest( String name );\n\n    MethodDigest queryMethodDigest( String name );\n\n    void addClassDigest( ClassDigest that );\n\n    void addMethodDigest( MethodDigest that );\n\n    ClassDigest compile( Class<? > clazz, boolean bAsIface );\n\n\n\n\n    void registerInstance( String deliverName, Object instance, Class<?> iface ) ;\n\n    void registerInstance( Object instance, Class<?> iface ) ;\n\n    void registerController( String deliverName, Object instance, Class<?> controllerType ) ;\n\n    void registerController( Object instance, Class<?> controllerType ) ;\n\n    default void registerController( Object instance ) {\n        this.registerController( instance, instance.getClass() );\n    }\n\n\n\n    void applyMCTContextMachinery( MCTContextMachinery mctContextMachinery ) ;\n\n    void applyRouteDispatcher( RouteDispatcher routeDispatcher );\n\n    UMCTExpress createUMCTExpress( String name, Class<?> expressType );\n\n\n\n\n    BroadcastControlConsumer createBroadcastControlConsumer( UMCBroadcastConsumer workAgent, RouteDispatcher routeDispatcher ) ;\n\n    BroadcastControlConsumer createBroadcastControlConsumer( UMCBroadcastConsumer workAgent ) ;\n\n    BroadcastControlConsumer 
createBroadcastControlConsumer( UNT unt ) ;\n\n    BroadcastControlConsumer createBroadcastControlConsumer( String topic, String ns ) ;\n\n    BroadcastControlConsumer createBroadcastControlConsumer( String topic ) ;\n\n\n\n    BroadcastControlProducer createBroadcastControlProducer( UMCBroadcastProducer workAgent ) ;\n\n    BroadcastControlProducer createBroadcastControlProducer() ;\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastControlProducer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\n\npublic interface BroadcastControlProducer extends BroadcastControlAgent {\n\n    void issueInform( UNT unt, String name, MethodPrototype method, Object[] args ) throws IOException ;\n\n    void issueInform( String topic, String ns, String name, MethodPrototype method, Object[] args ) throws IOException ;\n\n    void issueInform( String topic, MethodPrototype method, Object[] args ) throws IOException ;\n\n    void issueInform( String topic, String szMethodAddress, Object... args ) throws IOException ;\n\n    <T> T getIface( Class<T> iface, String topic, String ns, String name );\n\n    default <T> T getIface( Class<T> iface, String topic ){\n        return this.getIface( iface, topic, \"\", BroadcastNode.DefaultEntityName );\n    }\n\n    void close();\n\n    void start() throws UMBServiceException;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastNode.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umc.msg.MessageNodus;\n\npublic interface BroadcastNode extends MessageNodus {\n    String DefaultEntityName = \"__DEFAULT__\";\n\n    void close();\n\n    void register( BroadcastProducer producer );\n\n    void register( BroadcastConsumer consumer );\n\n    void deregister( BroadcastProducer producer ) ;\n\n    void deregister( BroadcastConsumer consumer ) ;\n\n\n    BroadcastProducer createProducer() ;\n\n    BroadcastConsumer createConsumer( String topic, String ns ) ;\n\n    BroadcastConsumer createConsumer( String topic );\n\n    BroadcastConsumer createConsumer( UNT unt ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastPollConsumer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\n\nimport java.util.List;\n\npublic interface BroadcastPollConsumer extends BroadcastConsumer {\n\n    void close();\n\n    void start( UlfPackageMessageHandler handler ) throws UMBServiceException;\n\n    List<PollResult> startPull(long mils );\n}"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/BroadcastProducer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umb.UMBClientException;\nimport com.pinecone.hydra.umb.UMBServiceException;\n\npublic interface BroadcastProducer extends Pinenut {\n    void close();\n\n    void start() throws UMBServiceException;\n\n    boolean isClosed();\n\n    void sendMessage( String topic, String ns, String name, byte[] body ) throws UMBClientException ;\n\n    void sendMessage( String topic, byte[] body ) throws UMBClientException ;\n\n    void sendMessage( UNT unt, String name, byte[] body ) throws UMBClientException ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/DistributedConsumer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface DistributedConsumer extends Pinenut {\n\n    DistributedConsumer parentConsumer();\n\n    BroadcastConsumer mainConsumer();\n\n    String mainTopic();\n\n    String routerPath();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/GenericUNT.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\npublic class GenericUNT implements UNT {\n    protected String mszTopic;\n\n    protected String mszNamespace;\n\n    protected String[] mNameSegments;\n\n    public GenericUNT ( String topic, String ns, String[] segs ) {\n        this.mszTopic      = topic;\n        this.mszNamespace  = ns;\n\n        this.mNameSegments = segs;\n    }\n\n    @Override\n    public String getTopic() {\n        return this.mszTopic;\n    }\n\n    @Override\n    public String getNamespace() {\n        return this.mszNamespace;\n    }\n\n    @Override\n    public String[] getNameSegments() {\n        return this.mNameSegments;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/PollResult.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface PollResult extends Pinenut {\n    Object getName();\n\n    Object getValue();\n\n    byte[] getBytesValue();\n\n    Object[] getArgs();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/PushConsumer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\n\npublic interface PushConsumer extends BroadcastConsumer {\n    void start( UlfPackageMessageHandler handler ) throws UMBServiceException ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/UMCBroadcastConsumer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\npublic interface UMCBroadcastConsumer extends BroadcastConsumer {\n\n    void start( UMCTExpressHandler handler ) throws UMBServiceException ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/UMCBroadcastNode.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\npublic interface UMCBroadcastNode extends BroadcastNode {\n    ExtraHeadCoder getExtraHeadCoder();\n\n    UMCBroadcastProducer createUlfProducer() ;\n\n\n    UMCBroadcastConsumer createUlfConsumer( String topic, String ns ) ;\n\n    UMCBroadcastConsumer createUlfConsumer( String topic ) ;\n\n    UMCBroadcastConsumer createUlfConsumer( UNT unt ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/UMCBroadcastProducer.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.hydra.umb.UMBClientException;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic interface UMCBroadcastProducer extends BroadcastProducer {\n\n    void sendMessage( String topic, String ns, String name, UMCMessage message ) throws UMBClientException ;\n\n    void sendMessage( String topic, UMCMessage message ) throws UMBClientException ;\n\n    void sendMessage( UNT unt, String name, UMCMessage message ) throws UMBClientException ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/UNT.java",
    "content": "package com.pinecone.hydra.umb.broadcast;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n/**\n *  Pinecone Ursus For Java [ Uniform Namespaced Topic ]\n *  Author: Harald.E (Dragon King), Ken\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Topic + Namespace\n *  *****************************************************************************************\n */\npublic interface UNT extends Pinenut {\n    String getTopic();\n\n    String getNamespace();\n\n    String[] getNameSegments();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/converter/GenericResultBytesConverter.java",
    "content": "package com.pinecone.hydra.umb.broadcast.converter;\n\nimport com.pinecone.framework.util.Bytes;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.ObjectOutputStream;\n\npublic class GenericResultBytesConverter<V > implements ResultBytesConverter<V> {\n    @Override\n    public byte[] convert( V value ) {\n        if ( value == null ) {\n            return Bytes.Empty;\n        }\n        else if ( value instanceof byte[] ) {\n            return (byte[]) value;\n        }\n        else if ( value instanceof String ) {\n            return ( (String) value ).getBytes();\n        }\n\n\n        try (ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();\n             ObjectOutputStream objectOutputStream = new ObjectOutputStream(byteArrayOutputStream)) {\n\n            objectOutputStream.writeObject(value);\n            objectOutputStream.flush();\n            return byteArrayOutputStream.toByteArray();\n\n        }\n        catch ( IOException e ) {\n            return Bytes.Empty;\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/converter/ResultBytesConverter.java",
    "content": "package com.pinecone.hydra.umb.broadcast.converter;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ResultBytesConverter<V> extends Pinenut {\n    byte[] convert( V value );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/proxy/GenericIfaceProxyFactory.java",
    "content": "package com.pinecone.hydra.umb.broadcast.proxy;\n\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\nimport com.pinecone.hydra.umct.proxy.UMCTHub;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;\nimport com.pinecone.hydra.umct.stereotype.IfaceUtils;\nimport org.springframework.cglib.proxy.Enhancer;\nimport org.springframework.cglib.proxy.MethodInterceptor;\nimport org.springframework.cglib.proxy.MethodProxy;\n\nimport java.lang.reflect.Method;\nimport java.util.concurrent.ConcurrentHashMap;\n\npublic class GenericIfaceProxyFactory implements IfaceProxyFactory {\n    protected final ConcurrentHashMap<Class<?>, Enhancer> mEnhancerCache = new ConcurrentHashMap<>();\n\n    protected BroadcastControlProducer  mProducer;\n\n    public GenericIfaceProxyFactory( BroadcastControlProducer producer ) {\n        this.mProducer = producer;\n    }\n\n    @Override\n    public <T> T createProxy( BroadcastControlProducer producer, ClassDigest classDigest, Class<T> iface, String topic, String ns, String name ) {\n//        if (!iface.isInterface()) {\n//            throw new IllegalArgumentException(\"The provided class must be an interface.\");\n//        }\n\n        Enhancer enhancer = this.mEnhancerCache.computeIfAbsent(iface, clazz -> {\n            Enhancer e = new Enhancer();\n            e.setSuperclass(UMCTHub.class);\n            e.setInterfaces( new Class[]{iface} );\n\n            e.setCallback(new MethodInterceptor() {\n                @Override\n                public Object intercept( Object obj, Method method, Object[] args, MethodProxy proxy ) throws Throwable {\n                    String methodName = IfaceUtils.getIfaceMethodName( method );\n                    MethodPrototype methodPrototype = (DynamicMethodPrototype) 
producer.queryMethodDigest(\n                            classDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + methodName\n                    );\n\n                    producer.issueInform(\n                            topic, ns,\n                            name,\n                            methodPrototype,\n                            args\n                    );\n                    return null;\n                }\n            });\n            return e;\n        });\n\n        return iface.cast( enhancer.create() );\n    }\n\n    @Override\n    public <T> T createProxy( BroadcastControlProducer producer, Class<T> iface, String topic, String ns, String name ) {\n        ClassDigest classDigest = producer.queryClassDigest( IfaceUtils.queryIfaceClassNameAddress( iface ) );\n\n        return this.createProxy( producer, classDigest, iface, topic, ns, name );\n    }\n\n    @Override\n    public <T> T createProxy( Class<T> iface, String topic, String ns, String name ) {\n        return this.createProxy( this.mProducer, iface, topic, ns, name );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/broadcast/proxy/IfaceProxyFactory.java",
    "content": "package com.pinecone.hydra.umb.broadcast.proxy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\n\npublic interface IfaceProxyFactory extends Pinenut {\n    <T> T createProxy( BroadcastControlProducer producer, ClassDigest classDigest, Class<T> iface, String topic, String ns, String name ) ;\n\n    <T> T createProxy( BroadcastControlProducer producer, Class<T> iface, String topic, String ns, String name ) ;\n\n    <T> T createProxy( Class<T> iface, String topic, String ns, String name );\n}"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KBroadcastPollConsumer.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastPollConsumer;\n\npublic interface KBroadcastPollConsumer<K, V > extends BroadcastPollConsumer {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KBroadcastProducer.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.UMBClientException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\n\npublic interface KBroadcastProducer<K, V > extends BroadcastProducer {\n\n    void sendPrototypeMessage( String topic, String ns, K name, V body ) throws UMBClientException ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KClient.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.converter.ResultBytesConverter;\nimport com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit;\n\nimport java.util.Collection;\nimport java.util.Properties;\n\npublic interface KClient extends BroadcastNode {\n    @Override\n    default ErrorMessageAudit    getErrorMessageAudit() {\n        return null;\n    }\n\n    @Override\n    default void                 setErrorMessageAudit( ErrorMessageAudit audit ){\n\n    }\n\n    KConfig getKafkaConfig();\n\n    <K, V > KBroadcastProducer<K, V > createPrototypeProducer( Properties properties ) ;\n\n    default KBroadcastProducer<String, byte[] > createProducer( Properties properties ) {\n        return this.createPrototypeProducer( properties );\n    }\n\n    <K, V > KBroadcastPollConsumer<K, V > createPrototypeConsumer( String topic, String ns, Properties properties ) ;\n\n    default KBroadcastPollConsumer<String, byte[] > createConsumer( String topic, String ns, Properties properties ) {\n        return this.createPrototypeConsumer( topic, ns, properties );\n    }\n\n    ResultBytesConverter<Object > getDafaultResultBytesConverter();\n\n\n    Collection<BroadcastProducer> viewProducerRegister();\n\n    Collection<BroadcastConsumer> viewConsumerRegister();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KConfig.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umc.msg.MsgNodeConfig;\n\npublic interface KConfig extends MsgNodeConfig {\n    String getMszServer();\n\n    String getMszAutoOffsetReset();\n\n    long getMnDefaultPollHandleMillis();\n\n    @Override\n    default long getSyncWaitingMillis() {\n        return this.getMnDefaultPollHandleMillis();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaClient.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport com.pinecone.hydra.umb.broadcast.converter.GenericResultBytesConverter;\nimport com.pinecone.hydra.umb.broadcast.converter.ResultBytesConverter;\nimport com.pinecone.hydra.umc.msg.Messagus;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Map;\nimport java.util.Properties;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\n\npublic class KafkaClient implements KClient {\n    protected Map<BroadcastProducer, Object> producerRegister;\n\n    protected Map<BroadcastConsumer, Object> consumerRegister;\n\n    private static final Object PRESENT = new Object();\n\n    protected KConfig       kafkaConfig;\n\n    protected long          nodeId;\n\n    protected ExecutorService pollConsumerThreadPool;\n\n    protected ResultBytesConverter<Object > resultBytesConverter;\n\n    public KafkaClient( long nodeId, KConfig config ) {\n        this.kafkaConfig             = config;\n\n        this.producerRegister        = new ConcurrentHashMap<>();\n        this.consumerRegister        = new ConcurrentHashMap<>();\n        this.nodeId                  = nodeId;\n        this.resultBytesConverter    = new GenericResultBytesConverter<>();\n        this.pollConsumerThreadPool  = Executors.newCachedThreadPool();\n    }\n\n    public KafkaClient( long nodeId, String server ) {\n        this( nodeId, new KafkaConfig( server ) );\n    }\n\n    public KafkaClient( String server ) {\n        this( Messagus.nextLocalId(), server );\n    }\n\n    public KafkaClient( Map<String, Object> config ){\n        this( Messagus.nextLocalId(), new KafkaConfig( config ) );\n    }\n\n    @Override\n    
public void close() {\n        for( Map.Entry<BroadcastConsumer, Object> kv : this.consumerRegister.entrySet() ) {\n            kv.getKey().close();\n        }\n\n        for( Map.Entry<BroadcastProducer, Object> kv : this.producerRegister.entrySet() ) {\n            kv.getKey().close();\n        }\n\n        this.consumerRegister.clear();\n        this.producerRegister.clear();\n\n        this.pollConsumerThreadPool.shutdown();\n    }\n\n    @Override\n    public void register( BroadcastProducer producer ) {\n        this.producerRegister.put( producer, PRESENT );\n    }\n\n    @Override\n    public void register( BroadcastConsumer consumer ) {\n        this.consumerRegister.put( consumer, PRESENT );\n    }\n\n    @Override\n    public void deregister( BroadcastProducer producer ) {\n        this.producerRegister.remove( producer );\n    }\n\n    @Override\n    public void deregister( BroadcastConsumer consumer ) {\n        this.consumerRegister.remove( consumer );\n    }\n\n    @Override\n    public BroadcastProducer createProducer() {\n        UlfBroadcastProducer<String, byte[] > ulfBroadcastProducer = new UlfBroadcastProducer<>(this);\n        this.register(ulfBroadcastProducer);\n        return ulfBroadcastProducer;\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( String topic, String ns ) {\n        UlfBroadcastPollConsumer<String, byte[] > kafkaBroadcastConsumer = new UlfBroadcastPollConsumer<>(this, topic, ns);\n        this.register(kafkaBroadcastConsumer);\n        return kafkaBroadcastConsumer;\n    }\n\n    @Override\n    public <K, V > KBroadcastProducer<K, V > createPrototypeProducer( Properties properties ) {\n        UlfBroadcastProducer<K, V > ulfBroadcastProducer = new UlfBroadcastProducer<> ( this, properties );\n        this.register(ulfBroadcastProducer);\n        return ulfBroadcastProducer;\n    }\n\n    @Override\n    public <K, V > KBroadcastPollConsumer<K, V >  createPrototypeConsumer( String topic, String ns, 
Properties properties ) {\n        UlfBroadcastPollConsumer<K, V >  kafkaBroadcastConsumer = new UlfBroadcastPollConsumer<>( this, topic, ns, properties );\n        this.register(kafkaBroadcastConsumer);\n        return kafkaBroadcastConsumer;\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( String topic ) {\n        return this.createConsumer(topic, \"\");\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( UNT unt ) {\n        return this.createConsumer( unt.getTopic(), unt.getNamespace() );\n    }\n\n    @Override\n    public KConfig getKafkaConfig() {\n        return this.kafkaConfig;\n    }\n\n    @Override\n    public KConfig getMessageNodeConfig() {\n        return this.getKafkaConfig();\n    }\n\n    @Override\n    public ResultBytesConverter<Object > getDafaultResultBytesConverter() {\n        return this.resultBytesConverter;\n    }\n\n    @Override\n    public long getMessageNodeId() {\n        return this.nodeId;\n    }\n\n    @Override\n    public ExtraHeadCoder getExtraHeadCoder() {\n        return null;\n    }\n\n    protected ExecutorService getPollConsumerThreadPool() {\n        return this.pollConsumerThreadPool;\n    }\n\n\n    @Override\n    public Collection<BroadcastConsumer> viewConsumerRegister() {\n        return Collections.unmodifiableSet(this.consumerRegister.keySet());\n    }\n\n    @Override\n    public Collection<BroadcastProducer> viewProducerRegister() {\n        return Collections.unmodifiableSet(this.producerRegister.keySet());\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaConfig.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport java.util.Map;\n\npublic class KafkaConfig implements KConfig {\n    protected String mszServer;\n\n    protected String mszAutoOffsetReset;\n\n    protected long   mnDefaultPollHandleMillis;\n\n    public KafkaConfig ( Map<String, Object > conf ) {\n        this.mszServer                 = (String) conf.get( \"server\" );\n        this.mszAutoOffsetReset        = (String) conf.getOrDefault( \"AutoOffsetReset\", KafkaConstants.DefaultAutoOffsetReset );\n        this.mnDefaultPollHandleMillis = ( (Number)conf.getOrDefault( \"DefaultPollHandleMillis\", KafkaConstants.DefaultPollHandleMillis ) ).longValue();\n    }\n\n    public KafkaConfig( String szServer, String szAutoOffsetReset, long nDefaultPollHandleMillis ){\n        this.mszServer                 = szServer;\n        this.mszAutoOffsetReset        = szAutoOffsetReset;\n        this.mnDefaultPollHandleMillis = nDefaultPollHandleMillis;\n    }\n\n    public KafkaConfig( String szServer ){\n        this( szServer, KafkaConstants.DefaultAutoOffsetReset, KafkaConstants.DefaultPollHandleMillis );\n    }\n\n    @Override\n    public String getMszServer() {\n        return this.mszServer;\n    }\n\n    @Override\n    public String getMszAutoOffsetReset() {\n        return this.mszAutoOffsetReset;\n    }\n\n    @Override\n    public long getMnDefaultPollHandleMillis() {\n        return this.mnDefaultPollHandleMillis;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaConstants.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\npublic final class KafkaConstants {\n\n    public static final String DefaultAutoOffsetReset   = \"earliest\";\n\n    public static final Long   DefaultPollHandleMillis  = 100L;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaMedium.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.MessageNodus;\n\nimport java.io.InputStream;\nimport java.io.OutputStream;\n\npublic class KafkaMedium implements Medium {\n    protected MessageNodus mMessageNode;\n\n    public KafkaMedium( MessageNodus medium ){\n        this.mMessageNode = medium;\n    }\n\n    @Override\n    public Object getNativeMessageSource() {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public OutputStream getOutputStream() {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public InputStream getInputStream() {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public String sourceName() {\n        return \"Kafka\";\n    }\n\n    @Override\n    public void release() {\n\n    }\n\n    @Override\n    public MessageNodus getMessageNode() {\n        return this.mMessageNode;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaPollResult.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.broadcast.PollResult;\n\npublic class KafkaPollResult implements PollResult {\n    protected Object name;\n\n    protected Object value;\n\n    protected byte[] bytesValue;\n\n    protected Object[] args;\n\n    public KafkaPollResult( Object name, Object value, byte[] bytesValue, Object[] args ){\n        this( name,value,bytesValue );\n        this.args = args;\n    }\n\n    public KafkaPollResult( Object name, Object value, byte[] bytesValue ){\n        this.name = name;\n        this.value = value;\n        this.bytesValue = bytesValue;\n    }\n\n\n    @Override\n    public Object getName() {\n        return this.name;\n    }\n\n    @Override\n    public Object getValue() {\n        return this.value;\n    }\n\n    @Override\n    public byte[] getBytesValue() {\n        return this.bytesValue;\n    }\n\n    @Override\n    public Object[] getArgs() {\n        return this.args;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaReceiver.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.broadcast.ArchUnidirectionalMCProtocol;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\n\nimport java.io.IOException;\n\npublic class KafkaReceiver extends ArchUnidirectionalMCProtocol implements UMCReceiver {\n    public KafkaReceiver(Medium messageSource){\n        super(messageSource);\n    }\n    @Override\n    public Object readInformMsg() throws IOException {\n        return null;\n    }\n\n    @Override\n    public UMCMessage readTransferMsg() throws IOException {\n        return null;\n    }\n\n    @Override\n    public UMCMessage readTransferMsgBytes() throws IOException {\n        return null;\n    }\n\n    @Override\n    public UMCMessage readMsg() throws IOException {\n        return null;\n    }\n\n    @Override\n    public UMCMessage readMsgBytes() throws IOException {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/KafkaTransmit.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.broadcast.ArchUnidirectionalMCProtocol;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.Status;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class KafkaTransmit extends ArchUnidirectionalMCProtocol implements UMCTransmit {\n    protected Logger logger = LoggerFactory.getLogger( this.getClass() );\n\n    public KafkaTransmit( Medium messageSource ){\n        super(messageSource);\n    }\n\n    @Override\n    public void sendInformMsg( Object msg ) throws IOException {\n\n    }\n\n    @Override\n    public void sendInformMsg( Object msg, Status status ) throws IOException {\n        if ( status != Status.OK ) {\n            this.logger.warn( \"IllegalTransmitResponse for broadcast message nodes. what => {}, {}\", msg, status );\n        }\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, byte[] bytes ) throws IOException {\n\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, byte[] bytes, Status status ) throws IOException {\n        if ( status != Status.OK ) {\n            this.logger.warn( \"IllegalTransmitResponse for broadcast message nodes. what => {}, {}\", msg, status );\n        }\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, InputStream is ) throws IOException {\n\n    }\n\n    @Override\n    public void sendMsg( UMCMessage msg, boolean bNoneBuffered ) throws IOException {\n        if ( msg.getHead().getStatus() != Status.OK ) {\n            this.logger.warn( \"IllegalTransmitResponse for broadcast message nodes. what => {}\", msg );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/UlfBroadcastPollConsumer.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.framework.system.IrrationalProvokedException;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\nimport com.pinecone.hydra.umb.broadcast.PollResult;\nimport com.pinecone.hydra.umb.broadcast.converter.ResultBytesConverter;\nimport org.apache.kafka.clients.consumer.ConsumerRecord;\nimport org.apache.kafka.clients.consumer.ConsumerRecords;\nimport org.apache.kafka.clients.consumer.KafkaConsumer;\nimport org.apache.kafka.common.serialization.ByteArrayDeserializer;\nimport org.apache.kafka.common.serialization.StringDeserializer;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.time.Duration;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Properties;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.atomic.AtomicBoolean;\n\npublic class UlfBroadcastPollConsumer<K, V > implements KBroadcastPollConsumer<K, V > {\n    protected static Properties newDefaultProperties( KConfig kafkaConfig, String group ) {\n        Properties properties = new Properties();\n\n        properties.put( \"bootstrap.servers\", kafkaConfig.getMszServer() );\n        properties.put( \"group.id\", group );\n        properties.put( \"key.deserializer\", StringDeserializer.class.getName() );\n        properties.put( \"value.deserializer\", ByteArrayDeserializer.class.getName() );\n        properties.put( \"auto.offset.reset\", kafkaConfig.getMszAutoOffsetReset() );\n\n        return properties;\n    }\n\n    protected KClient       kafkaClient;\n\n    protected Properties    properties;\n\n    protected String        topic;\n\n    protected String        group;\n\n    protected KafkaConsumer<K, V > wrappedConsumer;\n\n    protected AtomicBoolean pollConsumerCloseSignal;\n\n    protected ResultBytesConverter<V > resultBytesConverter;\n\n    protected ExecutorService 
pollConsumerThreadPool;\n\n    protected Thread privatePollConsumerThread;\n\n    protected Logger log = LoggerFactory.getLogger( this.getClass() );\n\n    public UlfBroadcastPollConsumer( KClient kafkaClient, String topic, String group, Properties properties, ResultBytesConverter<V > resultBytesConverter ){\n        this.kafkaClient              = kafkaClient;\n        this.properties               = properties;\n        this.topic                    = topic;\n        this.group                    = group;\n        this.pollConsumerCloseSignal  = new AtomicBoolean( false );\n        this.resultBytesConverter     = resultBytesConverter;\n\n\n        try {\n            this.pollConsumerThreadPool = ((KafkaClient)this.getKafkaClient()).getPollConsumerThreadPool();\n        }\n        catch ( ClassCastException ignore ) {\n            // Ignore them.\n        }\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public UlfBroadcastPollConsumer( KClient kafkaClient, String topic, String group, Properties properties ){\n        this( kafkaClient, topic, group, properties, (ResultBytesConverter<V >) kafkaClient.getDafaultResultBytesConverter() );\n    }\n\n    public UlfBroadcastPollConsumer( KClient kafkaClient, String topic, String group ){\n        this(\n                kafkaClient, topic, group,\n                UlfBroadcastPollConsumer.newDefaultProperties( kafkaClient.getKafkaConfig(), group )\n        );\n    }\n\n    @Override\n    public void close() {\n        if ( this.wrappedConsumer != null ) {\n            // Raise the exit flag unconditionally and before closing: the poll loop checks this flag\n            // both when it runs on the shared pool and when it runs on the private thread.\n            this.pollConsumerCloseSignal.compareAndSet( false, true );\n            this.wrappedConsumer.close();\n            this.kafkaClient.deregister( this );\n            this.wrappedConsumer = null;\n        }\n    }\n\n    @Override\n    public void start( UlfPackageMessageHandler handler ) throws UMBServiceException {\n        try {\n            this.close();\n            
this.pollConsumerCloseSignal.set( false ); // Re-arm the exit flag so a previously closed consumer can be restarted.\n            this.wrappedConsumer = this.newBytesConsumer( handler );\n        }\n        catch ( Exception e ) {\n            throw new UMBServiceException( e );\n        }\n    }\n\n    @Override\n    public boolean isClosed() {\n        return this.wrappedConsumer == null;\n    }\n\n    @Override\n    public String topic() {\n        return this.topic;\n    }\n\n    @Override\n    public String tag() {\n        return this.group;\n    }\n\n    @Override\n    public List<PollResult > startPull(long mils ) {\n        this.close();\n\n        // try-with-resources: this short-lived consumer must be closed, otherwise it leaks its network\n        // resources and consumer-group membership on every call.\n        try ( KafkaConsumer<K, V > kafkaConsumer = new KafkaConsumer<>( this.properties ) ) {\n            kafkaConsumer.subscribe(Collections.singletonList( this.topic ) );\n\n            ConsumerRecords<K, V > records = kafkaConsumer.poll( Duration.ofMillis( mils ) );\n\n            ArrayList<PollResult> pollResults = new ArrayList<>();\n            for ( ConsumerRecord<K, V > record : records ) {\n                KafkaPollResult kafkaPollResult = new KafkaPollResult(\n                        record.key(), record.value(),\n                        this.resultBytesConverter.convert(record.value()), new Object[] {record.headers(),record.topic(),record.offset()}\n                );\n                pollResults.add(kafkaPollResult);\n            }\n            return pollResults;\n        }\n    }\n\n    protected KafkaConsumer<K, V > newBytesConsumer( UlfPackageMessageHandler handler ) {\n        KafkaConsumer<K, V > kafkaConsumer = new KafkaConsumer<>(this.properties);\n        kafkaConsumer.subscribe(Collections.singletonList( this.topic ) );\n\n        long pollMills = this.kafkaClient.getKafkaConfig().getMnDefaultPollHandleMillis();\n        Runnable runnable = new Runnable() {\n            @Override\n            public void run() {\n                while ( true ) {\n                    ConsumerRecords<K, V > records = kafkaConsumer.poll( Duration.ofMillis( pollMills ) );\n                    for ( ConsumerRecord<K, V > record : records ) {\n                        try {\n                            
handler.onSuccessfulMsgReceived(\n                                    UlfBroadcastPollConsumer.this.resultBytesConverter.convert(record.value()), new Object[] {record.key(), record.headers()}\n                            );\n                        }\n                        catch ( Exception e ) {\n                            log.warn( \"Warning, unexpected proceeding Kafka consumer messages, what => '{}'\", e.getMessage(), e );\n                            //throw new IrrationalProvokedException( e ); // It will kill the kafka loop thread.\n                        }\n                    }\n\n                    if ( UlfBroadcastPollConsumer.this.pollConsumerCloseSignal.get() ) {\n                        break;\n                    }\n                }\n            }\n        };\n\n        if ( this.pollConsumerThreadPool != null ) {\n            this.pollConsumerThreadPool.execute( runnable );\n        }\n        else {\n            this.privatePollConsumerThread = new Thread(runnable);\n            this.privatePollConsumerThread.start();\n        }\n\n        return kafkaConsumer;\n    }\n\n    public KClient getKafkaClient(){\n        return this.kafkaClient;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/UlfBroadcastProducer.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.UMBClientException;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport org.apache.kafka.clients.producer.KafkaProducer;\nimport org.apache.kafka.clients.producer.ProducerRecord;\nimport org.apache.kafka.common.serialization.ByteArraySerializer;\nimport org.apache.kafka.common.serialization.StringSerializer;\n\nimport java.util.Properties;\n\npublic class UlfBroadcastProducer<K, V > implements KBroadcastProducer<K, V > {\n    protected static Properties newDefaultProperties( KConfig kafkaConfig ) {\n        Properties properties = new Properties();\n\n        properties.put( \"bootstrap.servers\", kafkaConfig.getMszServer() );\n        properties.put( \"key.serializer\", StringSerializer.class.getName() );\n        properties.put( \"value.serializer\", ByteArraySerializer.class.getName() );\n\n        return properties;\n    }\n\n    protected String                       server;\n\n    protected KClient                      kafkaClient;\n\n    protected KafkaProducer<K, V>          kafkaProducer;\n\n    protected Properties                   properties;\n\n    public UlfBroadcastProducer( KClient kafkaClient, Properties properties ){\n        this.kafkaClient            = kafkaClient;\n        KConfig kafkaConfig         = kafkaClient.getKafkaConfig();\n        this.server                 = kafkaConfig.getMszServer();\n        this.properties             = properties;\n    }\n\n    public UlfBroadcastProducer( KClient kafkaClient ){\n        this( kafkaClient, UlfBroadcastProducer.newDefaultProperties( kafkaClient.getKafkaConfig() ) );\n    }\n\n    public KClient getKafkaClient(){\n        return this.kafkaClient;\n    }\n\n    @Override\n    public void close() {\n        if ( this.kafkaProducer != null ) {\n            this.kafkaProducer.close();\n            
this.kafkaClient.deregister( this );\n            this.kafkaProducer = null;\n        }\n    }\n\n    @Override\n    public void start() throws UMBServiceException {\n        this.close();\n        this.kafkaProducer = new KafkaProducer<>( this.properties );\n    }\n\n    @Override\n    public boolean isClosed() {\n        return this.kafkaProducer == null;\n    }\n\n    @Override\n    public void sendPrototypeMessage( String topic, String ns, K name, V body ) throws UMBClientException {\n        ProducerRecord<K, V > producerRecord = new ProducerRecord<>( topic, name, body );\n        this.kafkaProducer.send( producerRecord );\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    @Override\n    public void sendMessage( String topic, String ns, String name, byte[] body ) throws UMBClientException {\n        ProducerRecord<String, byte[]> producerRecord = new ProducerRecord<>( topic, name, body );\n        this.kafkaProducer.send( (ProducerRecord<K, V >) producerRecord );\n    }\n\n    @Override\n    public void sendMessage( String topic, byte[] body ) throws UMBClientException {\n        this.sendMessage(topic, \"\", BroadcastNode.DefaultEntityName,body);\n    }\n\n    @Override\n    public void sendMessage( UNT unt, String name, byte[] body ) throws UMBClientException {\n        this.sendMessage( unt.getTopic(), unt.getNamespace(), name, body );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/UlfKafkaClient.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\n\npublic interface UlfKafkaClient extends KClient, UMCBroadcastNode {\n    UMCBroadcastProducer createUlfProducer() ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/WolfKafkaConsumer.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.hydra.umb.UMBBytesDecoder;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfMBInformMessage;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;\nimport com.pinecone.hydra.umc.msg.EMCBytesDecoder;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umc.wolf.UlfBytesTransferMessage;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\nimport java.io.IOException;\n\npublic class WolfKafkaConsumer extends UlfBroadcastPollConsumer<String, byte[] > implements UMCBroadcastConsumer {\n    protected EMCBytesDecoder          mEMCBytesDecoder;\n\n    protected ExtraHeadCoder           mExtraHeadCoder;\n\n    protected Medium                   mMedium;\n    protected UMCTransmit              mUMCTransmit;\n    protected UMCReceiver              mUMCReceiver;\n\n    public WolfKafkaConsumer( KClient client, String topic, String group, @Nullable ExtraHeadCoder extraHeadCoder ) {\n        super( client,topic,group );\n\n        this.mExtraHeadCoder           = extraHeadCoder;\n        if ( this.mExtraHeadCoder == null ) {\n            this.mExtraHeadCoder = client.getExtraHeadCoder();\n        }\n\n        this.mEMCBytesDecoder = new UMBBytesDecoder();\n\n        // Dummy [ MQ is base on unidirectional communication. 
]\n        this.mMedium          = new KafkaMedium( this.getKafkaClient() );\n        this.mUMCReceiver     = new KafkaReceiver( this.mMedium );\n        this.mUMCTransmit     = new KafkaTransmit( this.mMedium );\n\n    }\n\n    public WolfKafkaConsumer( KClient client, String topic, String group ) {\n        this( client, topic, group, null );\n    }\n\n    protected UMCMessage decodeMessage( byte[] raw ) throws IOException {\n        UMCHead head = WolfKafkaConsumer.this.mEMCBytesDecoder.decodeIntegrated( raw, WolfKafkaConsumer.this.mExtraHeadCoder );\n        if ( head.getMethod() == UMCMethod.TRANSFER ) {\n            int bodyLen = (int)head.getBodyLength();\n            byte[] bodyBuf = new byte[ bodyLen ];\n            int headSize = head.sizeof() + head.getExtraHeadLength();\n            System.arraycopy( raw, headSize, bodyBuf, 0, bodyLen );\n\n            return new UlfBytesTransferMessage( head, bodyBuf );\n        }\n        return new UlfMBInformMessage( head );\n    }\n\n    @Override\n    public void start( UMCTExpressHandler handler ) throws UMBServiceException {\n        super.start(new UlfPackageMessageHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( byte[] raw, Object[] args ) throws Exception {\n                UMCMessage message = WolfKafkaConsumer.this.decodeMessage( raw );\n                handler.onSuccessfulMsgReceived( WolfKafkaConsumer.this.mMedium, WolfKafkaConsumer.this.mUMCTransmit, WolfKafkaConsumer.this.mUMCReceiver, message, args );\n            }\n\n            @Override\n            public void onErrorMsgReceived( byte[] raw, Object[] args ) throws Exception {\n                UMCMessage message = WolfKafkaConsumer.this.decodeMessage( raw );\n                handler.onErrorMsgReceived( WolfKafkaConsumer.this.mMedium, WolfKafkaConsumer.this.mUMCTransmit, WolfKafkaConsumer.this.mUMCReceiver, message, args );\n            }\n\n            @Override\n            public void onError( Object data, 
Throwable cause ) {\n                handler.onError( data, cause );\n            }\n        });\n    }\n\n    @Override\n    public UlfKafkaClient getKafkaClient() {\n        return (UlfKafkaClient) this.kafkaClient;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/WolfKafkaProducer.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.hydra.umb.UMBClientException;\nimport com.pinecone.hydra.umb.UMCPackageMessageEncoder;\nimport com.pinecone.hydra.umb.UlfPackageMessageEncoder;\nimport com.pinecone.hydra.umb.broadcast.BroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\nimport java.io.IOException;\n\npublic class WolfKafkaProducer extends UlfBroadcastProducer<String, byte[] > implements UMCBroadcastProducer {\n    protected UMCPackageMessageEncoder mUMCPackageMessageEncoder;\n\n    protected ExtraHeadCoder           mExtraHeadCoder;\n\n    public WolfKafkaProducer( UlfKafkaClient client, @Nullable ExtraHeadCoder extraHeadCoder ){\n        super( client );\n\n        this.mExtraHeadCoder           = extraHeadCoder;\n        if ( this.mExtraHeadCoder == null ) {\n            this.mExtraHeadCoder = client.getExtraHeadCoder();\n        }\n\n        this.mUMCPackageMessageEncoder = new UlfPackageMessageEncoder( this.mExtraHeadCoder );\n    }\n\n    public WolfKafkaProducer( UlfKafkaClient client ){\n        this(client,null);\n    }\n\n    @Override\n    public UlfKafkaClient getKafkaClient() {\n        return (UlfKafkaClient) this.kafkaClient;\n    }\n\n    @Override\n    public void sendMessage( String topic, String ns, String name, UMCMessage message ) throws UMBClientException {\n        try{\n            this.sendMessage( topic, ns, name, this.mUMCPackageMessageEncoder.encode( message ) );\n        }\n        catch ( IOException e ) {\n            throw new UMBClientException( e );\n        }\n    }\n\n    @Override\n    public void sendMessage( String topic, UMCMessage message ) throws UMBClientException {\n        this.sendMessage( topic, \"\", BroadcastNode.DefaultEntityName, message );\n    }\n\n 
   @Override\n    public void sendMessage( UNT unt, String name, UMCMessage message ) throws UMBClientException {\n        this.sendMessage( unt.getTopic(), unt.getNamespace(), name, message );\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/kafka/WolfMCKafkaClient.java",
    "content": "package com.pinecone.hydra.umb.kafka;\n\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport com.pinecone.hydra.umc.msg.Messagus;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit;\nimport com.pinecone.hydra.umc.msg.handler.GenericErrorMessageAudit;\n\nimport java.util.Map;\n\npublic class WolfMCKafkaClient extends KafkaClient implements UlfKafkaClient{\n    protected ExtraHeadCoder mExtraHeadCoder;\n\n    protected ErrorMessageAudit mErrorMessageAudit;\n\n    public WolfMCKafkaClient( long nodeId, KafkaConfig config, ExtraHeadCoder extraHeadCoder ) {\n        super( nodeId, config );\n\n        this.mExtraHeadCoder           = extraHeadCoder;\n        this.mErrorMessageAudit        = new GenericErrorMessageAudit( this );\n    }\n\n    public WolfMCKafkaClient( long nodeId, String nameSrvAddr, ExtraHeadCoder extraHeadCoder ) {\n        this( nodeId, new KafkaConfig( nameSrvAddr ), extraHeadCoder );\n    }\n\n    public WolfMCKafkaClient( String nameSrvAddr ) {\n        this( Messagus.nextLocalId(), nameSrvAddr, new GenericExtraHeadCoder() );\n    }\n\n    public WolfMCKafkaClient( long nodeId, Map<String, Object> config, ExtraHeadCoder extraHeadCoder ){\n        this( nodeId, new KafkaConfig( config ), extraHeadCoder );\n    }\n\n    public WolfMCKafkaClient( Map<String, Object> config, ExtraHeadCoder extraHeadCoder ){\n        this( Messagus.nextLocalId(), config, extraHeadCoder );\n    }\n\n\n    @Override\n    public ErrorMessageAudit getErrorMessageAudit() {\n        return this.mErrorMessageAudit;\n    }\n\n    @Override\n    public void setErrorMessageAudit( ErrorMessageAudit audit ){\n        this.mErrorMessageAudit = audit;\n    }\n    @Override\n    public ExtraHeadCoder 
getExtraHeadCoder() {\n        return this.mExtraHeadCoder;\n    }\n\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( String topic, String ns ) {\n        WolfKafkaConsumer consumer = new WolfKafkaConsumer( this,topic,ns );\n        this.register( consumer );\n        return consumer;\n    }\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( String topic ) {\n        return this.createUlfConsumer( topic,\"\" );\n    }\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( UNT unt ) {\n        return this.createUlfConsumer( unt.getTopic(), unt.getNamespace() );\n    }\n\n    @Override\n    public UMCBroadcastProducer createUlfProducer() {\n        WolfKafkaProducer wolfKafkaProducer = new WolfKafkaProducer(this, this.mExtraHeadCoder);\n        this.register( wolfKafkaProducer );\n        return wolfKafkaProducer;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rabbit/RabbitMQClient.java",
    "content": "package com.pinecone.hydra.umb.rabbit;\n\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.rabbitmq.client.Channel;\nimport com.rabbitmq.client.Connection;\nimport com.rabbitmq.client.ConnectionFactory;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSONObject;\n\nimport java.io.IOException;\nimport java.util.concurrent.TimeoutException;\n\npublic class RabbitMQClient implements Pinenut {\n    protected Hydrogen mSystem;\n    protected JSONObject     mjoSectionConf;\n    protected String         mszHost;\n    protected short          mnPort;\n    protected int            mnKeepAliveTimeout;\n    protected int            mnSocketTimeout;\n    protected String         mszUsername;\n    protected String         mszPassword;\n    protected String         mszVHost = \"/wolf\";\n\n    public RabbitMQClient(Hydrogen system, JSONObject joConf ) {\n        this.mSystem        = system;\n        this.mjoSectionConf = joConf;\n\n        this.apply( joConf );\n    }\n\n    public RabbitMQClient apply( JSONObject joConf ) {\n        this.mjoSectionConf = joConf;\n\n        this.mszHost            = this.mjoSectionConf.optString( \"host\" );\n        this.mnPort             = (short) this.mjoSectionConf.optInt( \"port\" );\n        this.mszPassword        = this.mjoSectionConf.optString( \"password\" );\n        this.mszUsername        = this.mjoSectionConf.optString( \"username\" );\n\n        return this;\n    }\n\n    public void toListen() throws IOException, TimeoutException {\n        ConnectionFactory connectionFactory = new ConnectionFactory();\n        connectionFactory.setHost( this.mszHost );\n        connectionFactory.setPort( this.mnPort  );\n        connectionFactory.setUsername( this.mszUsername );\n        connectionFactory.setPassword( this.mszPassword );\n        connectionFactory.setVirtualHost( this.mszVHost );\n\n\n\n        Connection connection = connectionFactory.newConnection();\n   
     Channel channel = connection.createChannel();\n\n        channel.queueDeclare(\"wolf-test\", false, false, false, null);\n\n        for ( int i = 0; i < 1e3; i++ ) {\n            channel.basicPublish(\"\", \"wolf-test\", null, \"Hello, RabbitMQ!\".getBytes());\n        }\n\n        channel.close();\n        connection.close();\n    }\n\n    @Override\n    public String toString() {\n        return String.format(\n                \"[object %s(0x%s)<\\uD83D\\uDC07>]\",\n                this.className() , Integer.toHexString( this.hashCode() )\n        );\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rabbit/RabbitMedium.java",
    "content": "package com.pinecone.hydra.umb.rabbit;\n\npublic class RabbitMedium {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/ArchMQConsumer.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport com.pinecone.hydra.umb.broadcast.PushConsumer;\n\npublic abstract class ArchMQConsumer implements PushConsumer {\n    protected String mszNameServerAddr;\n\n    protected String mszGroupName;\n\n    protected String mszTopic;\n\n    protected String mszTag;\n\n    public ArchMQConsumer( String nameSrvAddr, String groupName, String topic, String tag ) {\n        this.mszNameServerAddr = nameSrvAddr;\n        this.mszGroupName      = groupName;\n        this.mszTopic          = topic;\n        this.mszTag            = tag;\n    }\n\n    @Override\n    public String topic() {\n        return this.mszTopic;\n    }\n\n    @Override\n    public String tag() {\n        return this.mszTag;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketClient.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport java.util.function.Supplier;\n\nimport org.apache.rocketmq.client.producer.DefaultMQProducer;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\nimport com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit;\n\npublic interface RocketClient extends BroadcastNode {\n\n    @Override\n    default ErrorMessageAudit    getErrorMessageAudit() {\n        return null;\n    }\n\n    @Override\n    default void                 setErrorMessageAudit( ErrorMessageAudit audit ){\n\n    }\n\n\n\n    RocketConfig getRocketConfig();\n\n    BroadcastProducer createProducer(Supplier<DefaultMQProducer> producerSupplier );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketConfig.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport com.pinecone.hydra.umc.msg.MsgNodeConfig;\n\npublic interface RocketConfig extends MsgNodeConfig {\n    String getNameServerAddr();\n\n    String getGroupName();\n\n    int getMaxMessageSize();\n\n    int getSendMsgTimeout();\n\n    int getRetryTimesWhenSendFailed();\n\n    @Override\n    default long getSyncWaitingMillis() {\n        return this.getSendMsgTimeout();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketConstants.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\npublic final class RocketConstants {\n\n    public static Integer DefaultMaxMessageSize           = 4096;\n\n    public static Integer DefaultSendMsgTimeout           = 8000;\n\n    public static Integer DefaultRetryTimesWhenSendFailed = 2;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketMQClient.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport com.pinecone.hydra.umc.msg.Messagus;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\nimport org.apache.rocketmq.client.producer.DefaultMQProducer;\n\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.function.Supplier;\n\npublic class RocketMQClient implements RocketClient {\n    protected Map<BroadcastProducer, Object> mProducerRegister;\n\n    protected Map<BroadcastConsumer, Object> mConsumerRegister;\n\n    protected RocketConfig                   mRocketConfig;\n\n    protected long                           mnNodeId;\n\n    private static final Object PRESENT = new Object();\n\n\n    public RocketMQClient( long nodeId, RocketConfig config ) {\n        this.mRocketConfig     = config;\n        this.mProducerRegister = new ConcurrentHashMap<>();\n        this.mConsumerRegister = new ConcurrentHashMap<>();\n        this.mnNodeId          = nodeId;\n    }\n\n    public RocketMQClient( long nodeId, String nameSrvAddr, String groupName ) {\n        this( nodeId, new RocketMQConfig(\n                nameSrvAddr, groupName, RocketConstants.DefaultMaxMessageSize, RocketConstants.DefaultSendMsgTimeout, RocketConstants.DefaultRetryTimesWhenSendFailed\n        ) );\n    }\n\n    public RocketMQClient( String nameSrvAddr, String groupName ) {\n        this( Messagus.nextLocalId(), nameSrvAddr, groupName );\n    }\n\n    public RocketMQClient( long nodeId, Map<String, Object> config ){\n        this( nodeId, new RocketMQConfig( config ) );\n    }\n\n    public RocketMQClient( Map<String, Object> config ){\n        this( Messagus.nextLocalId(), config );\n    }\n\n\n    @Override\n    public ExtraHeadCoder getExtraHeadCoder() {\n        return null;\n    }\n\n    @Override\n    public long 
getMessageNodeId() {\n        return this.mnNodeId;\n    }\n\n    @Override\n    public RocketConfig getRocketConfig() {\n        return this.mRocketConfig;\n    }\n\n    @Override\n    public RocketConfig getMessageNodeConfig() {\n        return this.getRocketConfig();\n    }\n\n    @Override\n    public void close() {\n        for( Map.Entry<BroadcastConsumer, Object> kv : this.mConsumerRegister.entrySet() ) {\n            kv.getKey().close();\n        }\n\n        for( Map.Entry<BroadcastProducer, Object> kv : this.mProducerRegister.entrySet() ) {\n            kv.getKey().close();\n        }\n\n        this.mConsumerRegister.clear();\n        this.mProducerRegister.clear();\n    }\n\n\n    @Override\n    public void register( BroadcastProducer producer ) {\n        this.mProducerRegister.put( producer, PRESENT );\n    }\n\n    @Override\n    public void register( BroadcastConsumer consumer ) {\n        this.mConsumerRegister.put( consumer, PRESENT );\n    }\n\n    @Override\n    public void deregister( BroadcastProducer producer ) {\n        this.mProducerRegister.remove( producer );\n    }\n\n    @Override\n    public void deregister( BroadcastConsumer consumer ) {\n        this.mConsumerRegister.remove( consumer );\n    }\n\n\n    @Override\n    public BroadcastProducer createProducer( Supplier<DefaultMQProducer> producerSupplier ) {\n        BroadcastProducer producer = new UlfBroadcastProducer( this, producerSupplier );\n        this.register( producer );\n        return producer;\n    }\n\n    @Override\n    public BroadcastProducer createProducer() {\n        return this.createProducer( DefaultMQProducer::new );\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( String topic, String ns ) {\n        BroadcastConsumer consumer = new UlfPushConsumer( this, topic, ns );\n        this.register( consumer );\n        return consumer;\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( String topic ) {\n        return 
this.createConsumer( topic, \"\" );\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( UNT unt ) {\n        return this.createConsumer( unt.getTopic(), unt.getNamespace() );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketMQConfig.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport java.util.Map;\n\npublic class RocketMQConfig implements RocketConfig {\n    protected String mszNameServerAddr;\n    protected String mszGroupName;\n    protected int    mnMaxMessageSize;\n    protected int    mnSendMsgTimeout;\n    protected int    mnRetryTimesWhenSendFailed;\n\n    public RocketMQConfig( Map<String, Object> conf ){\n        this.mszNameServerAddr           = (String) conf.get( \"NameServerAddr\" );\n        this.mszGroupName                = (String) conf.get( \"GroupName\" );\n        this.mnMaxMessageSize            = ( (Number) conf.get( \"MaxMessageSize\" ) ).intValue();\n        this.mnSendMsgTimeout            = ( (Number) conf.get( \"SendMsgTimeout\" ) ).intValue();\n        this.mnRetryTimesWhenSendFailed  = ( (Number) conf.get( \"RetryTimesWhenSendFailed\" ) ).intValue();\n    }\n\n    public RocketMQConfig( String nameServerAddr, String groupName, int maxMessageSize, int sendMsgTimeout, int retryTimesWhenSendFailed ) {\n        this.mszNameServerAddr          = nameServerAddr;\n        this.mszGroupName               = groupName;\n        this.mnMaxMessageSize           = maxMessageSize;\n        this.mnSendMsgTimeout           = sendMsgTimeout;\n        this.mnRetryTimesWhenSendFailed = retryTimesWhenSendFailed;\n    }\n\n    @Override\n    public String getNameServerAddr() {\n        return this.mszNameServerAddr;\n    }\n\n    @Override\n    public String getGroupName() {\n        return this.mszGroupName;\n    }\n\n    @Override\n    public int getMaxMessageSize() {\n        return this.mnMaxMessageSize;\n    }\n\n    @Override\n    public int getSendMsgTimeout() {\n        return this.mnSendMsgTimeout;\n    }\n\n    @Override\n    public int getRetryTimesWhenSendFailed() {\n        return this.mnRetryTimesWhenSendFailed;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketMedium.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport java.io.InputStream;\nimport java.io.OutputStream;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.MessageNodus;\n\npublic class RocketMedium implements Medium {\n    protected MessageNodus mMessageNode;\n\n    public RocketMedium( MessageNodus medium ) {\n        this.mMessageNode = medium;\n    }\n\n    @Override\n    public OutputStream getOutputStream(){\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public InputStream getInputStream(){\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public Object getNativeMessageSource(){\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public String sourceName(){\n        return \"RocketMQ\";\n    }\n\n    @Override\n    public MessageNodus getMessageNode() {\n        return this.mMessageNode;\n    }\n\n    @Override\n    public void release() {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketReceiver.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umb.broadcast.ArchUnidirectionalMCProtocol;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\n\npublic class RocketReceiver extends ArchUnidirectionalMCProtocol implements UMCReceiver {\n\n    public RocketReceiver( Medium messageSource ) {\n        super( messageSource );\n    }\n\n    @Override\n    public Object readInformMsg() throws IOException {\n        return null;\n    }\n\n    @Override\n    public UMCMessage readTransferMsg() throws IOException {\n        return null;\n    }\n\n    @Override\n    public UMCMessage readTransferMsgBytes() throws IOException {\n        return null;\n    }\n\n    @Override\n    public UMCMessage readMsg() throws IOException {\n        return null;\n    }\n\n    @Override\n    public UMCMessage readMsgBytes() throws IOException {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/RocketTransmit.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.hydra.umb.broadcast.ArchUnidirectionalMCProtocol;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.Status;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\n\npublic class RocketTransmit extends ArchUnidirectionalMCProtocol implements UMCTransmit {\n    protected Logger logger = LoggerFactory.getLogger( this.getClass() );\n\n    public RocketTransmit( Medium messageSource ) {\n        super( messageSource );\n    }\n\n    @Override\n    public void sendInformMsg( Object msg ) throws IOException {\n\n    }\n\n    @Override\n    public void sendInformMsg( Object msg, Status status ) throws IOException {\n        if ( status != Status.OK ) {\n            this.logger.warn( \"IllegalTransmitResponse for broadcast message nodes. what => {}, {}\", msg, status );\n        }\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, byte[] bytes ) throws IOException {\n\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, byte[] bytes, Status status ) throws IOException {\n        if ( status != Status.OK ) {\n            this.logger.warn( \"IllegalTransmitResponse for broadcast message nodes. what => {}, {}\", msg, status );\n        }\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, InputStream is ) throws IOException {\n\n    }\n\n    @Override\n    public void sendMsg( UMCMessage msg, boolean bNoneBuffered ) throws IOException {\n        if ( msg.getHead().getStatus() != Status.OK ) {\n            this.logger.warn( \"IllegalTransmitResponse for broadcast message nodes. what => {}\", msg );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/UlfBroadcastProducer.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport com.pinecone.hydra.umb.UMBClientException;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport org.apache.rocketmq.client.exception.MQBrokerException;\nimport org.apache.rocketmq.client.exception.MQClientException;\nimport org.apache.rocketmq.client.producer.DefaultMQProducer;\nimport org.apache.rocketmq.client.producer.MQProducer;\nimport org.apache.rocketmq.common.message.Message;\nimport org.apache.rocketmq.remoting.exception.RemotingException;\n\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.function.Supplier;\n\npublic class UlfBroadcastProducer implements BroadcastProducer {\n    protected String mszNameServerAddr;\n\n    protected String mszGroupName;\n\n    protected int mnMaxMessageSize ;\n\n    protected int mnSendMsgTimeout;\n\n    protected int mnRetryTimesWhenSendFailed;\n\n    protected MQProducer mWrappedProducer;\n\n    protected RocketClient mRocketClient;\n\n    protected AtomicBoolean mStart = new AtomicBoolean( false );\n\n\n    public UlfBroadcastProducer( RocketClient client, Supplier<DefaultMQProducer> producerSupplier ) {\n        this.mRocketClient = client;\n        RocketConfig config = client.getRocketConfig();\n        this.mszNameServerAddr          = config.getNameServerAddr();\n        this.mszGroupName               = config.getGroupName();\n        this.mnMaxMessageSize           = config.getMaxMessageSize();\n        this.mnSendMsgTimeout           = config.getSendMsgTimeout();\n        this.mnRetryTimesWhenSendFailed = config.getRetryTimesWhenSendFailed();\n\n        DefaultMQProducer producer = producerSupplier.get();\n        producer.setProducerGroup(this.mszGroupName);\n        producer.setNamesrvAddr(this.mszNameServerAddr);\n        
producer.setMaxMessageSize(this.mnMaxMessageSize);\n        producer.setSendMsgTimeout(this.mnSendMsgTimeout);\n        producer.setRetryTimesWhenSendFailed(this.mnRetryTimesWhenSendFailed);\n        this.mWrappedProducer = producer;\n    }\n\n    public UlfBroadcastProducer( RocketClient client ) {\n        this( client, DefaultMQProducer::new );\n    }\n\n\n    public RocketClient getRocketClient() {\n        return this.mRocketClient;\n    }\n\n    @Override\n    public void sendMessage( String topic, String ns, String name, byte[] body ) throws UMBClientException {\n        Message msg = new Message( topic, ns, name, body );\n        try {\n            this.mWrappedProducer.send( msg );\n        }\n        catch ( MQClientException | RemotingException | MQBrokerException | InterruptedException e ) {\n            throw new UMBClientException( e );\n        }\n    }\n\n    @Override\n    public void sendMessage( String topic, byte[] body ) throws UMBClientException {\n        this.sendMessage( topic, \"\", BroadcastNode.DefaultEntityName, body );\n    }\n\n    @Override\n    public void sendMessage( UNT unt, String name, byte[] body ) throws UMBClientException {\n        this.sendMessage( unt.getTopic(), unt.getNamespace(), name, body );\n    }\n\n    @Override\n    public void close() {\n        this.mWrappedProducer.shutdown();\n        this.mRocketClient.deregister( this );\n        this.mStart.compareAndSet( true, false );\n    }\n\n    @Override\n    public void start() throws UMBServiceException {\n        try {\n            this.mWrappedProducer.start();\n            this.mStart.compareAndSet( false, true );\n        }\n        catch ( MQClientException e ) {\n            throw new UMBServiceException( e );\n        }\n    }\n\n    @Override\n    public boolean isClosed() {\n        return !this.mStart.get();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/UlfPushConsumer.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\nimport com.pinecone.hydra.umb.broadcast.PushConsumer;\n\nimport org.apache.rocketmq.client.consumer.DefaultMQPushConsumer;\nimport org.apache.rocketmq.client.consumer.MQPushConsumer;\nimport org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyContext;\nimport org.apache.rocketmq.client.consumer.listener.ConsumeConcurrentlyStatus;\nimport org.apache.rocketmq.client.consumer.listener.MessageListenerConcurrently;\nimport org.apache.rocketmq.client.exception.MQClientException;\nimport org.apache.rocketmq.common.message.MessageExt;\n\nimport java.util.List;\n\npublic class UlfPushConsumer extends ArchMQConsumer implements PushConsumer {\n\n    protected MQPushConsumer  wrappedConsumer;\n\n    protected RocketClient    mRocketClient;\n\n    public UlfPushConsumer( RocketClient client, String topic, String tag ) {\n        super(\n                client.getRocketConfig().getNameServerAddr(),\n                client.getRocketConfig().getGroupName(),\n                topic, tag\n        );\n        this.mRocketClient = client;\n    }\n\n    public MQPushConsumer newMQPushConsumer( UlfPackageMessageHandler handler ) throws UMBServiceException {\n        DefaultMQPushConsumer consumer = new DefaultMQPushConsumer( this.mszGroupName );\n        consumer.setNamesrvAddr( this.mszNameServerAddr );\n\n        try {\n            consumer.subscribe( this.mszTopic, this.mszTag );\n            consumer.registerMessageListener(new MessageListenerConcurrently() {\n                @Override\n                public ConsumeConcurrentlyStatus consumeMessage( List<MessageExt> msgs, ConsumeConcurrentlyContext context ) {\n                    for ( MessageExt msg : msgs ) {\n                        try{\n                            handler.onSuccessfulMsgReceived( msg.getBody(), new Object[] { msg, msgs, context } );\n 
                       }\n                        catch ( Exception e ) {\n                            handler.onError( msg.getBody(), e );\n                            return ConsumeConcurrentlyStatus.RECONSUME_LATER;\n                        }\n                    }\n                    return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;\n                }\n            });\n            //consumer.start();\n        }\n        catch ( MQClientException e ) {\n            throw new UMBServiceException( e );\n        }\n        return consumer;\n    }\n\n    @Override\n    public void start( UlfPackageMessageHandler handler ) throws UMBServiceException {\n        MQPushConsumer consumer = this.newMQPushConsumer( handler );\n\n        if ( this.wrappedConsumer == null ) {\n            this.wrappedConsumer = consumer;\n        }\n\n        try{\n            consumer.start();\n        }\n        catch ( MQClientException e ) {\n            throw new UMBServiceException( e );\n        }\n    }\n\n    @Override\n    public void close() {\n        if ( this.wrappedConsumer != null ) {\n            this.wrappedConsumer.shutdown();\n            this.mRocketClient.deregister( this );\n            this.wrappedConsumer = null;\n        }\n    }\n\n    @Override\n    public boolean isClosed() {\n        return this.wrappedConsumer == null;\n    }\n\n    public RocketClient getRocketClient() {\n        return this.mRocketClient;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/UlfRocketClient.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport java.util.function.Supplier;\n\nimport org.apache.rocketmq.client.producer.DefaultMQProducer;\n\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\n\npublic interface UlfRocketClient extends RocketClient, UMCBroadcastNode {\n\n    UMCBroadcastProducer createUlfProducer( Supplier<DefaultMQProducer> producerSupplier ) ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/WolfBroadcastProducer.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.hydra.umb.UMBClientException;\nimport com.pinecone.hydra.umb.UMCPackageMessageEncoder;\nimport com.pinecone.hydra.umb.UlfPackageMessageEncoder;\nimport com.pinecone.hydra.umb.broadcast.BroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\nimport org.apache.rocketmq.client.producer.DefaultMQProducer;\n\nimport java.io.IOException;\nimport java.util.function.Supplier;\n\npublic class WolfBroadcastProducer extends UlfBroadcastProducer implements UMCBroadcastProducer {\n    protected UMCPackageMessageEncoder mUMCPackageMessageEncoder;\n\n    protected ExtraHeadCoder           mExtraHeadCoder;\n\n    public WolfBroadcastProducer( UlfRocketClient client, Supplier<DefaultMQProducer> producerSupplier, @Nullable ExtraHeadCoder extraHeadCoder ) {\n        super( client, producerSupplier );\n\n        this.mExtraHeadCoder           = extraHeadCoder;\n        if ( this.mExtraHeadCoder == null ) {\n            this.mExtraHeadCoder = client.getExtraHeadCoder();\n        }\n\n        this.mUMCPackageMessageEncoder = new UlfPackageMessageEncoder( this.mExtraHeadCoder );\n    }\n\n    public WolfBroadcastProducer( UlfRocketClient client ) {\n        this( client, DefaultMQProducer::new, null );\n    }\n\n\n    @Override\n    public UlfRocketClient getRocketClient() {\n        return (UlfRocketClient)this.mRocketClient;\n    }\n\n\n    @Override\n    public void sendMessage( String topic, String ns, String name, UMCMessage message ) throws UMBClientException {\n        try{\n            this.sendMessage( topic, ns, name, this.mUMCPackageMessageEncoder.encode( message ) );\n        }\n        catch ( IOException e ) {\n            throw new UMBClientException( e );\n        }\n    }\n\n 
   @Override\n    public void sendMessage( String topic, UMCMessage message ) throws UMBClientException {\n        this.sendMessage( topic, \"\", BroadcastNode.DefaultEntityName, message );\n    }\n\n    @Override\n    public void sendMessage( UNT unt, String name, UMCMessage message ) throws UMBClientException {\n        this.sendMessage( unt.getTopic(), unt.getNamespace(), name, message );\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/WolfMCRocketClient.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport java.util.Map;\nimport java.util.function.Supplier;\n\nimport org.apache.rocketmq.client.producer.DefaultMQProducer;\n\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport com.pinecone.hydra.umc.msg.Messagus;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit;\nimport com.pinecone.hydra.umc.msg.handler.GenericErrorMessageAudit;\n\n/**\n *  Pinecone Ursus For Java Wolf-UMC-RocketMQ [ Wolf, Uniform Message Control Protocol Client ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Uniform Message Control Protocol for RocketMQ Client\n *  统一消息广播控制客户端 (RocketMQ 版本)\n *  **********************************************************\n */\npublic class WolfMCRocketClient extends RocketMQClient implements UlfRocketClient {\n\n    protected ExtraHeadCoder           mExtraHeadCoder;\n\n    protected ErrorMessageAudit        mErrorMessageAudit;\n\n    public WolfMCRocketClient( long nodeId, RocketConfig config, ExtraHeadCoder extraHeadCoder ) {\n        super( nodeId, config );\n\n        this.mExtraHeadCoder           = extraHeadCoder;\n        this.mErrorMessageAudit        = new GenericErrorMessageAudit( this );\n    }\n\n    public WolfMCRocketClient( long nodeId, String nameSrvAddr, String groupName, ExtraHeadCoder extraHeadCoder ) {\n        this( nodeId, new RocketMQConfig(\n                nameSrvAddr, groupName, RocketConstants.DefaultMaxMessageSize, RocketConstants.DefaultSendMsgTimeout, RocketConstants.DefaultRetryTimesWhenSendFailed\n        ), extraHeadCoder );\n    }\n\n    public WolfMCRocketClient( String 
nameSrvAddr, String groupName, ExtraHeadCoder extraHeadCoder ) {\n        this( Messagus.nextLocalId(), nameSrvAddr, groupName, extraHeadCoder );\n    }\n\n    public WolfMCRocketClient( String nameSrvAddr, String groupName ) {\n        this( Messagus.nextLocalId(), nameSrvAddr, groupName, new GenericExtraHeadCoder() );\n    }\n\n    public WolfMCRocketClient( long nodeId, Map<String, Object> config, ExtraHeadCoder extraHeadCoder ){\n        this( nodeId, new RocketMQConfig( config ), extraHeadCoder );\n    }\n\n    public WolfMCRocketClient( Map<String, Object> config, ExtraHeadCoder extraHeadCoder ){\n        this( Messagus.nextLocalId(), config, extraHeadCoder );\n    }\n\n\n\n    @Override\n    public ErrorMessageAudit getErrorMessageAudit() {\n        return this.mErrorMessageAudit;\n    }\n\n    @Override\n    public void setErrorMessageAudit( ErrorMessageAudit audit ){\n        this.mErrorMessageAudit = audit;\n    }\n\n\n    @Override\n    public ExtraHeadCoder getExtraHeadCoder() {\n        return this.mExtraHeadCoder;\n    }\n\n    @Override\n    public UMCBroadcastProducer createUlfProducer( Supplier<DefaultMQProducer> producerSupplier ) {\n        UMCBroadcastProducer producer = new WolfBroadcastProducer( this, producerSupplier, this.mExtraHeadCoder );\n        this.register( producer );\n        return producer;\n    }\n\n    @Override\n    public UMCBroadcastProducer createUlfProducer() {\n        return this.createUlfProducer( DefaultMQProducer::new );\n    }\n\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( String topic, String ns ) {\n        UMCBroadcastConsumer consumer = new WolfPushConsumer( this, topic, ns, this.mExtraHeadCoder );\n        this.register( consumer );\n        return consumer;\n    }\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( String topic ) {\n        return this.createUlfConsumer( topic, \"\" );\n    }\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( UNT unt ) 
{\n        return this.createUlfConsumer( unt.getTopic(), unt.getNamespace() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/rocket/WolfPushConsumer.java",
    "content": "package com.pinecone.hydra.umb.rocket;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.hydra.umb.UMBBytesDecoder;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfMBInformMessage;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;\nimport com.pinecone.hydra.umc.msg.EMCBytesDecoder;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umc.wolf.UlfBytesTransferMessage;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\npublic class WolfPushConsumer extends UlfPushConsumer implements UMCBroadcastConsumer {\n    protected EMCBytesDecoder          mEMCBytesDecoder;\n\n    protected ExtraHeadCoder           mExtraHeadCoder;\n\n    protected Medium                   mMedium;\n    protected UMCTransmit              mUMCTransmit;\n    protected UMCReceiver              mUMCReceiver;\n\n\n    public WolfPushConsumer( UlfRocketClient client, String topic, String tag, @Nullable ExtraHeadCoder extraHeadCoder ) {\n        super( client, topic, tag );\n\n        this.mExtraHeadCoder           = extraHeadCoder;\n        if ( this.mExtraHeadCoder == null ) {\n            this.mExtraHeadCoder = client.getExtraHeadCoder();\n        }\n\n        this.mEMCBytesDecoder = new UMBBytesDecoder();\n\n        // Dummy [ MQ is base on unidirectional communication. 
]\n        this.mMedium          = new RocketMedium( this.getRocketClient() );\n        this.mUMCReceiver     = new RocketReceiver( this.mMedium );\n        this.mUMCTransmit     = new RocketTransmit( this.mMedium );\n    }\n\n    public WolfPushConsumer( UlfRocketClient client, String topic, String tag ) {\n        this( client, topic, tag, null );\n    }\n\n    @Override\n    public UlfRocketClient getRocketClient() {\n        return (UlfRocketClient)this.mRocketClient;\n    }\n\n\n    protected UMCMessage decodeMessage( byte[] raw ) throws IOException {\n        UMCHead head = WolfPushConsumer.this.mEMCBytesDecoder.decodeIntegrated( raw, WolfPushConsumer.this.mExtraHeadCoder );\n        if ( head.getMethod() == UMCMethod.TRANSFER ) {\n            int bodyLen = (int)head.getBodyLength();\n            byte[] bodyBuf = new byte[ bodyLen ];\n            int headSize = head.sizeof() + head.getExtraHeadLength();\n            System.arraycopy( raw, headSize, bodyBuf, 0, bodyLen );\n\n            return new UlfBytesTransferMessage( head, bodyBuf );\n        }\n        return new UlfMBInformMessage( head );\n    }\n\n    @Override\n    public void start( UMCTExpressHandler handler ) throws UMBServiceException {\n        super.start(new UlfPackageMessageHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( byte[] raw, Object[] args ) throws Exception {\n                UMCMessage message = WolfPushConsumer.this.decodeMessage( raw );\n                handler.onSuccessfulMsgReceived( WolfPushConsumer.this.mMedium, WolfPushConsumer.this.mUMCTransmit, WolfPushConsumer.this.mUMCReceiver, message, args );\n            }\n\n            @Override\n            public void onErrorMsgReceived( byte[] raw, Object[] args ) throws Exception {\n                UMCMessage message = WolfPushConsumer.this.decodeMessage( raw );\n                handler.onErrorMsgReceived( WolfPushConsumer.this.mMedium, WolfPushConsumer.this.mUMCTransmit, 
WolfPushConsumer.this.mUMCReceiver, message, args );\n            }\n\n            @Override\n            public void onError( Object data, Throwable cause ) {\n                handler.onError( data, cause );\n            }\n        });\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/ArchBroadcastControlAgent.java",
    "content": "package com.pinecone.hydra.umb.wolf;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlAgent;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlNode;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery;\n\npublic abstract class ArchBroadcastControlAgent implements BroadcastControlAgent {\n    protected MCTContextMachinery mMCTContextMachinery;\n\n    protected BroadcastControlNode mBroadcastControlNode;\n\n    public ArchBroadcastControlAgent( BroadcastControlNode controlNode ) {\n        this.mBroadcastControlNode = controlNode;\n        this.mMCTContextMachinery = controlNode.getMCTTransformer();\n    }\n\n\n    @Override\n    public InterfacialCompiler getInterfacialCompiler() {\n        return this.mBroadcastControlNode.getInterfacialCompiler();\n    }\n\n    @Override\n    public MCTContextMachinery getMCTTransformer() {\n        return this.mMCTContextMachinery;\n    }\n\n    @Override\n    public BroadcastControlNode broadcastControlNode() {\n        return this.mBroadcastControlNode;\n    }\n\n    @Override\n    public ClassDigest queryClassDigest( String name ) {\n        return this.mBroadcastControlNode.queryClassDigest( name );\n    }\n\n    @Override\n    public MethodDigest queryMethodDigest( String name ) {\n        return this.mBroadcastControlNode.queryMethodDigest( name );\n    }\n\n    @Override\n    public void addClassDigest( ClassDigest that ) {\n        this.mBroadcastControlNode.addClassDigest( that );\n    }\n\n    @Override\n    public void addMethodDigest( MethodDigest that ) {\n        this.mBroadcastControlNode.addMethodDigest( that );\n    }\n\n    @Override\n    public ClassDigest compile( Class<? 
> clazz, boolean bAsIface ) {\n        return this.mBroadcastControlNode.compile( clazz, bAsIface );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/ArchUlfBroadcastControlAgent.java",
    "content": "package com.pinecone.hydra.umb.wolf;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlNode;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\n\npublic abstract class ArchUlfBroadcastControlAgent extends ArchBroadcastControlAgent implements UlfBroadcastControlAgent {\n\n    public ArchUlfBroadcastControlAgent( BroadcastControlNode controlNode ) {\n        super( controlNode );\n    }\n\n    @Override\n    public ProtoInterfacialCompiler getInterfacialCompiler() {\n        return this.broadcastControlNode().getInterfacialCompiler();\n    }\n\n    @Override\n    public PMCTContextMachinery getMCTTransformer() {\n        return (PMCTContextMachinery) this.mMCTContextMachinery;\n    }\n\n    @Override\n    public FieldProtobufEncoder getFieldProtobufEncoder() {\n        return this.broadcastControlNode().getFieldProtobufEncoder();\n    }\n\n    @Override\n    public FieldProtobufDecoder getFieldProtobufDecoder() {\n        return this.broadcastControlNode().getFieldProtobufDecoder();\n    }\n\n    @Override\n    public UlfBroadcastControlNode broadcastControlNode() {\n        return (UlfBroadcastControlNode) super.broadcastControlNode();\n    }\n\n\n\n\n    protected CompilerEncoder getCompilerEncoder() {\n        return this.getInterfacialCompiler().getCompilerEncoder();\n    }\n\n    protected DynamicMessage reinterpretMsg( MethodPrototype prototype, Object[] args ) {\n        FieldProtobufEncoder 
encoder = this.getFieldProtobufEncoder();\n        Descriptors.Descriptor descriptor = prototype.getArgumentsDescriptor();\n\n        FieldEntity[] types = prototype.getArgumentTemplate().getSegments();\n        for ( int i = 0; i < args.length; ++i ) {\n            Object v = args [ i ]; // Fuck duplicated codes.\n            types[ i + 1 ].setValue( v );\n        }\n\n        return encoder.encode(\n                descriptor, types, this.getCompilerEncoder().getExceptedKeys(), this.getCompilerEncoder().getOptions()\n        );\n    }\n\n    protected DynamicMethodPrototype queryMethodPrototype(String szMethodAddress ) {\n        DynamicMethodPrototype method = (DynamicMethodPrototype) this.queryMethodDigest( szMethodAddress );\n        if ( method == null ) {\n            throw new IllegalArgumentException( \"Method address: `\" + szMethodAddress + \"` is invalid.\" );\n        }\n\n        return method;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/UlfBroadcastControlAgent.java",
    "content": "package com.pinecone.hydra.umb.wolf;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlAgent;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\n\npublic interface UlfBroadcastControlAgent extends BroadcastControlAgent {\n\n    @Override\n    PMCTContextMachinery getMCTTransformer();\n\n    @Override\n    ProtoInterfacialCompiler getInterfacialCompiler();\n\n    default FieldProtobufEncoder getFieldProtobufEncoder() {\n        return this.getInterfacialCompiler().getCompilerEncoder().getEncoder();\n    }\n\n    FieldProtobufDecoder getFieldProtobufDecoder();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/UlfBroadcastControlNode.java",
    "content": "package com.pinecone.hydra.umb.wolf;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlNode;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastNode;\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;\nimport com.pinecone.hydra.umct.husky.machinery.RouteDispatcher;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\n\npublic interface UlfBroadcastControlNode extends BroadcastControlNode {\n\n    UMCTExpress createUlfExpress( String name ) ;\n\n    RouteDispatcher createHuskyRoute() ;\n\n    RouteDispatcher createHuskyRoute( UMCTExpress express ) ;\n\n    @Override\n    PMCTContextMachinery getMCTTransformer();\n\n    @Override\n    ProtoInterfacialCompiler getInterfacialCompiler();\n\n    default FieldProtobufEncoder getFieldProtobufEncoder() {\n        return this.getInterfacialCompiler().getCompilerEncoder().getEncoder();\n    }\n\n    FieldProtobufDecoder getFieldProtobufDecoder();\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/UlfBroadcastControlProducer.java",
    "content": "package com.pinecone.hydra.umb.wolf;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;\n\npublic interface UlfBroadcastControlProducer extends BroadcastControlProducer {\n\n    @Override\n    PMCTContextMachinery getMCTTransformer();\n\n    @Override\n    ProtoInterfacialCompiler getInterfacialCompiler();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/WolfMCBClient.java",
    "content": "package com.pinecone.hydra.umb.wolf;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\n\nimport org.slf4j.Logger;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.servgram.ArchServgramium;\nimport com.pinecone.hydra.umb.broadcast.BroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastNode;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport com.pinecone.hydra.umc.msg.MsgNodeConfig;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit;\nimport com.pinecone.hydra.umct.MessageJunction;\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyContextMachinery;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcher;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcherFabricator;\nimport com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery;\nimport com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;\nimport com.pinecone.hydra.umct.husky.machinery.RouteDispatcher;\nimport com.pinecone.hydra.umct.mapping.BytecodeControllerInspector;\nimport 
com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder;\n\nimport javassist.ClassPool;\n\n/**\n *  Pinecone Ursus For Java Wolf-UMCT-B [ Uniform Message Broadcast Control Transmit ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Uniform Message Control Transmission Protocol - Broadcast [UMC-T-B]\n *  统一消息广播传输控制传输协议\n *  **********************************************************\n */\npublic class WolfMCBClient extends ArchServgramium implements UlfBroadcastControlNode {\n\n    protected PMCTContextMachinery          mPMCTContextMachinery;\n\n    protected RouteDispatcher               mRouteDispatcher;\n\n    protected UMCBroadcastNode              mUMCBroadcastNode;\n\n\n    public WolfMCBClient( UMCBroadcastNode broadcastNode, @Nullable RouteDispatcher routeDispatcher, PMCTContextMachinery machinery, String szGramName, Processum parent ) {\n        super( szGramName, parent );\n\n        this.mPMCTContextMachinery = machinery;\n        this.mRouteDispatcher      = routeDispatcher;\n        this.mUMCBroadcastNode     = broadcastNode;\n    }\n\n    public WolfMCBClient( UMCBroadcastNode broadcastNode, PMCTContextMachinery machinery, String szGramName, Processum parent ) {\n        this( broadcastNode, null, machinery, szGramName, parent );\n\n        this.mRouteDispatcher = this.createHuskyRoute();\n    }\n\n    public WolfMCBClient( UMCBroadcastNode broadcastNode, PMCTContextMachinery machinery, String szGramName, Processum parent, Class<?> expressType ) {\n        this( broadcastNode, null, machinery, szGramName, parent );\n\n        UMCTExpress express   = this.createUMCTExpress( BroadcastNode.DefaultEntityName, expressType );\n        this.mRouteDispatcher = this.createHuskyRoute( express );\n        
HuskyRouteDispatcherFabricator.afterConstructed( (HuskyRouteDispatcher)this.mRouteDispatcher, express );\n    }\n\n    public WolfMCBClient( UMCBroadcastNode broadcastNode, String szGramName, Processum parent, Class<?> expressType ) {\n        this(\n                broadcastNode,\n                null,\n\n                new HuskyContextMachinery( new BytecodeIfaceCompiler(\n                        ClassPool.getDefault(), parent.getTaskManager().getClassLoader()\n                ), new BytecodeControllerInspector(\n                        ClassPool.getDefault(),  parent.getTaskManager().getClassLoader()\n                ), new GenericFieldProtobufDecoder() ),\n\n                szGramName,\n                parent\n        );\n\n        UMCTExpress express   = this.createUMCTExpress( BroadcastNode.DefaultEntityName, expressType );\n        this.mRouteDispatcher = this.createHuskyRoute( express );\n        HuskyRouteDispatcherFabricator.afterConstructed( (HuskyRouteDispatcher)this.mRouteDispatcher, express );\n    }\n\n\n\n    @Override\n    public long getMessageNodeId() {\n        return this.mUMCBroadcastNode.getMessageNodeId();\n    }\n\n    @Override\n    public ErrorMessageAudit getErrorMessageAudit() {\n        return this.mUMCBroadcastNode.getErrorMessageAudit();\n    }\n\n    @Override\n    public void setErrorMessageAudit( ErrorMessageAudit audit ) {\n        this.mUMCBroadcastNode.setErrorMessageAudit( audit );\n    }\n\n    @Override\n    public MsgNodeConfig getMessageNodeConfig() {\n        return this.mUMCBroadcastNode.getMessageNodeConfig();\n    }\n\n    @Override\n    public void applyMCTContextMachinery( MCTContextMachinery mctContextMachinery ) {\n        this.mPMCTContextMachinery = (PMCTContextMachinery) mctContextMachinery;\n    }\n\n    @Override\n    public void applyRouteDispatcher( RouteDispatcher routeDispatcher ) {\n        this.mRouteDispatcher = routeDispatcher;\n    }\n\n    @Override\n    public UMCTExpress createUMCTExpress( String 
name, Class<?> expressType ) {\n        try{\n            Constructor<?> constructor = expressType.getConstructor( String.class, MessageJunction.class, Logger.class );\n            return  (UMCTExpress) constructor.newInstance( name, this, this.getLogger() );\n        }\n        catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            throw new IllegalArgumentException( \"`\" + expressType.getSimpleName() + \"` is not UMCTExpress calibre qualified.\" );\n        }\n    }\n\n    @Override\n    public UMCTExpress createUlfExpress( String name ) {\n        return this.createUMCTExpress( name, WolfMCExpress.class );\n    }\n\n    @Override\n    public RouteDispatcher createHuskyRoute() {\n        return new HuskyRouteDispatcher( this.getTaskManager().getClassLoader(), true );\n    }\n\n    @Override\n    public RouteDispatcher createHuskyRoute( UMCTExpress express ) {\n        RouteDispatcher dispatcher = this.createHuskyRoute();\n        dispatcher.setUMCTExpress( express );\n        return dispatcher;\n    }\n\n\n\n    @Override\n    public ProtoInterfacialCompiler getInterfacialCompiler() {\n        return this.mPMCTContextMachinery.getInterfacialCompiler();\n    }\n\n    @Override\n    public PMCTContextMachinery getMCTTransformer() {\n        return this.mPMCTContextMachinery;\n    }\n\n    @Override\n    public RouteDispatcher getRouteDispatcher() {\n        return this.mRouteDispatcher;\n    }\n\n    @Override\n    public FieldProtobufEncoder getFieldProtobufEncoder() {\n        return this.mPMCTContextMachinery.getFieldProtobufEncoder();\n    }\n\n    @Override\n    public FieldProtobufDecoder getFieldProtobufDecoder() {\n        return this.mPMCTContextMachinery.getFieldProtobufDecoder();\n    }\n\n\n\n\n    @Override\n    public ClassDigest queryClassDigest( String name ) {\n        return this.mPMCTContextMachinery.queryClassDigest( name );\n    }\n\n    
@Override\n    public MethodDigest queryMethodDigest( String name ) {\n        return this.mPMCTContextMachinery.queryMethodDigest( name );\n    }\n\n    @Override\n    public void addClassDigest( ClassDigest that ) {\n        this.mPMCTContextMachinery.addClassDigest( that );\n    }\n\n    @Override\n    public void addMethodDigest( MethodDigest that ) {\n        this.mPMCTContextMachinery.addMethodDigest( that );\n    }\n\n    @Override\n    public ClassDigest compile( Class<? > clazz, boolean bAsIface ) {\n        return this.mPMCTContextMachinery.compile( clazz, bAsIface );\n    }\n\n\n\n\n\n    @Override\n    public void registerInstance( String deliverName, Object instance, Class<?> iface ) {\n        this.mRouteDispatcher.registerInstance( deliverName, instance, iface );\n    }\n\n    @Override\n    public void registerInstance( Object instance, Class<?> iface ) {\n        this.mRouteDispatcher.registerInstance( instance, iface );\n    }\n\n    @Override\n    public void registerController( String deliverName, Object instance, Class<?> controllerType ) {\n        this.mRouteDispatcher.registerController( deliverName, instance, controllerType );\n    }\n\n    @Override\n    public void registerController( Object instance, Class<?> controllerType ) {\n        this.mRouteDispatcher.registerController( instance, controllerType );\n    }\n\n\n\n\n\n\n    @Override\n    public BroadcastControlConsumer createBroadcastControlConsumer( UMCBroadcastConsumer workAgent, RouteDispatcher routeDispatcher ) {\n        return new WolfMCBConsumer( this, routeDispatcher, workAgent );\n    }\n\n    @Override\n    public BroadcastControlConsumer createBroadcastControlConsumer( UMCBroadcastConsumer workAgent ) {\n        return this.createBroadcastControlConsumer( workAgent, this.getRouteDispatcher() );\n    }\n\n    @Override\n    public BroadcastControlConsumer createBroadcastControlConsumer( UNT unt ) {\n        return this.createBroadcastControlConsumer( 
this.createUlfConsumer( unt ), this.getRouteDispatcher() );\n    }\n\n    @Override\n    public BroadcastControlConsumer createBroadcastControlConsumer( String topic, String ns ) {\n        return this.createBroadcastControlConsumer( this.createUlfConsumer( topic, ns ), this.getRouteDispatcher() );\n    }\n\n    @Override\n    public BroadcastControlConsumer createBroadcastControlConsumer( String topic ) {\n        return this.createBroadcastControlConsumer( this.createUlfConsumer( topic ), this.getRouteDispatcher() );\n    }\n\n\n\n    @Override\n    public BroadcastControlProducer createBroadcastControlProducer( UMCBroadcastProducer workAgent ) {\n        return new WolfMCBProducer( this, workAgent );\n    }\n\n    @Override\n    public BroadcastControlProducer createBroadcastControlProducer() {\n        return this.createBroadcastControlProducer( this.createUlfProducer() );\n    }\n\n\n\n\n\n\n\n\n    @Override\n    public UMCBroadcastNode getUMCBroadcastNode() {\n        return this.mUMCBroadcastNode;\n    }\n\n    @Override\n    public ExtraHeadCoder getExtraHeadCoder() {\n        return this.mUMCBroadcastNode.getExtraHeadCoder();\n    }\n\n    @Override\n    public UMCBroadcastProducer createUlfProducer() {\n        return this.mUMCBroadcastNode.createUlfProducer();\n    }\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( String topic, String ns ) {\n        return this.mUMCBroadcastNode.createUlfConsumer( topic, ns );\n    }\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( String topic ) {\n        return this.mUMCBroadcastNode.createUlfConsumer( topic );\n    }\n\n    @Override\n    public UMCBroadcastConsumer createUlfConsumer( UNT unt ) {\n        return this.mUMCBroadcastNode.createUlfConsumer( unt );\n    }\n\n    @Override\n    public void register( BroadcastProducer producer ) {\n        this.mUMCBroadcastNode.register( producer );\n    }\n\n    @Override\n    public void register( BroadcastConsumer consumer ) 
{\n        this.mUMCBroadcastNode.register( consumer );\n    }\n\n    @Override\n    public void deregister( BroadcastProducer producer ) {\n        this.mUMCBroadcastNode.deregister( producer );\n    }\n\n    @Override\n    public void deregister( BroadcastConsumer consumer ) {\n        this.mUMCBroadcastNode.deregister( consumer );\n    }\n\n    @Override\n    public BroadcastProducer createProducer() {\n        return this.mUMCBroadcastNode.createUlfProducer();\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( String topic, String ns ) {\n        return this.mUMCBroadcastNode.createUlfConsumer( topic, ns );\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( String topic ) {\n        return this.mUMCBroadcastNode.createUlfConsumer( topic );\n    }\n\n    @Override\n    public BroadcastConsumer createConsumer( UNT unt ) {\n        return this.mUMCBroadcastNode.createUlfConsumer( unt );\n    }\n\n    @Override\n    public void execute() throws Exception {\n\n    }\n\n    @Override\n    public void close() {\n        this.mUMCBroadcastNode.close();\n    }\n\n    @Override\n    public void terminate() {\n        this.close();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/WolfMCBConsumer.java",
    "content": "package com.pinecone.hydra.umb.wolf;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlNode;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\nimport com.pinecone.hydra.umct.husky.machinery.RouteDispatcher;\n\npublic class WolfMCBConsumer extends ArchBroadcastControlAgent implements BroadcastControlConsumer {\n    protected RouteDispatcher               mRouteDispatcher;\n\n    protected UMCBroadcastConsumer          mBroadcastConsumer;\n\n    public WolfMCBConsumer ( BroadcastControlNode controlNode, RouteDispatcher routeDispatcher, UMCBroadcastConsumer broadcastConsumer ) {\n        super( controlNode );\n\n        this.mRouteDispatcher    = routeDispatcher;\n        this.mBroadcastConsumer  = broadcastConsumer;\n    }\n\n    public WolfMCBConsumer ( BroadcastControlNode controlNode, UMCBroadcastConsumer broadcastConsumer ) {\n        this( controlNode, controlNode.getRouteDispatcher(), broadcastConsumer );\n    }\n\n\n\n\n    @Override\n    public void start() throws UMBServiceException {\n        this.start( this.mRouteDispatcher.getUMCTExpress() );\n    }\n\n    @Override\n    public void start( UMCTExpressHandler handler ) throws UMBServiceException {\n        this.mBroadcastConsumer.start( handler );\n    }\n\n    @Override\n    public void close() {\n        this.mBroadcastConsumer.close();\n    }\n\n\n\n\n\n    @Override\n    public void registerInstance( String deliverName, Object instance, Class<?> iface ) {\n        this.mRouteDispatcher.registerInstance( deliverName, instance, iface );\n    }\n\n    @Override\n    public void registerInstance( Object instance, Class<?> iface ) {\n        this.mRouteDispatcher.registerInstance( instance, iface );\n    }\n\n    @Override\n    public void registerController( String deliverName, Object instance, 
Class<?> controllerType ) {\n        this.mRouteDispatcher.registerController( deliverName, instance, controllerType );\n    }\n\n    @Override\n    public void registerController( Object instance, Class<?> controllerType ) {\n        this.mRouteDispatcher.registerController( instance, controllerType );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-broadcast/src/main/java/com/pinecone/hydra/umb/wolf/WolfMCBProducer.java",
    "content": "package com.pinecone.hydra.umb.wolf;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umb.broadcast.proxy.GenericIfaceProxyFactory;\nimport com.pinecone.hydra.umb.broadcast.proxy.IfaceProxyFactory;\n\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfMBInformMessage;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlNode;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UNT;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\n\npublic class WolfMCBProducer extends ArchUlfBroadcastControlAgent implements UlfBroadcastControlProducer {\n    protected UMCBroadcastProducer          mBroadcastProducer;\n\n    protected IfaceProxyFactory             mIfaceProxyFactory;\n\n    public WolfMCBProducer ( BroadcastControlNode controlNode, UMCBroadcastProducer broadcastProducer ) {\n        super( controlNode );\n\n        this.mBroadcastProducer = broadcastProducer;\n        this.mIfaceProxyFactory = new GenericIfaceProxyFactory( this );\n    }\n\n\n    @Override\n    public void issueInform( UNT unt, String name, MethodPrototype method, Object[] args ) throws IOException {\n        DynamicMessage message = this.reinterpretMsg( method, args );\n\n        this.mBroadcastProducer.sendMessage( unt, name, new UlfMBInformMessage( message.toByteArray() ) );\n    }\n\n    @Override\n    public void issueInform( String topic, String ns, String name, MethodPrototype method, Object[] args ) throws IOException {\n        DynamicMessage message = this.reinterpretMsg( method, args );\n\n        this.mBroadcastProducer.sendMessage( topic, ns, name, new UlfMBInformMessage( message.toByteArray() ) );\n    }\n\n    @Override\n    public void issueInform( String topic, MethodPrototype method, Object[] args ) throws IOException {\n        DynamicMessage message = this.reinterpretMsg( method, args );\n\n        
this.mBroadcastProducer.sendMessage( topic, new UlfMBInformMessage( message.toByteArray() ) );\n    }\n\n    @Override\n    public void issueInform( String topic, String szMethodAddress, Object... args ) throws IOException {\n        this.issueInform( topic, this.queryMethodPrototype( szMethodAddress ), args );\n    }\n\n    @Override\n    public <T> T getIface( Class<T> iface, String topic, String ns, String name ) {\n        return this.mIfaceProxyFactory.createProxy( iface, topic, ns, name );\n    }\n\n    @Override\n    public void close() {\n        this.mBroadcastProducer.close();\n    }\n\n    @Override\n    public void start() throws UMBServiceException {\n        this.mBroadcastProducer.start();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-message-control</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        
</dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n\n        <dependency>\n            <groupId>io.netty</groupId>\n            <artifactId>netty-all</artifactId>\n            <version>4.1.80.Final</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.javassist</groupId>\n            <artifactId>javassist</artifactId>\n            <version>3.29.0-GA</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/appoints/AppointNodus.java",
    "content": "package com.pinecone.hydra.appoints;\n\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.hydra.umc.msg.Messagus;\n\npublic interface AppointNodus extends Messagus {\n\n    String getName();\n\n    PatriarchalConfig getConfig();\n\n    void close() ;\n\n    void execute() throws Exception ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/ActingDuplexExpress.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.umct.MessageDeliver;\nimport com.pinecone.hydra.umct.MessageExpress;\nimport com.pinecone.hydra.umct.MessageJunction;\n\npublic abstract class ActingDuplexExpress extends ArchDuplexExpress {\n    @Override\n    public String getName() {\n        return null;\n    }\n\n    @Override\n    public MessageJunction getJunction() {\n        return null;\n    }\n\n    @Override\n    public MessageDeliver recruit( String szName ) {\n        return null;\n    }\n\n    @Override\n    public MessageExpress register( Deliver deliver ) {\n        return null;\n    }\n\n    @Override\n    public MessageExpress fired( Deliver deliver ) {\n        return null;\n    }\n\n    @Override\n    public MessageDeliver getDeliver( String szName ) {\n        return null;\n    }\n\n    @Override\n    public boolean hasOwnDeliver( Deliver deliver ) {\n        return false;\n    }\n\n    @Override\n    public boolean hasOwnDeliver( String deliverName ) {\n        return false;\n    }\n\n    @Override\n    public int size() {\n        return 0;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AppointClient.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umct.IlleagalResponseException;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\n\npublic interface AppointClient extends AppointNode {\n    UMCMessage sendSyncMsg( UMCMessage request ) throws IOException;\n\n    UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException ;\n\n    void       sendAsynMsg( UMCMessage request ) throws IOException ;\n\n    void       sendAsynMsg( UMCMessage request, AsynMsgHandler handler ) throws IOException ;\n\n\n    void invokeInformAsyn( MethodPrototype method, Object[] args, AsynMsgHandler handler ) throws IOException;\n\n    void invokeInformAsyn( MethodPrototype method, Object[] args, AsynReturnHandler handler ) throws IOException ;\n\n    Object invokeInform( MethodPrototype method, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException ;\n\n    Object invokeInform( MethodPrototype method, Object... args ) throws IlleagalResponseException, IOException ;\n\n    void invokeInformAsyn( String szMethodAddress, Object[] args, AsynMsgHandler handler ) throws IOException ;\n\n    void invokeInformAsyn( String szMethodAddress, Object[] args, AsynReturnHandler handler ) throws IOException ;\n\n    Object invokeInform( String szMethodAddress, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException ;\n\n    Object invokeInform( String szMethodAddress, Object... args ) throws IlleagalResponseException, IOException ;\n\n    <T> T getIface( Class<T> iface );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AppointNode.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport com.pinecone.hydra.appoints.AppointNodus;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umct.UMCTNode;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery;\n\npublic interface AppointNode extends UMCTNode, AppointNodus {\n\n    MessageNode getMessageNode();\n\n    default long getMessageNodeId() {\n        return getMessageNode().getMessageNodeId();\n    }\n\n    MCTContextMachinery getMCTTransformer();\n\n    InterfacialCompiler getInterfacialCompiler();\n\n    ClassDigest queryClassDigest( String name );\n\n    MethodDigest queryMethodDigest( String name );\n\n    void addClassDigest( ClassDigest that );\n\n    void addMethodDigest( MethodDigest that );\n\n    ClassDigest compile( Class<? > clazz, boolean bAsIface );\n\n    void close();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AppointServer.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.umct.MessageDeliver;\nimport com.pinecone.hydra.umct.MessageExpress;\nimport com.pinecone.hydra.umct.UMCTExpress;\n\npublic interface AppointServer extends AppointNode {\n\n    String DefaultEntityName = \"__DEFAULT__\";\n\n    AppointServer apply( UMCTExpress handler );\n\n    UMCTExpress getUMCTExpress();\n\n    MessageExpress register          ( Deliver deliver ) ;\n\n    MessageExpress fired             ( Deliver deliver ) ;\n\n    MessageDeliver getDeliver        ( String name );\n\n    MessageDeliver getDefaultDeliver ();\n\n    void registerInstance( String deliverName, Object instance, Class<?> iface ) ;\n\n    void registerInstance( Object instance, Class<?> iface ) ;\n\n    void registerController( String deliverName, Object instance, Class<?> controllerType ) ;\n\n    void registerController( Object instance, Class<?> controllerType ) ;\n\n    default void registerController( Object instance ) {\n        this.registerController( instance, instance.getClass() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/ArchAppointNode.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport com.pinecone.framework.system.GenericMasterTaskManager;\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.servgram.ArchServgramium;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.hydra.servgram.Servgramium;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umct.ServiceException;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery;\n\n\npublic abstract class ArchAppointNode extends ArchServgramium implements AppointNode {\n    protected MCTContextMachinery mMCTContextMachinery;\n\n    protected ArchAppointNode( Servgramium sharded ) {\n        super( sharded, true );\n        this.mAffiliateThread       = sharded.getAffiliateThread();\n    }\n\n    protected ArchAppointNode( Servgramium sharded, MCTContextMachinery machinery ) {\n        this( sharded );\n        this.mMCTContextMachinery = machinery;\n    }\n\n    public abstract MessageNode getMessageNode();\n\n    @Override\n    public InterfacialCompiler getInterfacialCompiler() {\n        return this.mMCTContextMachinery.getInterfacialCompiler();\n    }\n\n    @Override\n    public MCTContextMachinery getMCTTransformer() {\n        return this.mMCTContextMachinery;\n    }\n\n\n    @Override\n    public Thread getAffiliateThread() {\n        return this.getMessageNode().getAffiliateThread();\n    }\n\n    @Override\n    public ArchProcessum setThreadAffinity( Thread affinity ) {\n        this.getMessageNode().setThreadAffinity( affinity );\n        return super.setThreadAffinity(affinity);\n    }\n\n    @Override\n    public boolean isTerminated() {\n        return this.getMessageNode().isTerminated();\n    }\n\n   
 @Override\n    public void interrupt() {\n        this.getMessageNode().interrupt();\n    }\n\n    @Override\n    public void kill() {\n        this.getMessageNode().kill();\n    }\n\n    @Override\n    public Processum parentExecutum() {\n        return (Processum) this.getMessageNode().parentExecutum();\n    }\n\n    @Override\n    public void apoptosis() {\n        this.getMessageNode().apoptosis();\n    }\n\n    @Override\n    public GenericMasterTaskManager getTaskManager() {\n        return (GenericMasterTaskManager) this.getMessageNode().getTaskManager();\n    }\n\n    @Override\n    public void execute() throws ServiceException {\n        try{\n            ( (Servgram) this.getMessageNode() ).execute();\n        }\n        catch ( Exception e ) {\n            throw new ServiceException( e );\n        }\n    }\n\n\n\n    @Override\n    public ClassDigest queryClassDigest( String name ) {\n        return this.mMCTContextMachinery.queryClassDigest( name );\n    }\n\n    @Override\n    public MethodDigest queryMethodDigest( String name ) {\n        return this.mMCTContextMachinery.queryMethodDigest( name );\n    }\n\n    @Override\n    public void addClassDigest( ClassDigest that ) {\n        this.mMCTContextMachinery.addClassDigest( that );\n    }\n\n    @Override\n    public void addMethodDigest( MethodDigest that ) {\n        this.mMCTContextMachinery.addMethodDigest( that );\n    }\n\n    @Override\n    public ClassDigest compile( Class<? > clazz, boolean bAsIface ) {\n        return this.mMCTContextMachinery.compile( clazz, bAsIface );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/ArchDuplexExpress.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport java.io.IOException;\n\nimport org.slf4j.Logger;\n\nimport com.pinecone.hydra.uma.pool.GenericMultiClientChannelRegistry;\nimport com.pinecone.hydra.express.Package;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.umc.msg.ChannelAllocateException;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.FairChannelPool;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.MessageNodus;\nimport com.pinecone.hydra.umc.msg.MultiClientChannelRegistry;\nimport com.pinecone.hydra.umc.msg.RecipientChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.UMCChannel;\nimport com.pinecone.hydra.umc.msg.UMCConstants;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.UlfChannelStatus;\nimport com.pinecone.hydra.umc.wolf.UlfMessageNode;\nimport com.pinecone.hydra.umc.wolf.WolfMCStandardConstants;\nimport com.pinecone.hydra.umc.wolf.server.RecipientNettyChannelControlBlock;\nimport com.pinecone.hydra.umct.DuplexExpress;\nimport com.pinecone.hydra.umct.MessageExpress;\nimport com.pinecone.hydra.umct.ServiceInternalException;\nimport com.pinecone.hydra.umct.UMCConnection;\nimport com.pinecone.hydra.umct.UlfConnection;\nimport com.pinecone.hydra.umct.husky.HuskyCTPConstants;\n\nimport io.netty.channel.Channel;\nimport io.netty.util.AttributeKey;\n\npublic abstract class ArchDuplexExpress implements DuplexExpress, MessageExpress, Slf4jTraceable {\n    protected Logger                            mLogger      ;\n    protected MultiClientChannelRegistry<Long > mMultiClientChannelRegistry;\n\n    protected ArchDuplexExpress() {\n        this.mMultiClientChannelRegistry = new 
GenericMultiClientChannelRegistry<>();\n    }\n\n    public ArchDuplexExpress( Logger logger ) {\n        this();\n        this.mLogger = logger;\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    protected UMCConnection wrap( Package that ) {\n        return (UMCConnection) that;\n    }\n\n    @Override\n    public UMCMessage processResponse( UMCMessage request, UMCMessage response ) {\n        if ( request.getHead().getControlBits() == HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_REQUEST ) {\n            response.getHead().setControlBits( HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_RESPONSE );\n        }\n\n        return response;\n    }\n\n    protected abstract void onSuccessfulMsgReceived( UMCConnection connection, Object[] args ) throws Exception ;\n\n    protected boolean handleDuplexControlMessage( UMCConnection connection, Object[] args ) throws Exception {\n        if ( this.interceptPassiveChannel( connection, args ) ) {\n            return true;\n        }\n        if ( this.interceptHandlePassiveResponse( connection, args ) ) {\n            return true;\n        }\n\n        return false;\n    }\n\n    @Override\n    public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n        UlfConnection connection = new UlfConnection(  medium, msg, transmit, receiver, args );\n        if ( this.handleDuplexControlMessage( connection, args ) ) {\n            return;\n        }\n\n        this.onSuccessfulMsgReceived( connection, args );\n    }\n\n    protected boolean interceptHandlePassiveResponse( UMCConnection connection, Object[] args ) throws ServiceInternalException {\n        UMCConnection uc          = this.wrap( connection );\n        UMCMessage msg            = uc.getMessage();\n        int controlBits           = msg.getHead().getControlBits();\n\n        // Notice:\n        // For duplex passive channel, it is 
necessary to use control-bits markers and explicitly call the `handler`, otherwise it will be intercepted by the `express`.\n        // 对于双工被动链路，必须走VIP通道使用控制位标记，并显式调用绑定的回调函数，不然会被总线错误拦截。\n\n        if ( controlBits == HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_RESPONSE ) {\n            RecipientChannelControlBlock cb = (RecipientChannelControlBlock)args[ 0 ];\n            Channel channel = (Channel)cb.getChannel().getNativeHandle();\n\n            long nWaitMillis;\n            MessageNodus nodus = connection.getMessageSource().getMessageNode();\n            if ( nodus != null ) {\n                nWaitMillis = nodus.getMessageNodeConfig().getSyncWaitingMillis();\n            }\n            else {\n                nWaitMillis = UMCConstants.DefaultSyncWaitingMillis;\n            }\n\n            try {\n                UlfAsyncMsgHandleAdapter handle = (UlfAsyncMsgHandleAdapter) channel.attr(\n                        AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY )\n                ).get();\n                if ( handle == null ) {\n                    handle = cb.pollMsgHandle( nWaitMillis );\n                }\n                if ( handle == null ) {\n                    throw new ServiceInternalException( \"Undefined MsgHandle.\" );\n                }\n\n\n\n                try {\n                    handle.onSuccessfulMsgReceived( connection.getMessageSource(), connection.getTransmit(), connection.getReceiver(), msg, args );\n                }\n                catch ( Exception e ) {\n                    throw new ServiceInternalException( e );\n                }\n            }\n            catch ( InterruptedException e ) {\n                throw new ServiceInternalException( e );\n            }\n\n            return true;\n        }\n        return false;\n    }\n\n\n    protected boolean interceptPassiveChannel( UMCConnection connection, Object[] args ) {\n        UMCConnection uc          = this.wrap( connection );\n        UMCMessage 
msg            = uc.getMessage();\n        int controlBits           = msg.getHead().getControlBits();\n        if ( controlBits == HuskyCTPConstants.HCTP_DUP_CONTROL_REGISTER ) {\n            this.registerPassiveChannel( uc, connection, args );\n            return true;\n        }\n\n        return false;\n    }\n\n    protected void registerPassiveChannel( UMCConnection uc, UMCConnection connection, Object[] args ) {\n        ChannelControlBlock ccb = (ChannelControlBlock) args[ 0 ];\n        UMCChannel channel = ccb.getChannel();\n        long                cid = channel.getIdentityID();\n\n        this.mMultiClientChannelRegistry.register( cid, ccb );\n        this.getLogger().info( \"[PassiveChannel] [ClientId: {}, ChannelId: {}] <{}>\", cid, ccb.getChannel().getChannelID(), \"Registered\" );\n    }\n\n\n    static void reconnect( ChannelControlBlock block, long mils ) throws IOException {\n        if( block.isShutdown() ) {\n            block.getChannel().reconnect( mils );\n            ( (UlfMessageNode)block.getParentMessageNode() ).getChannelPool().setIdleChannel( block );\n        }\n    }\n\n    RecipientNettyChannelControlBlock nextAsyChannelCB( FairChannelPool pool ) throws IOException {\n        RecipientNettyChannelControlBlock block = (RecipientNettyChannelControlBlock) pool.nextAsynChannel( pool.getMajorWaitTimeout() * 2 );\n        if( block == null ) {\n            throw new ChannelAllocateException( \"Channel allocate failed.\" );\n        }\n        reconnect( block, pool.getMajorWaitTimeout() );\n        return block;\n    }\n\n    @Override\n    public void afterChannelInactive( ChannelControlBlock controlBlock ) {\n        this.mMultiClientChannelRegistry.deregister( controlBlock.getChannel().getIdentityID(), controlBlock );\n    }\n\n    @Override\n    public ChannelPool getPoolByClientId( long clientId ) {\n        return this.mMultiClientChannelRegistry.getPool( clientId );\n    }\n\n    @Override\n    public void sendAsynMsg( long 
clientId, UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException, IllegalArgumentException {\n        try{\n            ChannelPool pool = this.mMultiClientChannelRegistry.getPool( clientId );\n            if ( pool == null ) {\n                throw new IllegalArgumentException( \"No such client \" + clientId );\n            }\n\n            FairChannelPool fp = (FairChannelPool) pool;\n            RecipientNettyChannelControlBlock cb = this.nextAsyChannelCB( fp );\n            if ( handler != null ) {\n                cb.pushMsgHandle( handler );\n                //cb.getChannel().getNativeHandle().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).set( handler );\n            }\n            cb.getChannel().setChannelStatus( UlfChannelStatus.WAITING_PASSIVE_SEND );\n            cb.getTransmit().sendMsg( request, bNoneBuffered );\n            cb.getChannel().setChannelStatus( UlfChannelStatus.WAITING_PASSIVE_RECEIVE );\n        }\n        catch ( ChannelAllocateException e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, AsynMsgHandler handler ) throws IOException {\n        this.sendAsynMsg( clientId, request, bNoneBuffered, AsynMsgHandler.wrap( handler ) );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/ArchUlfAppointNode.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.google.protobuf.InvalidProtocolBufferException;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.hydra.servgram.Servgramium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umct.IlleagalResponseException;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.machinery.MCTContextMachinery;\nimport com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\n\npublic abstract class ArchUlfAppointNode extends ArchAppointNode implements UlfAppointNode {\n    protected ArchUlfAppointNode( Servgramium sharded ) {\n        super( sharded );\n    }\n\n    protected ArchUlfAppointNode( Servgramium sharded, MCTContextMachinery machinery ) {\n        super( sharded, machinery );\n    }\n\n    @Override\n    public ProtoInterfacialCompiler getInterfacialCompiler() {\n        return (ProtoInterfacialCompiler) super.getInterfacialCompiler();\n    }\n\n    @Override\n    public PMCTContextMachinery getMCTTransformer() {\n        return (PMCTContextMachinery) super.getMCTTransformer();\n    }\n\n    @Override\n    public FieldProtobufEncoder getFieldProtobufEncoder() {\n        return this.getMCTTransformer().getFieldProtobufEncoder();\n    }\n\n    @Override\n    public FieldProtobufDecoder getFieldProtobufDecoder() {\n        return this.getMCTTransformer().getFieldProtobufDecoder();\n    }\n\n\n\n\n    protected CompilerEncoder getCompilerEncoder() {\n        return 
this.getInterfacialCompiler().getCompilerEncoder();\n    }\n\n    protected DynamicMessage reinterpretMsg( MethodPrototype prototype, Object[] args ) {\n        FieldProtobufEncoder encoder = this.getFieldProtobufEncoder();\n        Descriptors.Descriptor descriptor = prototype.getArgumentsDescriptor();\n\n        FieldEntity[] types = prototype.getArgumentTemplate().getSegments();\n        for ( int i = 0; i < args.length; ++i ) {\n            types[ i + 1 ].setValue( args [ i ] );\n        }\n\n        return encoder.encode(\n                descriptor, types, this.getCompilerEncoder().getExceptedKeys(), this.getCompilerEncoder().getOptions()\n        );\n    }\n\n    public Object unmarshalResponse( MethodPrototype digest, byte[] raw ) throws IlleagalResponseException {\n        try{\n            Descriptors.Descriptor retDes = digest.getReturnDescriptor();\n            if ( retDes == null ) {\n                // undefined response for `void` type-return.\n                if ( digest.getReturnType() == void.class || digest.getReturnType() == Void.class ) {\n                    return null;\n                }\n\n                throw new IlleagalResponseException( \"Illegal undefined return type, what => \" + digest.getReturnType() );\n            }\n            DynamicMessage rm = DynamicMessage.parseFrom( retDes, raw );\n            FieldProtobufDecoder decoder = this.getMCTTransformer().getFieldProtobufDecoder();\n            return decoder.decode(\n                    digest.getReturnType(), digest.getGenericReturnTypeLabel(), retDes, rm, this.getCompilerEncoder().getExceptedKeys(), this.getCompilerEncoder().getOptions()\n            );\n        }\n        catch ( InvalidProtocolBufferException e ) {\n            throw new IlleagalResponseException( e );\n        }\n    }\n\n    public Object unmarshalResponse( MethodPrototype digest, UMCMessage msg ) throws IlleagalResponseException {\n        return this.unmarshalResponse( digest, (byte[]) 
msg.getHead().getExtraHead() );\n    }\n\n    protected DynamicMethodPrototype queryMethodPrototype(String szMethodAddress ) {\n        DynamicMethodPrototype method = (DynamicMethodPrototype) this.queryMethodDigest( szMethodAddress );\n        if ( method == null ) {\n            throw new IllegalArgumentException( \"Method address `\" + szMethodAddress + \"` is invalid.\" );\n        }\n\n        return method;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AsynMsgHandler.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\n\nimport io.netty.channel.ChannelHandlerContext;\n\n\npublic interface AsynMsgHandler extends Pinenut {\n    default void onSuccessfulMsgReceived( UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg ) throws Exception {\n        this.onSuccessfulMsgReceived( msg );\n    }\n\n    void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception ;\n\n    default void onErrorMsgReceived( UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg ) throws Exception {\n        this.onErrorMsgReceived( msg );\n    }\n\n    void onErrorMsgReceived( UMCMessage msg ) throws Exception ;\n\n    default void onError( Object data, Throwable cause ) {\n\n    }\n\n    static UlfAsyncMsgHandleAdapter wrap( AsynMsgHandler handler ) throws IOException {\n        return new UlfAsyncMsgHandleAdapter() {\n            @Override\n            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n                handler.onSuccessfulMsgReceived( block.getTransmit(), block.getReceiver(), msg );\n            }\n\n            @Override\n            public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n                handler.onSuccessfulMsgReceived( transmit, receiver, msg );\n            }\n\n            @Override\n            public void onErrorMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) 
throws Exception {\n                handler.onErrorMsgReceived( block.getTransmit(), block.getReceiver(), msg );\n            }\n\n            @Override\n            public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n                handler.onErrorMsgReceived( transmit, receiver, msg );\n            }\n\n            @Override\n            public void onError( Object data, Throwable cause ) {\n                handler.onError( data, cause );\n            }\n        };\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/AsynReturnHandler.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic interface AsynReturnHandler extends Pinenut {\n\n    void onSuccessfulReturn( Object ret ) throws Exception ;\n\n    void onErrorMsgReceived( UMCMessage msg ) throws Exception ;\n\n    default void onError( Object data, Throwable cause ) {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/DuplexAppointClient.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\nimport com.pinecone.hydra.umct.husky.machinery.RouteDispatcher;\n\npublic interface DuplexAppointClient extends AppointClient, DuplexAppointNode {\n\n    void createPassiveChannel( int nLine );\n\n    void embraces( int nLine, UlfAsyncMsgHandleAdapter handler ) throws IOException;\n\n    void embraces( int nLine, UMCTExpressHandler handler ) throws IOException;\n\n    void embraces( int nLine ) throws IOException ;\n\n    RouteDispatcher getRouteDispatcher();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/DuplexAppointNode.java",
    "content": "package com.pinecone.hydra.uma;\n\npublic interface DuplexAppointNode extends AppointNode {\n    boolean supportDuplex();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/DuplexAppointServer.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umct.DuplexExpress;\nimport com.pinecone.hydra.umct.IlleagalResponseException;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\n\npublic interface DuplexAppointServer extends AppointServer, DuplexAppointNode {\n\n    @Override\n    DuplexExpress getUMCTExpress();\n\n    void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException;\n\n    void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, AsynMsgHandler handler ) throws IOException;\n\n    void sendAsynMsg( long clientId, UMCMessage request, AsynMsgHandler handler ) throws IOException;\n\n\n\n    void invokeInformAsyn( long clientId, MethodPrototype method, Object[] args, AsynMsgHandler handler ) throws IOException ;\n\n    void invokeInformAsyn( long clientId, MethodPrototype method, Object[] args, AsynReturnHandler handler ) throws IOException ;\n\n    void invokeInformAsyn( long clientId, String szMethodAddress, Object[] args, AsynMsgHandler handler ) throws IOException ;\n\n    void invokeInformAsyn( long clientId, String szMethodAddress, Object[] args, AsynReturnHandler handler ) throws IOException ;\n\n\n\n    Object invokeInform( long clientId, MethodPrototype method, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException ;\n\n    Object invokeInform( long clientId, MethodPrototype method, Object... args ) throws IlleagalResponseException, IOException ;\n\n    Object invokeInform( long clientId, String szMethodAddress, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException ;\n\n    Object invokeInform( long clientId, String szMethodAddress, Object... 
args ) throws IlleagalResponseException, IOException ;\n\n\n\n    <T> T getIface( long clientId, Class<T> iface );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/HuskyDuplexExpress.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umct.MessageDeliver;\nimport com.pinecone.hydra.umct.MessageJunction;\nimport com.pinecone.hydra.umct.UMCConnection;\nimport com.pinecone.hydra.umct.WolfMCExpress;\n\n\npublic class HuskyDuplexExpress extends ArchDuplexExpress {\n    protected WolfMCExpress                     mFriendExpress;\n\n    public HuskyDuplexExpress( String name, MessageJunction messagram, Logger logger ) {\n        super();\n        this.mFriendExpress = new HuskyMCDuplexExpress( name, messagram, logger, this );\n        this.mLogger = this.mFriendExpress.getLogger();\n    }\n\n    public HuskyDuplexExpress( String name, MessageJunction messagram ) {\n        this( name, messagram, LoggerFactory.getLogger( HuskyDuplexExpress.class.getName() ) );\n    }\n\n    public HuskyDuplexExpress( MessageJunction messagram ) {\n        this( null, messagram );\n    }\n\n\n    @Override\n    public UMCMessage processResponse( UMCMessage request, UMCMessage response ) {\n        response = this.mFriendExpress.processResponse( request, response );\n        response = super.processResponse( request, response );\n        // Maintain the chain-of-responsibility.\n        // 保持责任链，确保每一层级的处理序.\n\n        return response;\n    }\n\n    @Override\n    protected void onSuccessfulMsgReceived( UMCConnection connection, Object[] args ) throws Exception {\n        // Dummy\n    }\n\n    @Override\n    public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n        this.mFriendExpress.onSuccessfulMsgReceived( medium, transmit, receiver, msg, args );\n    }\n\n    
@Override\n    public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n        this.mFriendExpress.onErrorMsgReceived( medium, transmit, receiver, msg, args );\n    }\n\n    @Override\n    public void onError( Object data, Throwable cause ) {\n        this.mFriendExpress.onError( data, cause );\n    }\n\n    @Override\n    public String getName() {\n        return this.mFriendExpress.getName();\n    }\n\n    @Override\n    public MessageJunction getJunction() {\n        return this.mFriendExpress.getJunction();\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mFriendExpress.getLogger();\n    }\n\n    @Override\n    public MessageDeliver   recruit ( String szName ) {\n        return this.mFriendExpress.recruit( szName );\n    }\n\n    @Override\n    public HuskyDuplexExpress register    ( Deliver deliver ) {\n        this.mFriendExpress.register( deliver );\n        return this;\n    }\n\n    @Override\n    public HuskyDuplexExpress   fired       ( Deliver deliver ) {\n        this.mFriendExpress.fired( deliver );\n        return this;\n    }\n\n    @Override\n    public MessageDeliver getDeliver    ( String szName ) {\n        return this.mFriendExpress.getDeliver( szName );\n    }\n\n    @Override\n    public boolean hasOwnDeliver( Deliver deliver ) {\n        return this.mFriendExpress.hasOwnDeliver( deliver );\n    }\n\n    @Override\n    public boolean hasOwnDeliver( String deliverName ) {\n        return this.mFriendExpress.hasOwnDeliver( deliverName );\n    }\n\n    @Override\n    public int size() {\n        return this.mFriendExpress.size();\n    }\n\n\n\n    static class HuskyMCDuplexExpress extends WolfMCExpress {\n        private HuskyDuplexExpress husky;\n\n        public HuskyMCDuplexExpress( String name, MessageJunction messagram, Logger logger, HuskyDuplexExpress self ) {\n            super( name, messagram );\n\n            this.husky  
 = self;\n            this.mLogger = logger;\n        }\n\n        @Override\n        protected void onSuccessfulMsgReceived( UMCConnection connection, Object[] args ) throws Exception {\n            boolean isDuplexControlMessage = this.husky.handleDuplexControlMessage( connection, args );\n            if ( !isDuplexControlMessage ) {\n                super.onSuccessfulMsgReceived( connection, args );\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfAppointClient.java",
    "content": "package com.pinecone.hydra.uma;\n\npublic interface UlfAppointClient extends AppointClient{\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfAppointNode.java",
    "content": "package com.pinecone.hydra.uma;\n\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\n\npublic interface UlfAppointNode extends AppointNode {\n\n    @Override\n    PMCTContextMachinery getMCTTransformer();\n\n    @Override\n    ProtoInterfacialCompiler getInterfacialCompiler();\n\n    default FieldProtobufEncoder getFieldProtobufEncoder() {\n        return this.getInterfacialCompiler().getCompilerEncoder().getEncoder();\n    }\n\n    FieldProtobufDecoder getFieldProtobufDecoder();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfAppointServer.java",
    "content": "package com.pinecone.hydra.uma;\n\npublic interface UlfAppointServer extends AppointServer {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfDuplexAppointClient.java",
    "content": "package com.pinecone.hydra.uma;\n\npublic interface UlfDuplexAppointClient extends DuplexAppointClient {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/UlfDuplexAppointServer.java",
    "content": "package com.pinecone.hydra.uma;\n\npublic interface UlfDuplexAppointServer extends DuplexAppointServer {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/pool/GenericMultiClientChannelRegistry.java",
    "content": "package com.pinecone.hydra.uma.pool;\n\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.locks.Lock;\nimport java.util.concurrent.locks.ReentrantLock;\n\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.FairChannelPool;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umc.msg.MultiClientChannelRegistry;\nimport com.pinecone.hydra.umc.wolf.UlfIOLoadBalanceStrategy;\nimport com.pinecone.hydra.umc.wolf.UlfIdleFirstBalanceStrategy;\nimport com.pinecone.hydra.umc.wolf.client.ProactiveParallelFairChannelPool;\n\npublic class GenericMultiClientChannelRegistry<CID > implements MultiClientChannelRegistry<CID > {\n    protected static final UlfIOLoadBalanceStrategy LoadBalanceStrategy = new UlfIdleFirstBalanceStrategy();\n\n    protected Lock                          mPoolLock;\n\n    protected Map<CID, FairChannelPool >    mClientChannelRegistry;\n\n    public GenericMultiClientChannelRegistry() {\n        this.mClientChannelRegistry = new ConcurrentHashMap<>();\n        this.mPoolLock              = new ReentrantLock();\n    }\n\n    @Override\n    public void register( CID id, ChannelControlBlock controlBlock ) {\n        FairChannelPool pool = this.mClientChannelRegistry.computeIfAbsent( id, (k)->{\n            return new ProactiveParallelFairChannelPool<>( LoadBalanceStrategy );\n        } );\n        pool.add( controlBlock );\n    }\n\n    @Override\n    public void deregister( CID id, ChannelControlBlock controlBlock ) {\n        FairChannelPool pool = this.mClientChannelRegistry.computeIfPresent( id, (k, v)->{\n            v.remove( controlBlock );\n            if ( v.isEmpty() ) {\n                MessageNode messageNode = controlBlock.getParentMessageNode();\n                if ( messageNode instanceof Slf4jTraceable) {\n        
            ((Slf4jTraceable) messageNode).getLogger().info( \"Client `{}` is detached.\", id );\n                }\n                return null;\n            }\n\n            return v;\n        } );\n    }\n\n    @Override\n    public void deregister( CID id ) {\n        FairChannelPool pool = this.mClientChannelRegistry.remove( id );\n        if ( pool != null ) {\n            pool.clear(); // All channels should be closed in this method, in principle.\n        }\n    }\n\n    @Override\n    public ChannelPool getPool( CID id ) {\n        return this.mClientChannelRegistry.get( id );\n    }\n\n    @Override\n    public int size() {\n        return this.mClientChannelRegistry.size();\n    }\n\n    @Override\n    public void clear() {\n        this.mPoolLock.lock();\n        try{\n            for( FairChannelPool pool : this.mClientChannelRegistry.values() ) {\n                pool.clear(); // All channels should be closed in this method, in principle.\n            }\n            this.mClientChannelRegistry.clear();\n        }\n        finally {\n            this.mPoolLock.unlock();\n        }\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mClientChannelRegistry.isEmpty();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/proxy/GenericIfaceProxyFactory.java",
    "content": "package com.pinecone.hydra.uma.proxy;\n\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\nimport com.pinecone.hydra.umct.proxy.UMCTHub;\nimport org.springframework.cglib.proxy.Enhancer;\nimport org.springframework.cglib.proxy.MethodInterceptor;\nimport org.springframework.cglib.proxy.MethodProxy;\n\nimport java.lang.reflect.Method;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.uma.AppointClient;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;\nimport com.pinecone.hydra.umct.stereotype.IfaceUtils;\n\npublic class GenericIfaceProxyFactory implements IfaceProxyFactory {\n    protected final ConcurrentHashMap<Class<?>, Enhancer> mEnhancerCache = new ConcurrentHashMap<>();\n\n    protected AppointClient mClient;\n\n    public GenericIfaceProxyFactory( AppointClient client ) {\n        this.mClient = client;\n    }\n\n    @Override\n    public <T> T createProxy( AppointClient client, ClassDigest classDigest, Class<T> iface ) {\n//        if (!iface.isInterface()) {\n//            throw new IllegalArgumentException(\"The provided class must be an interface.\");\n//        }\n\n        Enhancer enhancer = this.mEnhancerCache.computeIfAbsent(iface, clazz -> {\n            Enhancer e = new Enhancer();\n            e.setSuperclass(UMCTHub.class);\n            e.setInterfaces( new Class[]{iface} );\n\n            e.setCallback(new MethodInterceptor() {\n                @Override\n                public Object intercept( Object obj, Method method, Object[] args, MethodProxy proxy ) throws Throwable {\n                    String methodName = IfaceUtils.getIfaceMethodName( method );\n                    MethodPrototype methodPrototype = (DynamicMethodPrototype) client.queryMethodDigest(\n                            classDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + methodName\n  
                  );\n                    return client.invokeInform( methodPrototype, args );\n                }\n            });\n            return e;\n        });\n\n        return iface.cast( enhancer.create() );\n    }\n\n    @Override\n    public <T> T createProxy( AppointClient client, Class<T> iface ) {\n        ClassDigest classDigest = client.queryClassDigest( IfaceUtils.queryIfaceClassNameAddress( iface ) );\n\n        return this.createProxy( client, classDigest, iface );\n    }\n\n    @Override\n    public <T> T createProxy( Class<T> iface ) {\n        return this.createProxy( this.mClient, iface );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/proxy/GenericPassiveClientIfaceProxyFactory.java",
    "content": "package com.pinecone.hydra.uma.proxy;\n\nimport java.lang.reflect.Method;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\nimport com.pinecone.hydra.umct.proxy.UMCTHub;\nimport org.springframework.cglib.proxy.Enhancer;\nimport org.springframework.cglib.proxy.MethodInterceptor;\nimport org.springframework.cglib.proxy.MethodProxy;\n\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;\nimport com.pinecone.hydra.umct.stereotype.IfaceUtils;\n\npublic class GenericPassiveClientIfaceProxyFactory implements PassiveClientIfaceProxyFactory {\n    protected final ConcurrentHashMap<Class<?>, Enhancer> mEnhancerCache = new ConcurrentHashMap<>();\n\n    protected DuplexAppointServer mServer;\n\n    public GenericPassiveClientIfaceProxyFactory( DuplexAppointServer server ) {\n        this.mServer = server;\n    }\n\n    @Override\n    public <T> T createProxy( long clientId, DuplexAppointServer server, ClassDigest classDigest, Class<T> iface ) {\n        Enhancer enhancer = this.mEnhancerCache.computeIfAbsent(iface, clazz -> {\n            Enhancer e = new Enhancer();\n            e.setSuperclass( UMCTHub.class );\n            e.setInterfaces( new Class[]{iface} );\n\n            e.setCallback(new MethodInterceptor() {\n                @Override\n                public Object intercept( Object obj, Method method, Object[] args, MethodProxy proxy ) throws Throwable {\n                    String methodName = IfaceUtils.getIfaceMethodName( method );\n                    MethodPrototype methodPrototype = (DynamicMethodPrototype) server.queryMethodDigest(\n                            classDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + methodName\n                    );\n                    return server.invokeInform( 
clientId, methodPrototype, args );\n                }\n            });\n            return e;\n        });\n\n        return iface.cast( enhancer.create() );\n    }\n\n    @Override\n    public <T> T createProxy( long clientId, DuplexAppointServer server, Class<T> iface ) {\n        ClassDigest classDigest = server.queryClassDigest( IfaceUtils.queryIfaceClassNameAddress( iface ) );\n\n        return this.createProxy( clientId, server, classDigest, iface );\n    }\n\n    @Override\n    public <T> T createProxy( long clientId, Class<T> iface ) {\n        return this.createProxy( clientId, this.mServer, iface );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/proxy/IfaceProxyFactory.java",
    "content": "package com.pinecone.hydra.uma.proxy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.uma.AppointClient;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\n\npublic interface IfaceProxyFactory extends Pinenut {\n    <T> T createProxy( AppointClient client, ClassDigest classDigest, Class<T> iface ) ;\n\n    <T> T createProxy( AppointClient client, Class<T> iface ) ;\n\n    <T> T createProxy( Class<T> iface );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/proxy/PassiveClientIfaceProxyFactory.java",
    "content": "package com.pinecone.hydra.uma.proxy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\n\npublic interface PassiveClientIfaceProxyFactory extends Pinenut {\n    <T> T createProxy( long clientId, DuplexAppointServer server, ClassDigest classDigest, Class<T> iface ) ;\n\n    <T> T createProxy( long clientId, DuplexAppointServer server, Class<T> iface ) ;\n\n    <T> T createProxy( long clientId, Class<T> iface );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolfAppointClient.java",
    "content": "package com.pinecone.hydra.uma.wolf;\n\nimport java.io.IOException;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.TimeoutException;\n\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.hydra.uma.UlfAppointClient;\nimport com.pinecone.hydra.uma.ArchUlfAppointNode;\nimport com.pinecone.hydra.uma.AsynMsgHandler;\nimport com.pinecone.hydra.uma.AsynReturnHandler;\nimport com.pinecone.hydra.uma.proxy.GenericIfaceProxyFactory;\nimport com.pinecone.hydra.uma.proxy.IfaceProxyFactory;\nimport com.pinecone.hydra.servgram.Servgramium;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelHandleException;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.MediumTerminationException;\nimport com.pinecone.hydra.umc.msg.Messenger;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor;\nimport com.pinecone.hydra.umc.msg.event.ChannelEventHandler;\nimport com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler;\nimport com.pinecone.hydra.umc.vita.HeartbeatControl;\nimport com.pinecone.hydra.umc.wolf.UlfInformMessage;\nimport com.pinecone.hydra.umc.wolf.client.ArchAsyncMessenger;\nimport com.pinecone.hydra.umc.wolf.client.ClientConnectArguments;\nimport com.pinecone.hydra.umc.wolf.client.UlfAsyncMessengerChannelControlBlock;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umct.IlleagalResponseException;\nimport com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\nimport 
com.pinecone.hydra.umct.husky.heartbeat.HuskyHeartbeatControl;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyContextMachinery;\nimport com.pinecone.hydra.umct.mapping.BytecodeControllerInspector;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder;\n\nimport io.netty.channel.Channel;\nimport io.netty.channel.ChannelHandlerContext;\nimport javassist.ClassPool;\n\n/**\n *  Pinecone Ursus For Java WolfAppointClient [ Ulfhedinn Wolf RPC Client ]\n *  Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family.\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n */\npublic class WolfAppointClient extends ArchUlfAppointNode implements UlfAppointClient {\n    protected UlfClient              mMessenger;\n\n    protected IfaceProxyFactory      mIfaceProxyFactory;\n\n    protected HeartbeatControl       mHeartbeatControl;\n\n    protected boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException {\n        UlfAsyncMessengerChannelControlBlock cb = (UlfAsyncMessengerChannelControlBlock) ccb;\n        Channel channel = cb.getChannel().getNativeHandle();\n        WolfAppointClient.this.getLogger().info( \"Proactive channel ({}), has detached.\", channel.id() );\n        UlfClient wrappedClient = WolfAppointClient.this.getMessageNode();\n        if ( wrappedClient.getConnectionArguments().isAutoReconnect() ) {\n            try {\n                ArchAsyncMessenger.reconnect( cb, (Messenger) wrappedClient, context );\n\n                WolfAppointClient.this.getLogger().info( \"Proactive Channel ({}, `{}`), reconnect successfully.\", channel.id(), cb.getChannel().getAddress() );\n            }\n            catch ( MediumTerminationException e ) {\n                WolfAppointClient.this.getLogger().info( 
\"Service already terminated with inactive event. <ACK>\" );\n            }\n            catch ( IOException e ) {\n                WolfAppointClient.this.getLogger().error( \"Proactive channel ({}), attempted to reconnect but failed.\", channel.id(), e );\n                throw new ChannelHandleException( e.getCause() );\n            }\n        }\n\n        return true; // Blocking next inactive sequence.\n    }\n\n    protected void registerChannelInactiveHandler () {\n        this.mMessenger.registerChannelInactiveHandler(new ChannelInactiveHandler() {\n            @Override\n            public boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException {\n                this.afterEventTriggered( ccb, context );\n\n                return WolfAppointClient.this.afterChannelInactive( ccb, context );\n            }\n        });\n    }\n\n    protected void registerChannelConnectedHandler () {\n        ClientConnectArguments arguments = WolfAppointClient.this.getMessageNode().getConnectionArguments();\n        this.mMessenger.registerChannelConnectedHandler(new ChannelEventHandler() {\n            @Override\n            public void afterEventTriggered( ChannelControlBlock block, Object context ) {\n                if ( arguments.isEnableHeartbeat() ) {\n                    WolfAppointClient.this.mHeartbeatControl.registerChannel( block, arguments.getHeartbeatInterval() );\n                }\n            }\n        });\n    }\n\n    protected void initUlfClientHeartbeatInterceptors( UlfClient client ) {\n        client.registerArrivedDataInterceptor(new ChannelDataInterceptor() {\n            @Override\n            public boolean interceptAfterDataArrived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) {\n                try {\n                    return WolfAppointClient.this.mHeartbeatControl.interceptFeedback( block, msg );\n                }\n                catch ( 
IOException e ) {\n                    throw new ProvokeHandleException( e );\n                }\n            }\n        });\n    }\n\n    private void initSelf( UlfClient messenger ) {\n        this.mMessenger            = messenger;\n        this.mIfaceProxyFactory    = new GenericIfaceProxyFactory( this );\n\n        ClientConnectArguments arguments = WolfAppointClient.this.getMessageNode().getConnectionArguments();\n        if ( arguments.isEnableHeartbeat() ) {\n            this.mHeartbeatControl = new HuskyHeartbeatControl( arguments.getHeartbeatInterval() );\n            this.registerChannelConnectedHandler();\n            this.initUlfClientHeartbeatInterceptors( messenger );\n        }\n\n        this.registerChannelInactiveHandler();\n    }\n\n    protected WolfAppointClient( UlfClient messenger, boolean delay ){\n        super( (Servgramium) messenger );\n        this.initSelf( messenger );\n    }\n\n    public WolfAppointClient( UlfClient messenger, ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector ){\n        this( messenger, true );\n        this.mMCTContextMachinery = new HuskyContextMachinery( compiler, controllerInspector, new GenericFieldProtobufDecoder() );\n        this.initSelf( messenger );\n    }\n\n    public WolfAppointClient( UlfClient messenger, CompilerEncoder encoder ){\n        this( messenger, new BytecodeIfaceCompiler(\n                ClassPool.getDefault(), messenger.getTaskManager().getClassLoader(), encoder\n        ), new BytecodeControllerInspector(\n                ClassPool.getDefault(), messenger.getTaskManager().getClassLoader()\n        ) );\n    }\n\n    public WolfAppointClient( UlfClient messenger ){\n        this( messenger, new BytecodeIfaceCompiler(\n                ClassPool.getDefault(), messenger.getTaskManager().getClassLoader()\n        ), new BytecodeControllerInspector(\n                ClassPool.getDefault(), messenger.getTaskManager().getClassLoader()\n        ) );\n    }\n\n\n    
@Override\n    public void close() {\n        this.mMessenger.close();\n    }\n\n    @Override\n    public UlfClient getMessageNode() {\n        return this.mMessenger;\n    }\n\n\n    @Override\n    public UMCMessage sendSyncMsg( UMCMessage request ) throws IOException {\n        return this.sendSyncMsg( request, false );\n    }\n\n    @Override\n    public UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException {\n        return this.mMessenger.sendSyncMsg( request, bNoneBuffered );\n    }\n\n    @Override\n    public void sendAsynMsg( UMCMessage request ) throws IOException {\n        this.mMessenger.sendAsynMsg( request );\n    }\n\n    @Override\n    public void sendAsynMsg( UMCMessage request, AsynMsgHandler handler ) throws IOException {\n        this.mMessenger.sendAsynMsg( request, AsynMsgHandler.wrap( handler ) );\n    }\n\n\n    @Override\n    public void invokeInformAsyn( MethodPrototype method, Object[] args, AsynMsgHandler handler ) throws IOException {\n        DynamicMessage message = this.reinterpretMsg( method, args );\n        this.sendAsynMsg( new UlfInformMessage(message.toByteArray()), handler );\n    }\n\n    @Override\n    public void invokeInformAsyn( MethodPrototype method, Object[] args, AsynReturnHandler handler ) throws IOException {\n        DynamicMessage message = this.reinterpretMsg( method, args );\n        this.sendAsynMsg(new UlfInformMessage(message.toByteArray()), new AsynMsgHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception {\n                handler.onSuccessfulReturn( WolfAppointClient.this.unmarshalResponse( method, msg ) );\n            }\n\n            @Override\n            public void onErrorMsgReceived( UMCMessage msg ) throws Exception {\n                handler.onErrorMsgReceived( msg );\n            }\n        });\n    }\n\n    @Override\n    public Object invokeInform( MethodPrototype method, Object[] args, long 
nWaitTimeMil ) throws IlleagalResponseException, IOException {\n        CompletableFuture<Object> future = new CompletableFuture<>();\n        DynamicMessage message = this.reinterpretMsg(method, args);\n\n        this.sendAsynMsg(new UlfInformMessage(message.toByteArray()), new AsynMsgHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception {\n                try {\n                    Object result = WolfAppointClient.this.unmarshalResponse( method, msg );\n                    future.complete(result);\n                }\n                catch ( IlleagalResponseException e ) {\n                    future.completeExceptionally( e );\n                }\n            }\n\n            @Override\n            public void onErrorMsgReceived( UMCMessage msg ) throws Exception {\n                future.completeExceptionally( new IlleagalResponseException( \"Error message received: \" + msg ) );\n            }\n\n            @Override\n            public void onError( Object data, Throwable cause ) {\n                future.completeExceptionally( cause );\n            }\n        });\n\n        try {\n            if ( nWaitTimeMil == -1 ) {\n                if ( this.mMessenger instanceof WolfMCClient ) {\n                    nWaitTimeMil = ((WolfMCClient) this.mMessenger).getConnectionArguments().getSyncWaitingMillis();\n                }\n            }\n\n            return WolfAppointHelper.evalCompletableFuture( future, nWaitTimeMil );\n        }\n        catch ( TimeoutException | ExecutionException e ) {\n            throw new IlleagalResponseException( e );\n        }\n        catch ( InterruptedException e ) {\n            Thread.currentThread().interrupt();\n            throw new IlleagalResponseException( e );\n        }\n    }\n\n    @Override\n    public Object invokeInform( MethodPrototype method, Object... 
args ) throws IlleagalResponseException, IOException {\n        return this.invokeInform( method, args, -1 );\n    }\n\n    @Override\n    public void invokeInformAsyn( String szMethodAddress, Object[] args, AsynMsgHandler handler ) throws IOException {\n        this.invokeInformAsyn( this.queryMethodPrototype( szMethodAddress ), args, handler );\n    }\n\n    @Override\n    public void invokeInformAsyn( String szMethodAddress, Object[] args, AsynReturnHandler handler ) throws IOException {\n        this.invokeInformAsyn( this.queryMethodPrototype( szMethodAddress ), args, handler );\n    }\n\n    @Override\n    public Object invokeInform( String szMethodAddress, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException {\n        return this.invokeInform( this.queryMethodPrototype( szMethodAddress ), args, nWaitTimeMil );\n    }\n\n    @Override\n    public Object invokeInform( String szMethodAddress, Object... args ) throws IlleagalResponseException, IOException {\n        return this.invokeInform( this.queryMethodPrototype( szMethodAddress ), args );\n    }\n\n    @Override\n    public <T> T getIface( Class<T> iface ) {\n        return this.mIfaceProxyFactory.createProxy( iface );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolfAppointHelper.java",
    "content": "package com.pinecone.hydra.uma.wolf;\n\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\n\nimport com.pinecone.hydra.umct.IlleagalResponseException;\n\npublic final class WolfAppointHelper {\n    public static Object evalCompletableFuture( CompletableFuture<Object> future, long nWaitTimeMil ) throws IlleagalResponseException, TimeoutException, ExecutionException, InterruptedException {\n        Object ret;\n\n        if ( nWaitTimeMil != -1 ) {\n            ret = future.get( nWaitTimeMil, TimeUnit.MILLISECONDS );\n        }\n        else {\n            ret = future.get();\n        }\n\n        if ( ret instanceof Exception ) {\n            throw new IlleagalResponseException( (Exception)ret );\n        }\n\n        return ret;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolfAppointServer.java",
    "content": "package com.pinecone.hydra.uma.wolf;\n\nimport java.io.IOException;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\n\nimport org.slf4j.Logger;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.servgram.Servgramium;\nimport com.pinecone.hydra.uma.UlfAppointServer;\nimport com.pinecone.hydra.uma.ArchUlfAppointNode;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor;\nimport com.pinecone.hydra.umc.vita.HeartbeatFeedbackor;\nimport com.pinecone.hydra.umc.wolf.server.UlfServer;\nimport com.pinecone.hydra.umct.MessageDeliver;\nimport com.pinecone.hydra.umct.MessageExpress;\nimport com.pinecone.hydra.umct.MessageJunction;\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.hydra.umct.husky.heartbeat.HuskyHeartbeatFeedbackor;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcher;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcherFabricator;\nimport com.pinecone.hydra.umct.husky.machinery.RouteDispatcher;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\n\nimport io.netty.channel.ChannelHandlerContext;\n\n/**\n *  Pinecone Ursus For Java WolfAppointServer [ Ulfhedinn Wolf RPC Server ]\n *  Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family.\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n */\npublic class WolfAppointServer extends ArchUlfAppointNode implements UlfAppointServer {\n   
 protected UlfServer                     mRecipient;\n    protected RouteDispatcher               mRouteDispatcher;\n    protected HeartbeatFeedbackor           mHeartbeatFeedbackor;\n\n    protected void applyExpress( UMCTExpress express ) {\n        this.mRecipient.apply( express );\n    }\n\n    protected void initUlfServerHeartbeatInterceptors( UlfServer server ) {\n        server.registerArrivedDataInterceptor(new ChannelDataInterceptor() {\n            @Override\n            public boolean interceptAfterDataArrived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) {\n                //Debug.trace( msg );\n                try {\n                    return WolfAppointServer.this.mHeartbeatFeedbackor.interceptHeartbeat( block, msg );\n                }\n                catch ( IOException e ) {\n                    throw new ProvokeHandleException( e );\n                }\n            }\n        });\n    }\n\n    private void initSelf( UlfServer server ) {\n        this.initUlfServerHeartbeatInterceptors( server );\n    }\n\n\n    protected WolfAppointServer( UlfServer server, RouteDispatcher dispatcher ){\n        super( (Servgramium) server, dispatcher.getContextMachinery() );\n        this.mRecipient           = server;\n        this.mRouteDispatcher     = dispatcher;\n        this.mHeartbeatFeedbackor = new HuskyHeartbeatFeedbackor();\n\n        this.initSelf( server );\n    }\n\n    public WolfAppointServer( UlfServer server, ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, UMCTExpress express ){\n        this( server, new HuskyRouteDispatcher( compiler, controllerInspector, express ) );\n        this.apply( express );\n    }\n\n    public WolfAppointServer( UlfServer server, CompilerEncoder encoder, UMCTExpress express ){\n        this( server, new HuskyRouteDispatcher( encoder, express, server.getTaskManager().getClassLoader() ) );\n        this.apply( express );\n    }\n\n    
public WolfAppointServer( UlfServer server, UMCTExpress express ){\n        this( server, new HuskyRouteDispatcher( express, server.getTaskManager().getClassLoader() ) );\n        this.apply( express );\n    }\n\n    public WolfAppointServer( UlfServer server, Class<?> expressType ){\n        this( server, new HuskyRouteDispatcher( server.getTaskManager().getClassLoader(), true ) );\n\n        try{\n            Constructor<?> constructor = expressType.getConstructor( String.class, MessageJunction.class, Logger.class );\n            UMCTExpress express = (UMCTExpress) constructor.newInstance(DefaultEntityName, this, this.getLogger() );\n\n            this.applyExpress( express );\n            HuskyRouteDispatcherFabricator.afterConstructed( (HuskyRouteDispatcher)this.mRouteDispatcher, express );\n        }\n        catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            throw new IllegalArgumentException( \"`\" + expressType.getSimpleName() + \"` is not UMCTExpress calibre qualified.\" );\n        }\n    }\n\n    public WolfAppointServer( UlfServer server ){\n        this( server, WolfMCExpress.class );\n    }\n\n\n\n\n    @Override\n    public void close() {\n        this.mRecipient.close();\n    }\n\n    @Override\n    public UlfServer getMessageNode() {\n        return this.mRecipient;\n    }\n\n    @Override\n    public WolfAppointServer apply( UMCTExpress handler ) {\n        this.mRouteDispatcher.setUMCTExpress( handler );\n        this.mRecipient.apply( handler );\n        return this;\n    }\n\n    @Override\n    public UMCTExpress getUMCTExpress() {\n        return this.mRouteDispatcher.getUMCTExpress();\n    }\n\n    @Override\n    public MessageExpress register( Deliver deliver ) {\n        return this.mRouteDispatcher.register( deliver );\n    }\n\n    @Override\n    public MessageExpress  fired   ( Deliver deliver ) {\n        return 
this.mRouteDispatcher.fired( deliver );\n    }\n\n    @Override\n    public MessageDeliver getDeliver( String name ) {\n        return this.mRouteDispatcher.getDeliver( name );\n    }\n\n    @Override\n    public MessageDeliver getDefaultDeliver() {\n        return this.mRouteDispatcher.getDefaultDeliver();\n    }\n\n    @Override\n    public void registerInstance( String deliverName, Object instance, Class<?> iface ) {\n        this.mRouteDispatcher.registerInstance( deliverName, instance, iface );\n    }\n\n    @Override\n    public void registerInstance( Object instance, Class<?> iface ) {\n        this.mRouteDispatcher.registerInstance( instance, iface );\n    }\n\n    @Override\n    public void registerController( String deliverName, Object instance, Class<?> controllerType ) {\n        this.mRouteDispatcher.registerController( deliverName, instance, controllerType );\n    }\n\n    @Override\n    public void registerController( Object instance, Class<?> controllerType ) {\n        this.mRouteDispatcher.registerController( instance, controllerType );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolvesAppointClient.java",
    "content": "package com.pinecone.hydra.uma.wolf;\n\nimport java.io.IOException;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.Map;\n\nimport org.slf4j.Logger;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.hydra.uma.AppointServer;\nimport com.pinecone.hydra.uma.HuskyDuplexExpress;\nimport com.pinecone.hydra.uma.UlfDuplexAppointClient;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelHandleException;\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.MediumTerminationException;\nimport com.pinecone.hydra.umc.msg.Messenger;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.UlfChannel;\nimport com.pinecone.hydra.umc.wolf.UlfInstructMessage;\nimport com.pinecone.hydra.umc.wolf.WolfMCStandardConstants;\nimport com.pinecone.hydra.umc.wolf.client.ArchAsyncMessenger;\nimport com.pinecone.hydra.umc.wolf.client.UlfAsyncMessengerChannelControlBlock;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.pinecone.hydra.umct.DuplexExpress;\nimport com.pinecone.hydra.umct.MessageJunction;\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\nimport com.pinecone.hydra.umct.husky.HuskyCTPConstants;\nimport com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyContextMachinery;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcher;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcherFabricator;\nimport com.pinecone.hydra.umct.husky.machinery.ProtoRouteDispatcher;\nimport com.pinecone.hydra.umct.husky.machinery.RouteDispatcher;\nimport 
com.pinecone.hydra.umct.mapping.BytecodeControllerInspector;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder;\n\nimport io.netty.channel.Channel;\nimport io.netty.channel.ChannelId;\nimport io.netty.util.AttributeKey;\nimport javassist.ClassPool;\n\n/**\n *  Pinecone Ursus For Java WolvesAppointClient [ Ulfhedinn Wolf Duplex RPC Client ]\n *  Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family.\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n */\npublic class WolvesAppointClient extends WolfAppointClient implements UlfDuplexAppointClient {\n    protected static Class<?> checkExpressType( Class<?> expressType ) {\n        if ( !DuplexExpress.class.isAssignableFrom( expressType ) ) {\n            throw new IllegalArgumentException( \"`\" + expressType.getSimpleName() + \"` is not DuplexExpress calibre qualified.\" );\n        }\n        return expressType;\n    }\n\n    protected Map<ChannelId, ChannelControlBlock > mInstructedChannels;  // Standby controlled channels, waiting for server to instruct.\n    protected RouteDispatcher                      mRouteDispatcher;\n\n\n    @Override\n    protected boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException {\n        UlfAsyncMessengerChannelControlBlock cb = (UlfAsyncMessengerChannelControlBlock) ccb;\n        Channel channel = cb.getChannel().getNativeHandle();\n        Object ob = channel.attr( AttributeKey.valueOf( HuskyCTPConstants.HCTP_DUP_PASSIVE_CHANNEL_KEY ) ).get();\n        if ( ob != null && (Boolean)ob ) {\n            WolvesAppointClient.this.getLogger().info( \"Passive-controlled channel ({}), has detached.\", channel.id() );\n            UlfClient wrappedClient = WolvesAppointClient.this.getMessageNode();\n            if ( 
wrappedClient.getConnectionArguments().isAutoReconnect() ) {\n                try {\n                    ArchAsyncMessenger.reconnect( cb, (Messenger) wrappedClient, context );\n                    Channel newChannel = cb.getChannel().getNativeHandle();\n                    WolvesAppointClient.copyDuplexAttrs( channel, newChannel );\n\n                    UlfInstructMessage instructMessage = new UlfInstructMessage( HuskyCTPConstants.HCTP_DUP_CONTROL_REGISTER );\n                    instructMessage.getHead().setIdentityId( wrappedClient.getMessageNodeId() );\n                    cb.sendAsynMsg( instructMessage, true );\n\n                    WolvesAppointClient.this.getLogger().info( \"Passive-controlled channel ({}, `{}`), reconnect successfully.\", channel.id(), cb.getChannel().getAddress() );\n                }\n                catch ( MediumTerminationException e ) {\n                    WolvesAppointClient.this.getLogger().info( \"Service already terminated with inactive event. <ACK>\" );\n                }\n                catch ( IOException e ) {\n                    WolvesAppointClient.this.getLogger().error( \"Passive-controlled channel ({}), attempted to reconnect but failed.\", channel.id(), e );\n                    throw new ChannelHandleException( e.getCause() );\n                }\n            }\n\n            DuplexExpress express = (DuplexExpress)WolvesAppointClient.this.mRouteDispatcher.getUMCTExpress();\n            express.afterChannelInactive( cb );\n            return true; // Blocking next inactive sequence.\n        }\n        return super.afterChannelInactive( ccb, context );\n    }\n\n    private void initSelf() {\n\n    }\n\n    protected WolvesAppointClient( UlfClient messenger, ProtoRouteDispatcher dispatcher ) {\n        super( messenger, dispatcher.getInterfacialCompiler(), dispatcher.getContextMachinery().getControllerInspector() );\n        this.initSelf();\n        this.mRouteDispatcher = dispatcher;\n        
this.mInstructedChannels = new LinkedTreeMap<>();\n    }\n\n    public WolvesAppointClient( UlfClient messenger, ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, UMCTExpress express ){\n        this( messenger, new HuskyRouteDispatcher( compiler, controllerInspector, express ) );\n        this.apply( express );\n    }\n\n    public WolvesAppointClient( UlfClient messenger, CompilerEncoder encoder, UMCTExpress express ){\n        this( messenger, new HuskyRouteDispatcher( encoder, express, messenger.getTaskManager().getClassLoader() ) );\n        this.apply( express );\n    }\n\n    public WolvesAppointClient( UlfClient messenger, UMCTExpress express ){\n        this( messenger, new HuskyRouteDispatcher( express, messenger.getTaskManager().getClassLoader() ) );\n        this.apply( express );\n    }\n\n    public WolvesAppointClient( UlfClient messenger, Class<?> expressType ){\n        super( messenger, true );\n        this.initSelf();\n\n        try{\n            Constructor<?> constructor = WolvesAppointClient.checkExpressType( expressType ).getConstructor( String.class, MessageJunction.class, Logger.class );\n            UMCTExpress express = (UMCTExpress) constructor.newInstance( AppointServer.DefaultEntityName, this, this.getLogger() );\n\n            this.mRouteDispatcher = new HuskyRouteDispatcher( express, messenger.getTaskManager().getClassLoader() );\n            HuskyRouteDispatcherFabricator.afterConstructed( (HuskyRouteDispatcher)this.mRouteDispatcher, express );\n            this.mMCTContextMachinery = new HuskyContextMachinery( new BytecodeIfaceCompiler(\n                    ClassPool.getDefault(), messenger.getTaskManager().getClassLoader()\n            ), new BytecodeControllerInspector(\n                    ClassPool.getDefault(), messenger.getTaskManager().getClassLoader()\n            ), new GenericFieldProtobufDecoder() );\n            this.apply( express );\n            this.mInstructedChannels = new 
LinkedTreeMap<>();\n        }\n        catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            throw new IllegalArgumentException( \"`\" + expressType.getSimpleName() + \"` is not UMCTExpress calibre qualified.\" );\n        }\n    }\n\n    public WolvesAppointClient( UlfClient messenger ){\n        this( messenger, HuskyDuplexExpress.class );\n    }\n\n\n\n    protected static void copyDuplexAttrs( Channel leg, Channel neo ) {\n        UlfChannel.copyChannelAttr( leg, neo, HuskyCTPConstants.HCTP_DUP_PASSIVE_CHANNEL_KEY );\n    }\n\n    public void apply( UMCTExpress handler ) {\n        this.mRouteDispatcher.setUMCTExpress( handler );\n    }\n\n    @Override\n    public RouteDispatcher getRouteDispatcher() {\n        return this.mRouteDispatcher;\n    }\n\n    @Override\n    public boolean supportDuplex() {\n        return true;\n    }\n\n    @Override\n    public void embraces( int nLine, UlfAsyncMsgHandleAdapter handler ) throws IOException {\n        // Join us, embracing uniformity.\n\n        this.createPassiveChannel( nLine );\n        for ( Map.Entry<ChannelId, ChannelControlBlock > kv : this.mInstructedChannels.entrySet() ) {\n            UlfInstructMessage instructMessage = new UlfInstructMessage( HuskyCTPConstants.HCTP_DUP_CONTROL_REGISTER );\n            instructMessage.getHead().setIdentityId( this.mMessenger.getMessageNodeId() );\n\n            ChannelControlBlock ccb = kv.getValue();\n            UlfAsyncMessengerChannelControlBlock cb = (UlfAsyncMessengerChannelControlBlock) ccb;\n            Channel channel = cb.getChannel().getNativeHandle();\n            channel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).set( handler );  // Exclusive handler.\n            channel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASY_EXCLUSIVE_HANDLE_KEY ) ).set( true );\n            channel.attr( AttributeKey.valueOf( 
WolfMCStandardConstants.CB_EXTERNAL_CHANNEL_KEY ) ).set( true );\n            channel.attr( AttributeKey.valueOf( HuskyCTPConstants.HCTP_DUP_PASSIVE_CHANNEL_KEY ) ).set( true );\n            cb.sendAsynMsg( instructMessage, true );\n\n            this.getLogger().info( \"Embracing and registering passive controlled channel ({}).\", cb.getChannel().getNativeHandle().id() );\n        }\n    }\n\n    @Override\n    public void embraces( int nLine, UMCTExpressHandler handler ) throws IOException {\n        this.embraces( nLine, UlfAsyncMsgHandleAdapter.wrap( handler ) );\n    }\n\n    @Override\n    public void embraces( int nLine ) throws IOException {\n        this.embraces( nLine, this.mRouteDispatcher.getUMCTExpress() );\n    }\n\n    @Override\n    public void createPassiveChannel( int nLine ) {\n        ChannelPool pool = this.getMessageNode().getChannelPool();\n\n        ChannelControlBlock[] cbs = new ChannelControlBlock[ nLine ];\n        for ( int i = 0; i < nLine; ++i ) {\n            ChannelControlBlock ccb = pool.depriveIdleChannel();\n            if ( ccb == null ) {\n                for ( int j = 0; j < nLine; ++j ) {\n                    if ( cbs[ j ] == null ) {\n                        break;\n                    }\n                    ChannelId id = (ChannelId)cbs[ j ].getChannel().getChannelID();\n                    this.mInstructedChannels.remove( id );\n                    pool.add( cbs[ j ] );\n                }\n                throw new IllegalArgumentException( \"Creating `PassiveChannel` is compromised due to insufficient free channels. Consider setting up sufficient parallel channels.\" );\n            }\n\n            ChannelId id = (ChannelId)ccb.getChannel().getChannelID();\n            cbs[ i ] = ccb;\n            this.mInstructedChannels.put( id, ccb );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/uma/wolf/WolvesAppointServer.java",
    "content": "package com.pinecone.hydra.uma.wolf;\n\nimport java.io.IOException;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.TimeoutException;\n\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.hydra.uma.AsynMsgHandler;\nimport com.pinecone.hydra.uma.AsynReturnHandler;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.pinecone.hydra.uma.HuskyDuplexExpress;\nimport com.pinecone.hydra.uma.UlfDuplexAppointServer;\nimport com.pinecone.hydra.uma.proxy.GenericPassiveClientIfaceProxyFactory;\nimport com.pinecone.hydra.uma.proxy.PassiveClientIfaceProxyFactory;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelHandleException;\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.event.ChannelEventHandler;\nimport com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.UlfChannelStatus;\nimport com.pinecone.hydra.umc.wolf.UlfInformMessage;\nimport com.pinecone.hydra.umc.wolf.server.UlfServer;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.hydra.umct.DuplexExpress;\nimport com.pinecone.hydra.umct.IlleagalResponseException;\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.husky.HuskyCTPConstants;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyRouteDispatcher;\nimport com.pinecone.hydra.umct.husky.machinery.RouteDispatcher;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\n\n/**\n *  Pinecone Ursus For Java WolfAppointServer [ Ulfhedinn Wolf Duplex RPC Server ]\n *  Bean 
Nuts Walnut Ulfhedinn Wolves/Ulfar Family.\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n */\npublic class WolvesAppointServer extends WolfAppointServer implements UlfDuplexAppointServer {\n    protected static Class<?> checkExpressType( Class<?> expressType ) {\n        if ( !DuplexExpress.class.isAssignableFrom( expressType ) ) {\n            throw new IllegalArgumentException( \"`\" + expressType.getSimpleName() + \"` is not DuplexExpress calibre qualified.\" );\n        }\n        return expressType;\n    }\n\n    protected PassiveClientIfaceProxyFactory mPassiveClientIfaceProxyFactory;\n\n    protected void initUlfServerEventHandlers( UlfServer server ) {\n        server.registerDataArrivedEventHandlers(new ChannelEventHandler() {\n            @Override\n            public void afterEventTriggered( ChannelControlBlock block, Object context ) {\n                if ( block.getChannel().getChannelStatus() == UlfChannelStatus.WAITING_PASSIVE_RECEIVE ) {\n                    ChannelPool pool = WolvesAppointServer.this.getUMCTExpress().getPoolByClientId( block.getChannel().getIdentityID() );\n                    if ( pool != null ) {\n                        pool.setIdleChannel( block );\n                    }\n                }\n            }\n        });\n    }\n\n    private void initSelf( UlfServer server ) {\n        this.initUlfServerEventHandlers( server );\n        this.mPassiveClientIfaceProxyFactory = new GenericPassiveClientIfaceProxyFactory( this );\n\n        this.mRecipient.registerChannelInactiveHandler(new ChannelInactiveHandler() {\n            @Override\n            public boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException {\n                this.afterEventTriggered( ccb, context );\n\n                DuplexExpress express = 
(DuplexExpress) WolvesAppointServer.this.mRouteDispatcher.getUMCTExpress();\n                express.afterChannelInactive( ccb );\n                return false;\n            }\n        });\n    }\n\n    protected WolvesAppointServer( UlfServer server, RouteDispatcher dispatcher ){\n        super( server, dispatcher );\n        this.initSelf( server );\n    }\n\n    public WolvesAppointServer( UlfServer server, ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, UMCTExpress express ){\n        this( server, new HuskyRouteDispatcher( compiler, controllerInspector, express ) );\n    }\n\n    public WolvesAppointServer( UlfServer server, CompilerEncoder encoder, UMCTExpress express ){\n        this( server, new HuskyRouteDispatcher( encoder, express, server.getTaskManager().getClassLoader() ) );\n        this.apply( express );\n    }\n\n    public WolvesAppointServer( UlfServer server, UMCTExpress express ){\n        this( server, new HuskyRouteDispatcher( express, server.getTaskManager().getClassLoader() ) );\n        this.apply( express );\n    }\n\n    public WolvesAppointServer( UlfServer server, Class<?> expressType ){\n        super( server, WolvesAppointServer.checkExpressType( expressType ) );\n        this.initSelf( server );\n    }\n\n    public WolvesAppointServer( UlfServer server ){\n        this( server, HuskyDuplexExpress.class );\n    }\n\n\n    @Override\n    public boolean supportDuplex() {\n        return true;\n    }\n\n\n    @Override\n    public DuplexExpress getUMCTExpress() {\n        return (DuplexExpress) super.getUMCTExpress();\n    }\n\n    @Override\n    public void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, AsynMsgHandler handler ) throws IOException {\n        this.getUMCTExpress().sendAsynMsg( clientId, request, bNoneBuffered, handler );\n    }\n\n    @Override\n    public void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) 
throws IOException {\n        this.getUMCTExpress().sendAsynMsg( clientId, request, bNoneBuffered, handler );\n    }\n\n    @Override\n    public void sendAsynMsg( long clientId, UMCMessage request, AsynMsgHandler handler ) throws IOException {\n        this.getUMCTExpress().sendAsynMsg( clientId, request, true, handler );\n    }\n\n\n\n    @Override\n    public void invokeInformAsyn( long clientId, MethodPrototype method, Object[] args, AsynMsgHandler handler ) throws IOException {\n        DynamicMessage message = this.reinterpretMsg( method, args );\n        this.sendAsynMsg( clientId, new UlfInformMessage(message.toByteArray()), handler );\n    }\n\n    @Override\n    public void invokeInformAsyn( long clientId, MethodPrototype method, Object[] args, AsynReturnHandler handler ) throws IOException {\n        DynamicMessage message = this.reinterpretMsg( method, args );\n        this.sendAsynMsg(clientId, new UlfInformMessage( message.toByteArray(), HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_REQUEST ), new AsynMsgHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception {\n                handler.onSuccessfulReturn( WolvesAppointServer.this.unmarshalResponse( method, msg ) );\n            }\n\n            @Override\n            public void onErrorMsgReceived( UMCMessage msg ) throws Exception {\n                handler.onErrorMsgReceived( msg );\n            }\n        });\n    }\n\n    @Override\n    public void invokeInformAsyn( long clientId, String szMethodAddress, Object[] args, AsynMsgHandler handler ) throws IOException {\n        this.invokeInformAsyn( clientId, this.queryMethodPrototype( szMethodAddress ), args, handler );\n    }\n\n    @Override\n    public void invokeInformAsyn( long clientId, String szMethodAddress, Object[] args, AsynReturnHandler handler ) throws IOException {\n        this.invokeInformAsyn( clientId, this.queryMethodPrototype( szMethodAddress ), args, handler );\n    
}\n\n\n\n    @Override\n    public Object invokeInform( long clientId, MethodPrototype method, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException {\n        CompletableFuture<Object> future = new CompletableFuture<>();\n        DynamicMessage message = this.reinterpretMsg(method, args);\n\n        this.sendAsynMsg(clientId, new UlfInformMessage( message.toByteArray(), HuskyCTPConstants.HCTP_DUP_CONTROL_PASSIVE_REQUEST ), new AsynMsgHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( UMCMessage msg ) throws Exception {\n                try {\n                    Object result = WolvesAppointServer.this.unmarshalResponse( method, msg );\n                    future.complete(result);\n                }\n                catch ( IlleagalResponseException e ) {\n                    future.completeExceptionally( e );\n                }\n            }\n\n            @Override\n            public void onErrorMsgReceived( UMCMessage msg ) throws Exception {\n                future.completeExceptionally( new IlleagalResponseException( \"Error message received: \" + msg ) );\n            }\n\n            @Override\n            public void onError( Object data, Throwable cause ) {\n                future.completeExceptionally( cause );\n            }\n        });\n\n        try {\n            if ( nWaitTimeMil == -1 ) {\n                if ( this.getMessageNode() instanceof WolfMCServer) {\n                    nWaitTimeMil = ((WolfMCServer) this.getMessageNode()).getConnectionArguments().getSyncWaitingMillis();\n                }\n            }\n\n            return WolfAppointHelper.evalCompletableFuture( future, nWaitTimeMil );\n        }\n        catch ( TimeoutException | ExecutionException e ) {\n            throw new IlleagalResponseException( e );\n        }\n        catch ( InterruptedException e ) {\n            Thread.currentThread().interrupt();\n            throw new IlleagalResponseException( e );\n      
  }\n    }\n\n    @Override\n    public Object invokeInform( long clientId, MethodPrototype method, Object... args ) throws IlleagalResponseException, IOException {\n        return this.invokeInform( clientId, method, args, -1 );\n    }\n\n    @Override\n    public Object invokeInform( long clientId, String szMethodAddress, Object[] args, long nWaitTimeMil ) throws IlleagalResponseException, IOException {\n        return this.invokeInform( clientId, this.queryMethodPrototype( szMethodAddress ), args, nWaitTimeMil );\n    }\n\n    @Override\n    public Object invokeInform( long clientId, String szMethodAddress, Object... args ) throws IlleagalResponseException, IOException {\n        return this.invokeInform( clientId, this.queryMethodPrototype( szMethodAddress ), args );\n    }\n\n\n    @Override\n    public <T> T getIface( long clientId, Class<T> iface ) {\n        return this.mPassiveClientIfaceProxyFactory.createProxy( clientId, iface );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/io/ChannelInputStream.java",
    "content": "package com.pinecone.hydra.umc.io;\n\nimport io.netty.buffer.ByteBuf;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\npublic class ChannelInputStream extends InputStream {\n    protected ByteBuf mByteBuf;\n\n    public ChannelInputStream( ByteBuf byteBuf ) {\n        this.mByteBuf = byteBuf;\n    }\n\n    public ByteBuf getByteBuf(){\n        return this.mByteBuf;\n    }\n\n    @Override\n    public int read() throws IOException {\n        try{\n            return this.mByteBuf.readByte();\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public int read( byte[] b ) throws IOException {\n        try{\n            int n = this.mByteBuf.readableBytes();\n            this.mByteBuf.readBytes( b, 0, b.length );\n            return n - this.mByteBuf.readableBytes();\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public int read( byte[] b, int off, int len ) throws IOException {\n        try{\n            int n = this.mByteBuf.readableBytes();\n            this.mByteBuf.readBytes( b, off, len );\n            return n - this.mByteBuf.readableBytes();\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public byte[] readAllBytes() throws IOException {\n        try{\n            int readerIndex  = this.mByteBuf.readerIndex();\n            int len          = this.mByteBuf.readableBytes();\n            byte[] array;\n            int offset;\n            if ( this.mByteBuf.hasArray() ) {\n                array  = this.mByteBuf.array();\n                int arrayOffset = this.mByteBuf.arrayOffset();\n                offset = arrayOffset + readerIndex;\n            }\n            else {\n                array = new byte[ len ];\n                offset = 0;\n                this.mByteBuf.getBytes( readerIndex, array, 0, len );\n 
           }\n\n            int nFinalLen = len - offset;\n            byte[] neo = new byte[ nFinalLen ];\n            System.arraycopy( array, offset, neo, 0, nFinalLen );\n            return neo;\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public int available() throws IOException {\n        try{\n            return this.mByteBuf.readableBytes();\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public void close() throws IOException {\n        try{\n            this.mByteBuf.clear();\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/io/ChannelOutputStream.java",
    "content": "package com.pinecone.hydra.umc.io;\n\nimport io.netty.buffer.ByteBuf;\nimport io.netty.buffer.Unpooled;\nimport io.netty.channel.ChannelHandlerContext;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.Objects;\n\npublic class ChannelOutputStream extends OutputStream {\n    protected ChannelHandlerContext mChannelHandlerContext;\n\n    protected ByteBuf               mByteBuf;\n\n    protected byte[]                mTemp = new byte[1];\n\n    public ChannelOutputStream( ChannelHandlerContext context ) {\n        this.mChannelHandlerContext = context;\n    }\n\n    public ChannelHandlerContext getChannelHandlerContext(){\n        return this.mChannelHandlerContext;\n    }\n\n    @Override\n    public void write( int b ) throws IOException {\n        try{\n            this.mTemp[0] = (byte)b;\n            this.mChannelHandlerContext.write( Unpooled.wrappedBuffer(this.mTemp) );\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public void write( byte[] b ) throws IOException {\n        try{\n            this.mChannelHandlerContext.write( Unpooled.wrappedBuffer(b) );\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public void write( byte[] b, int off, int len ) throws IOException {\n        Objects.checkFromIndexSize(off, len, b.length);\n        try{\n            this.mChannelHandlerContext.write( Unpooled.wrappedBuffer( b, off, len ) );\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    @Override\n    public void flush() throws IOException {\n        try{\n            this.mChannelHandlerContext.flush();\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n\n    public void close() throws IOException {\n        try{\n            
this.mChannelHandlerContext.close();\n        }\n        catch ( Exception e ) {\n            throw new IOException( e );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/io/IOCounter.java",
    "content": "package com.pinecone.hydra.umc.io;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic class IOCounter implements Pinenut {\n    protected long             mnSessionCount    ;     // How many this communication channel/stream/other established. [Transmit and Receive]\n    protected long             mnByteTransmitted ;     // How many bytes transfer, that though transmit. [ Send / Write ]\n    protected long             mnByteReceived    ;     // How many bytes transfer, that though receive.  [ Receive / Read ]\n    protected long             mnByteOther       ;     // How many bytes transfer, that though other operations.\n    protected long             mnTransmitCall    ;     // How many the transmission operation called.\n    protected long             mnReceiveCall     ;     // How many the receive operation called.\n    protected long             mnOtherCall       ;     // How many other operations called.\n    protected long             mnLastConTime     ;     // The last time when this channel/stream/other established.\n\n    public long getSessionCount() {\n        return this.mnSessionCount;\n    }\n\n    public void setSessionCount( long sessionCount ) {\n        this.mnSessionCount = sessionCount;\n    }\n\n    public long getByteTransmitted() {\n        return this.mnByteTransmitted;\n    }\n\n    public void setByteTransmitted( long byteTransmitted ) {\n        this.mnByteTransmitted = byteTransmitted;\n    }\n\n    public long getByteReceived() {\n        return this.mnByteReceived;\n    }\n\n    public void setByteReceived( long byteReceived ) {\n        this.mnByteReceived = byteReceived;\n    }\n\n    public long getByteOther() {\n        return this.mnByteOther;\n    }\n\n    public void setByteOther( long byteOther ) {\n        this.mnByteOther = byteOther;\n    }\n\n    public long getTransmitCall() {\n       
 return this.mnTransmitCall;\n    }\n\n    public void setTransmitCall( long transmitCall ) {\n        this.mnTransmitCall = transmitCall;\n    }\n\n    public long getReceiveCall() {\n        return this.mnReceiveCall;\n    }\n\n    public void setReceiveCall( long receiveCall ) {\n        this.mnReceiveCall = receiveCall;\n    }\n\n    public long getOtherCall() {\n        return this.mnOtherCall;\n    }\n\n    public void setOtherCall( long otherCall ) {\n        this.mnOtherCall = otherCall;\n    }\n\n    public long getLastConTime() {\n        return this.mnLastConTime;\n    }\n\n    public void setLastConTime( long lastConTime ) {\n        this.mnLastConTime = lastConTime;\n    }\n\n    public JSONObject toJSONObject() {\n        JSONObject json = new JSONMaptron();\n        json.put( \"sessionCount\"    , this.mnSessionCount    );\n        json.put( \"byteTransmitted\" , this.mnByteTransmitted );\n        json.put( \"byteReceived\"    , this.mnByteReceived    );\n        json.put( \"byteOther\"       , this.mnByteOther       );\n        json.put( \"transmitCall\"    , this.mnTransmitCall    );\n        json.put( \"receiveCall\"     , this.mnReceiveCall     );\n        json.put( \"otherCall\"       , this.mnOtherCall       );\n        json.put( \"lastConTime\"     , this.mnLastConTime     );\n        return json;\n    }\n\n    @Override\n    public String toJSONString() {\n        return this.toJSONObject().toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/io/IOLoadBalanceStrategy.java",
    "content": "package com.pinecone.hydra.umc.io;\n\nimport com.pinecone.framework.system.prototype.Strategy;\n\npublic interface IOLoadBalanceStrategy extends Strategy {\n    boolean readPriorityMatched( Object condition );\n\n    boolean writePriorityMatched( Object condition );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AbstractUMCHead.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\npublic abstract class AbstractUMCHead implements UMCHead {\n    protected abstract void setSignature            ( String signature       );\n\n    protected abstract void setBodyLength           ( long length            );\n\n    protected abstract void setMethod               ( UMCMethod umcMethod    );\n\n    protected abstract void setExtraEncode          ( ExtraEncode encode     );\n\n\n\n    protected abstract void setExtraHead            ( JSONObject jo          );\n\n    protected abstract void setExtraHead            ( Map<String,Object > jo );\n\n    protected abstract void setExtraHead            ( Object o               );\n\n    protected abstract void transApplyExHead        (                        );\n\n    protected abstract void applyExtraHeadCoder     ( ExtraHeadCoder coder   );\n\n\n    protected abstract UMCHead applyExHead( Map<String, Object > jo      );\n\n\n    public static void transApplyExHeadExplicitly ( AbstractUMCHead that ) {\n        that.transApplyExHead();\n    }\n\n    public static void transApplyExHeadExplicitly ( UMCHead that ) {\n        AbstractUMCHead.transApplyExHeadExplicitly( (AbstractUMCHead) that );\n    }\n\n    public static void transApplyExHeadExplicitly ( UMCHead that, ExtraHeadCoder coder ) {\n        ( (AbstractUMCHead) that ).applyExtraHeadCoder( coder );\n        AbstractUMCHead.transApplyExHeadExplicitly( (AbstractUMCHead) that );\n    }\n\n    public static void setExtraHeadExplicitly ( UMCHead that, Object o ) {\n        ( (AbstractUMCHead) that ).setExtraHead( o );\n    }\n\n\n    protected String jsonifyExtraHead() {\n        Map<String, Object > joExtraHead = 
this.getMapExtraHead();\n        String szExtraHead;\n        if( joExtraHead == null ) {\n            szExtraHead = \"[object Object]\";\n        }\n        else {\n            szExtraHead = JSON.stringify( this.getMapExtraHead() );\n        }\n\n        return szExtraHead;\n    }\n\n    @Override\n    public String toJSONString() {\n        String szExtraHead = this.jsonifyExtraHead();\n\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"Signature\"      , this.getSignature()                                               ),\n                new KeyValue<>( \"ExtraHeadLength\", this.getExtraHeadLength()                                         ),\n                new KeyValue<>( \"ExtraEncode\"    , this.getExtraEncode().getName()                                   ),\n                new KeyValue<>( \"BodyLength\"     , this.getBodyLength()                                              ),\n                new KeyValue<>( \"KeepAlive\"      , this.getKeepAlive()                                               ),\n                new KeyValue<>( \"Method\"         , this.getMethod()                                                  ),\n                new KeyValue<>( \"Status\"         , this.getStatus().getName()                                        ),\n                new KeyValue<>( \"ControlBits\"    , \"0x\" + Integer.toUnsignedString( this.getControlBits(),16 ) ),\n                new KeyValue<>( \"IdentityId\"     , this.getIdentityId()                                              ),\n                new KeyValue<>( \"SessionId\"      , this.getSessionId()                                               ),\n                new KeyValue<>( \"ExtraHead\"      , szExtraHead                                                       ),\n        } );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchBytesTransferMessage.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.Map;\n\npublic abstract class ArchBytesTransferMessage extends ArchUMCMessage implements TransferMessage {\n    protected byte[]         msBytesBody   = null ;\n\n    public ArchBytesTransferMessage( UMCHead head ) {\n        super( head );\n    }\n\n    public ArchBytesTransferMessage( UMCHead head, byte[] sBytesBody   ) {\n        this( head );\n        this.setBody( sBytesBody );\n    }\n\n    public ArchBytesTransferMessage( UMCHead head, String szStringBody ) {\n        this( head, szStringBody.getBytes() );\n    }\n\n    public ArchBytesTransferMessage( Map<String,Object > joExHead, byte[] sBytesBody, int controlBits ) {\n        super( joExHead, UMCMethod.TRANSFER, controlBits );\n        this.setBody( sBytesBody );\n    }\n\n    public ArchBytesTransferMessage( Map<String,Object > joExHead, String szStringBody, int controlBits ) {\n        this( joExHead, szStringBody.getBytes(), controlBits );\n    }\n\n    public ArchBytesTransferMessage( Map<String,Object > joExHead, byte[] sBytesBody ) {\n        this( joExHead, sBytesBody, 0 );\n    }\n\n    public ArchBytesTransferMessage( Map<String,Object > joExHead, String szStringBody ) {\n        this( joExHead, szStringBody, 0 );\n    }\n\n\n\n    public ArchBytesTransferMessage( Object exHead, ExtraEncode encode, byte[] sBytesBody, int controlBits ) {\n        super( exHead, encode, UMCMethod.TRANSFER, controlBits );\n        this.setBody( sBytesBody );\n    }\n\n    public ArchBytesTransferMessage( Object exHead, ExtraEncode encode, String szStringBody, int controlBits ) {\n        this( exHead, encode, szStringBody.getBytes(), controlBits );\n    }\n\n    public ArchBytesTransferMessage( Object exHead, byte[] sBytesBody ) {\n        this( exHead, ExtraEncode.Prototype, sBytesBody, 0 );\n    }\n\n    public ArchBytesTransferMessage( Object exHead, String szStringBody ) {\n        this( exHead, ExtraEncode.Prototype, szStringBody, 0 );\n    
}\n\n\n\n    void setBody( byte[] sBytesBody ) {\n        this.msBytesBody = sBytesBody;\n        this.mHead.inface().setBodyLength( this.msBytesBody.length );\n    }\n\n    public byte[]      getBody() {\n        return this.msBytesBody;\n    }\n\n    @Override\n    public void        release() {\n        super.release();\n        this.msBytesBody  = null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchInformCMessage.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.Map;\n\npublic abstract class ArchInformCMessage extends ArchUMCMessage implements InformMessage {\n    public static UMCCHead newUMCCHead( Object exHead ) {\n        UMCCHeadV1 head = new UMCCHeadV1();\n        head.setExtraHead( exHead );\n        return head;\n    }\n\n    public static UMCCHead newUMCCHead( Map<String,Object > joExHead ) {\n        UMCCHeadV1 head = new UMCCHeadV1();\n        head.applyExHead( joExHead );\n        return head;\n    }\n\n    public ArchInformCMessage( UMCCHead head ) {\n        super( head );\n    }\n\n    public ArchInformCMessage( Map<String,Object > joExHead ) {\n        this( ArchInformCMessage.newUMCCHead( joExHead ) );\n    }\n\n    public ArchInformCMessage( Object protoExHead ) {\n        this( ArchInformCMessage.newUMCCHead( protoExHead ) );\n    }\n\n    @Override\n    public long        getMessageLength(){\n        return UMCCHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength();\n    }\n\n    @Override\n    public UMCCHead getHead() {\n        return (UMCCHead) super.getHead();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchInformMessage.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.Map;\n\npublic abstract class ArchInformMessage extends ArchUMCMessage implements InformMessage {\n    public ArchInformMessage( UMCHead head ) {\n        super( head );\n    }\n\n    public ArchInformMessage( Map<String,Object > joExHead , int controlBits ) {\n        super( joExHead, UMCMethod.INFORM, controlBits );\n    }\n\n    public ArchInformMessage( Object protoExHead, int controlBits ) {\n        super( protoExHead, UMCMethod.INFORM, controlBits );\n    }\n\n    public ArchInformMessage( Map<String,Object > joExHead ) {\n        super( joExHead, UMCMethod.INFORM );\n    }\n\n    public ArchInformMessage( Object protoExHead, ExtraEncode encode ) {\n        super( protoExHead, encode );\n    }\n\n    public ArchInformMessage( Object protoExHead ) {\n        super( protoExHead, UMCMethod.INFORM );\n    }\n\n    @Override\n    public long        getMessageLength(){\n        return UMCHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchStreamTransferMessage.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Map;\n\npublic class ArchStreamTransferMessage extends ArchUMCMessage implements TransferMessage {\n    protected InputStream    mIStreamBody  = null ;\n\n    public ArchStreamTransferMessage( UMCHead head ) {\n        super( head );\n        head.inface().setMethod( UMCMethod.TRANSFER );\n    }\n\n    public ArchStreamTransferMessage( UMCHead head, InputStream inStream ) {\n        this( head );\n        this.setBody( inStream );\n    }\n\n    public ArchStreamTransferMessage( Map<String,Object > joExHead, InputStream inStream, int controlBits ) {\n        super( joExHead, UMCMethod.TRANSFER, controlBits );\n        this.setBody( inStream );\n    }\n\n    public ArchStreamTransferMessage( Map<String,Object > joExHead, InputStream inStream ) {\n        this( joExHead, inStream, 0 );\n    }\n\n\n\n    public ArchStreamTransferMessage( Object exHead, ExtraEncode encode, InputStream inStream, int controlBits ) {\n        super( exHead, encode, UMCMethod.TRANSFER, controlBits );\n        this.setBody( inStream );\n    }\n\n    public ArchStreamTransferMessage( Object exHead, InputStream inStream ) {\n        this( exHead, ExtraEncode.Prototype, inStream, 0 );\n    }\n\n\n\n\n    void setBody( InputStream inStream ) {\n        this.mIStreamBody = inStream;\n        try{\n            this.mHead.inface().setBodyLength( this.mIStreamBody.available() );\n        }\n        catch ( IOException e ) {\n            this.mHead.inface().setBodyLength( 0 );\n        }\n    }\n\n    @Override\n    public InputStream getBody() {\n        return this.mIStreamBody;\n    }\n\n    @Override\n    public void        release() {\n        super.release();\n        this.mIStreamBody  = null;\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchUMCMessage.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\nimport java.util.Map;\n\npublic abstract class ArchUMCMessage implements UMCMessage {\n    protected UMCHead        mHead                ;\n\n    public ArchUMCMessage( UMCHead head ) {\n        this.mHead            = head;\n    }\n\n    ArchUMCMessage( Map<String,Object > joExHead, UMCMethod method, int controlBits ) {\n        UMCHeadV1 head = new UMCHeadV1();\n        head.setControlBits( controlBits );\n        head.setMethod( method );\n        head.applyExHead( joExHead );\n        this.mHead = head;\n    }\n\n    ArchUMCMessage( Map<String,Object > joExHead, UMCMethod method ) {\n        this( joExHead, method, 0 );\n    }\n\n    public ArchUMCMessage( Map<String,Object > joExHead, int controlBits ) {\n        this( joExHead, UMCMethod.INFORM, controlBits );\n    }\n\n    public ArchUMCMessage( Map<String,Object > joExHead ) {\n        this( joExHead, UMCMethod.INFORM );\n    }\n\n\n\n    protected ArchUMCMessage( Object protoExHead, ExtraEncode encode, UMCMethod method, int controlBits ) {\n        UMCHeadV1 head = new UMCHeadV1();\n        head.setControlBits( controlBits );\n        head.setMethod( method );\n        head.setExtraHead( protoExHead );\n        head.setExtraEncode( encode );\n        this.mHead = head;\n    }\n\n    protected ArchUMCMessage( Object protoExHead, UMCMethod method, int controlBits ) {\n        this( protoExHead, ExtraEncode.Prototype, method, controlBits );\n    }\n\n    protected ArchUMCMessage( Object protoExHead, UMCMethod method ) {\n        this( protoExHead, method, 0 );\n    }\n\n    protected ArchUMCMessage( Object protoExHead, ExtraEncode encode, UMCMethod method ) {\n        this( protoExHead, encode, method, 0 );\n    }\n\n    public ArchUMCMessage( Object protoExHead, int controlBits ) {\n        this( protoExHead, UMCMethod.INFORM, controlBits );\n    }\n\n   
 public ArchUMCMessage( Object protoExHead ) {\n        this( protoExHead, UMCMethod.INFORM );\n    }\n\n    public ArchUMCMessage( Object protoExHead, ExtraEncode encode ) {\n        this( protoExHead, encode, UMCMethod.INFORM );\n    }\n\n\n\n    @Override\n    public UMCHead     getHead() {\n        return this.mHead;\n    }\n\n    @Override\n    public Object    getExHead() {\n        return this.mHead.getExtraHead();\n    }\n\n    @Override\n    public long        getMessageLength(){\n        return UMCHeadV1.HeadBlockSize + this.mHead.getExtraHeadLength() + this.mHead.getBodyLength();\n    }\n\n    @Override\n    public long        queryMessageLength(){\n        this.mHead.inface().transApplyExHead();\n        return this.getMessageLength();\n    }\n\n    @Override\n    public void        release() {\n        this.mHead        = null;\n    }\n\n    @Override\n    public String      toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String      toJSONString() {\n        String szControlBits = \"0x\" + Integer.toUnsignedString( this.getHead().getControlBits(),16 );\n\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"ExtraHeadLength\", this.getHead().getExtraHeadLength()    ),\n                new KeyValue<>( \"Head\"           , this.getHead().getExtraHead()          ),\n                new KeyValue<>( \"Method\"         , this.getHead().getMethod()             ),\n                new KeyValue<>( \"BodyLength\"     , this.getHead().getBodyLength()         ),\n                new KeyValue<>( \"ControlBits\"    , szControlBits                          ),\n                new KeyValue<>( \"IdentityId\"     , this.getHead().getIdentityId()         ),\n                new KeyValue<>( \"SessionId\"      , this.getHead().getSessionId()          ),\n                new KeyValue<>( \"Status\"         , this.getHead().getStatus()             )\n        } );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchUMCProtocol.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.OutputStream;\nimport java.io.InputStream;\nimport java.io.IOException;\n\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\n/**\n *  Pinecone Ursus For Java UlfMCProtocol [ Wolf Uniform Message Control Protocol ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  UlfUMC Message Struct:\n *  const char* lpszSignature\n *  byteEnum    method\n *  uint32      nExtraHeadLength\n *  uint64      nBodyLength\n *  Atom*       lpjoExtraHead // JSON5 String / JSONObject\n *  Stream      bodyStream\n *  **********************************************************\n *  UlfUMC/1.1 0xFF0xFFFFFFFF0xFFFFFFFFFFFFFFFF{Key:\"Val\"...}\n *  **********************************************************\n *  UlfUMC/1.1 0xFF0xFFFFFFFF0xFFFFFFFFFFFFFFFF{Key:\"Val\"...}\n *  MsgBody\n *  **********************************************************\n */\npublic abstract class ArchUMCProtocol implements UMCProtocol {\n    protected int              mnFrameSize     = 4096;\n\n    protected String           mszVersion      = UMCHeadV1.ProtocolVersion;\n\n    protected String           mszSignature    = UMCHeadV1.ProtocolSignature;\n\n    protected OutputStream     mOutputStream   ;\n\n    protected InputStream      mInputStream    ;\n\n    protected Medium           mMessageSource  ;\n\n    protected ExtraHeadCoder   mExtraHeadCoder ;\n\n    public ArchUMCProtocol( Medium messageSource ) {\n        this.mMessageSource = messageSource;\n        this.mOutputStream  = this.mMessageSource.getOutputStream();\n        this.mInputStream   = this.mMessageSource.getInputStream();\n        this.applyMessageSource( messageSource );\n    }\n\n    @Override\n    public UMCProtocol applyMessageSource( Medium medium ) {\n        this.mMessageSource  = 
medium;\n        this.mExtraHeadCoder = this.getExtraHeadCoder();\n        return this;\n    }\n\n    @Override\n    public Medium getMessageSource() {\n        return this.mMessageSource;\n    }\n\n    @Override\n    public String getVersion(){\n        return this.mszVersion;\n    }\n\n    @Override\n    public String getSignature() {\n        return this.mszSignature;\n    }\n\n    protected UMCHeadV1 newHead() {\n        UMCHeadV1 head = new UMCHeadV1();\n        head.applyExtraHeadCoder( this.getExtraHeadCoder() );\n        return head;\n    }\n\n    @Override\n    public void release() {\n        this.mMessageSource.release();\n\n        this.mMessageSource   = null;\n        this.mszVersion       = null;\n        this.mszSignature     = null;\n        this.mOutputStream    = null;\n        this.mInputStream     = null;\n    }\n\n    public ExtraHeadCoder getExtraHeadCoder() {\n        return this.mMessageSource.getMessageNode().getExtraHeadCoder();\n    }\n\n    protected void flush() throws IOException {\n        this.mOutputStream.flush();\n    }\n\n    protected void sendMsgHead( UMCHead head ) throws IOException {\n        this.sendMsgHead( head, true );\n    }\n\n    protected void sendMsgHead( UMCHead umcHead, boolean bFlush ) throws IOException {\n        UMCHeadV1.EncodePair encodePair = UMCHeadV1.encode( umcHead, this.getExtraHeadCoder() );\n\n        this.mOutputStream.write( encodePair.byteBuffer.array(), 0, encodePair.bufLength );\n        if( bFlush ) {\n            this.mOutputStream.flush();\n        }\n    }\n\n    protected UMCHead readMsgHead() throws IOException {\n        int nBufSize = ArchUMCProtocol.basicHeadLength( this.mszSignature );\n        byte[] buf = new byte[ nBufSize ];\n\n        if ( this.mInputStream.read( buf ) < nBufSize ) {\n            throw new StreamTerminateException(\"StreamEndException:[UMCProtocol] Stream is ended.\");\n        }\n\n        UMCHeadV1 head = (UMCHeadV1)ArchUMCProtocol.onlyReadMsgBasicHead( buf, 
this.mszSignature, this.getExtraHeadCoder() );\n\n        byte[] headBuf = new byte[ head.nExtraHeadLength ];\n        if ( this.mInputStream.read( headBuf ) < head.nExtraHeadLength ) {\n            throw new StreamTerminateException(\"[UMCProtocol] Stream is ended.\");\n        }\n\n        try {\n            Object jo = this.getExtraHeadCoder().getDecoder().decode( head, headBuf );\n            head.setExtraHead( jo );\n        }\n        catch ( JSONException e ) {\n            throw new IOException(\" [UMCProtocol] Illegal protocol head.\");\n        }\n\n        return head;\n    }\n\n    public static int basicHeadLength( String szSignature ) {\n        return szSignature.length() + UMCHeadV1.StructBlockSize;\n    }\n\n    public static UMCHead onlyReadMsgBasicHead( byte[] buf, String szSignature, ExtraHeadCoder extraHeadCoder ) throws IOException {\n        return UMCHeadV1.decode( buf, szSignature, extraHeadCoder );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchUMCReceiver.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\n\nimport java.io.IOException;\nimport java.lang.reflect.InvocationTargetException;\n\npublic abstract class ArchUMCReceiver extends ArchUMCProtocol implements UMCReceiver {\n    public ArchUMCReceiver( Medium messageSource ) {\n        super( messageSource );\n    }\n\n    @Override\n    public Object readInformMsg() throws IOException {\n        UMCHead head = this.readMsgHead();\n        if( head.getMethod() != UMCMethod.INFORM ) {\n            throw new IOException( \"[UMCProtocol] Illegal protocol method.\" );\n        }\n        return head.getExtraHead();\n    }\n\n    protected UMCHead readTransferHead() throws IOException {\n        UMCHead head = this.readMsgHead();\n        if( head.getMethod() != UMCMethod.TRANSFER ) {\n            throw new IOException( \"[UMCProtocol] Illegal protocol method.\" );\n        }\n        return head;\n    }\n\n    protected void onlyReadTransferBody( TransferMessage message, boolean bAllBytes ) throws IOException {\n        if( bAllBytes ) {\n            ( (ArchBytesTransferMessage)message ).setBody( this.mInputStream.readAllBytes() );\n        }\n        else {\n            ( (ArchStreamTransferMessage)message ).setBody( this.mInputStream );\n        }\n    }\n\n    public UMCMessage readMsg( boolean bAllBytes, MessageStereotypes stereotypes ) throws IOException {\n        try{\n            UMCHead head = this.readMsgHead();\n            UMCMessage message;\n            if( head.getMethod() == UMCMethod.TRANSFER ){\n                if( bAllBytes ) {\n                    message = (UMCMessage) stereotypes.postBytesType().getConstructor( UMCHead.class ).newInstance( head );\n                }\n                else {\n                    message = (UMCMessage) stereotypes.postStreamType().getConstructor( UMCHead.class ).newInstance( head );\n                }\n                this.onlyReadTransferBody( 
(TransferMessage)message, bAllBytes );\n            }\n            else {\n                if( head.getMethod() != UMCMethod.INFORM ){\n                    if ( !( head.getMethod() == UMCMethod.UNDEFINED && head.getExtraEncode() == ExtraEncode.Iussum ) ) {\n                        throw new IOException( \" [UMCProtocol] Unrecognized protocol method.\" );\n                    }\n                }\n                message = (UMCMessage) stereotypes.putType().getConstructor( UMCHead.class ).newInstance( head );\n            }\n\n            return message;\n        }\n        catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e ) {\n            throw new ProvokeHandleException( e );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ArchUMCTransmit.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Map;\n\npublic abstract class ArchUMCTransmit extends ArchUMCProtocol implements UMCTransmit {\n    public ArchUMCTransmit( Medium messageSource ) {\n        super( messageSource );\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected void applyExHead( UMCHeadV1 head, Object msg ) {\n        if( msg instanceof Map ) {\n            head.inface().applyExHead( (Map) msg );\n        }\n        else {\n            head.inface().setExtraHead( msg );\n        }\n    }\n\n    @Override\n    public void sendInformMsg( Object msg, Status status ) throws IOException {\n        UMCHeadV1 head = this.newHead();\n        this.applyExHead( head, msg );\n        head.setStatus( status );\n        head.inface().setMethod( UMCMethod.INFORM );\n        this.sendMsgHead( head );\n    }\n\n    @Override\n    public void sendInformMsg( Object msg ) throws IOException {\n        this.sendInformMsg( msg, Status.OK );\n    }\n\n    public void sendTransferMsgHead( Object msg ) throws IOException {\n        this.sendTransferMsgHead( msg, false );\n    }\n\n\n    public void sendTransferMsgHead( Object msg, boolean bFlush ) throws IOException {\n        UMCHeadV1 head = this.newHead();\n        this.applyExHead( head, msg );\n        head.inface().setMethod( UMCMethod.TRANSFER );\n        this.sendMsgHead( head, bFlush );\n    }\n\n    public void sendTransferMsgContent( byte[] frame, int len ) throws IOException {\n        this.mOutputStream.write( frame, 0, len );\n    }\n\n\n    protected void onlySendPostBody( byte[] bytes ) throws IOException {\n        this.sendTransferMsgContent( bytes, bytes.length );\n        this.mOutputStream.flush();\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, byte[] bytes, Status status ) throws IOException {\n        UMCHeadV1 head = this.newHead();\n        head.setBodyLength( bytes.length );\n   
     head.setStatus( status );\n        this.sendTransferMsgHead( msg, false );\n        this.onlySendPostBody( bytes );\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, byte[] bytes ) throws IOException {\n        this.sendTransferMsg( msg, bytes, Status.OK );\n    }\n\n    protected void onlySendPostBody( InputStream is, boolean bNoneBuffered ) throws IOException {\n        //this.mnFrameSize = 2;\n        byte[] buf;\n        if( bNoneBuffered ) {\n            buf = is.readAllBytes();\n            this.sendTransferMsgContent( buf, buf.length );\n        }\n        else {\n            buf = new byte[ this.mnFrameSize ];\n            while ( true ) {\n                int n = is.available();\n\n                if( n > this.mnFrameSize && is.read( buf ) > 0 ) {\n                    this.sendTransferMsgContent( buf, this.mnFrameSize );\n                }\n                else {\n                    if( is.read( buf, 0, n ) > 0 ) {\n                        this.sendTransferMsgContent( buf, n );\n                    }\n                    break;\n                }\n            }\n        }\n\n        this.getMessageSource().getOutputStream().flush();\n    }\n\n    @Override\n    public void sendTransferMsg( Object msg, InputStream is ) throws IOException {\n        UMCHeadV1 head = this.newHead();\n        head.setBodyLength( is.available() );\n        this.sendTransferMsgHead( msg, false );\n        this.onlySendPostBody( is, false );\n    }\n\n\n    @Override\n    public void sendMsg( UMCMessage msg, boolean bNoneBuffered ) throws IOException {\n        msg.getHead().setIdentityId( this.getMessageSource().getMessageNode().getMessageNodeId() );\n        UMCHead head = msg.getHead();\n        head.inface().setSignature( this.mszSignature );\n\n        if( msg.getMethod() == UMCMethod.INFORM || msg.getMethod() == UMCMethod.UNDEFINED ) {\n            this.sendMsgHead( head );\n        }\n        else if( msg.getMethod() == UMCMethod.TRANSFER ) {\n    
        this.sendMsgHead( head, false );\n            Object body = msg.evinceTransferMessage().getBody();\n            if( body instanceof byte[] ) {\n                byte[] bytes = (byte[])body;\n                this.onlySendPostBody( bytes );\n            }\n            else if( body instanceof InputStream ) {\n                InputStream is = (InputStream)body;\n                this.onlySendPostBody( is, bNoneBuffered );\n            }\n        }\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AsynChannelAllocator.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic interface AsynChannelAllocator extends ChannelPool {\n    ChannelControlBlock nextAsynChannel( long nMillisTimeout, boolean bEager ) ;\n\n    default ChannelControlBlock nextAsynChannel( long nMillisTimeout ) {\n        return this.nextAsynChannel( nMillisTimeout, true );\n    }\n\n    default ChannelControlBlock nextAsynChannel() {\n        return this.nextAsynChannel( 5000 );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AsyncMessenger.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\n\nimport java.io.IOException;\n\npublic interface AsyncMessenger extends Messenger {\n    void sendAsynMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException;\n\n    void sendAsynMsg( UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException;\n\n    // Javascript/Ajax style.\n    default void sendAsynMsg( UMCMessage request, UlfAsyncMsgHandleAdapter handler ) throws IOException {\n        this.sendAsynMsg( request, false, handler );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AsyncMessengerChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\n\npublic interface AsyncMessengerChannelControlBlock extends MessengerChannelControlBlock {\n    @Override\n    default AsyncMessenger     getParentMessageNode(){\n        return (AsyncMessenger) this.getChannel().getParentMessageNode();\n    }\n\n    void                       sendAsynMsg( UMCMessage message, boolean bNoneBuffered ) throws IOException;\n\n    @Override\n    default void               sendMsg( UMCMessage message, boolean bNoneBuffered ) throws IOException {\n        this.sendAsynMsg( message, bNoneBuffered );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/AsyncMsgHandleAdapter.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\npublic interface AsyncMsgHandleAdapter extends UMCTExpressHandler {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/CascadeMessageNode.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.regimentation.UniformCascadeNodus;\n\npublic interface CascadeMessageNode extends MessageNode, UniformCascadeNodus {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelAllocateException.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class ChannelAllocateException extends RuntimeException implements Pinenut {\n    public ChannelAllocateException() {\n        super();\n    }\n\n    public ChannelAllocateException( String message ) {\n        super(message);\n    }\n\n    public ChannelAllocateException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ChannelAllocateException( Throwable cause ) {\n        super(cause);\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.hydra.umc.io.IOCounter;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\n\nimport java.io.IOException;\n\npublic interface ChannelControlBlock extends Pinenut {\n    UMCChannel                 getChannel();\n\n    IOCounter                  getIOCounter();\n\n    boolean                    getInSyncMode();\n\n    UMCTransmit                getTransmit();\n\n    UMCReceiver                getReceiver();\n\n    default MessageNode        getParentMessageNode(){\n        return this.getChannel().getParentMessageNode();\n    }\n\n    void                       sendMsg( UMCMessage message, boolean bNoneBuffered ) throws IOException;\n\n    void                       release();\n\n    void                       close();\n\n    default boolean            isShutdown(){\n        return this.getChannel().isShutdown();\n    }\n\n    ChannelStatus              getChannelStatus();\n\n\n    void                       pushMsgHandle ( UlfAsyncMsgHandleAdapter msgHandle );\n\n    UlfAsyncMsgHandleAdapter   pollMsgHandle ( long nWaitMillis ) throws InterruptedException;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelHandleException.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class ChannelHandleException extends IOException implements Pinenut {\n    public ChannelHandleException() {\n        super();\n    }\n\n    public ChannelHandleException( String message ) {\n        super(message);\n    }\n\n    public ChannelHandleException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ChannelHandleException( Throwable cause ) {\n        super(cause);\n    }\n}\n\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelPool.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ChannelPool extends Pinenut {\n    ChannelControlBlock queryChannelById( Object id ) ;\n\n    void onlyRemove( Object id );\n\n    int size();\n\n    void clear();\n\n    boolean isEmpty();\n\n    Collection getPooledChannels();\n\n    ChannelControlBlock terminate( Object id ) throws InterruptedException;\n\n    boolean isAllChannelsTerminated();\n\n    void remove ( ChannelControlBlock ccb );\n\n    void deactivate ( ChannelControlBlock ccb );\n\n    ChannelPool setIdleChannel( ChannelControlBlock block );\n\n    ChannelPool add( ChannelControlBlock block );\n\n    ChannelControlBlock depriveIdleChannel();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ChannelStatus.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ChannelStatus extends Pinenut {\n    String getName();\n\n    int getValue();\n\n    byte getByteValue();\n\n    boolean isIdle();\n\n    boolean isTerminated();\n\n    boolean isWaitingForIOCompleted();\n\n    boolean isWaitingForLocalCompleted();\n\n    default boolean isWaitingForOperationCompleted() {\n        return this.isWaitingForIOCompleted() || this.isWaitingForLocalCompleted();\n    }\n\n    boolean isAsynAvailable();\n\n    boolean isSyncAvailable();\n\n    default String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/EMCBytesDecoder.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\npublic interface EMCBytesDecoder extends Pinenut {\n\n    UMCHead decode( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException;\n\n    UMCHead decodeIntegrated( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/EMCHead.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n/**\n *  Pinecone Ursus For Java EMC [ Elastic Uniform Message Control ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  ********************************************************************************************************************\n *  Variable-length message protocol header\n *  可变长弹性协议头\n *  ********************************************************************************************************************\n *  A typical top-level UMC header only contains the signature and the ExtraHeadLength.\n *  In practice, the ExtraHeadLength is usually left empty, so the minimum sizeof = 8.\n *  This allows for elastic and excellent performance while ensuring the supreme uniformity based on the specific message type.\n *  Unlike C/C++, other languages cannot use unions or struct segments for memory manipulation.\n *  To ensure maximum compatibility, the UMC uses the highest bit length (dword/qword).\n *  For uniformity, it will inevitably lead to memory overhead, but the sacrifice is worth it.\n *  ********************************************************************************************************************\n *  一个典型顶级的UMC头仅包含协议签名、扩展头长度，实践中扩展头长度字段（ExtraHeadLength）\n *  默认是置空的，因此最小 sizeof = 8。这样可根据具体的消息类型，灵活确保最高统一抽象和极致性能。\n *  由于其他语言不像C/C++，无法使用union、结构体段等内存手段，UMC协议为确保最高兼容，因此使用了最高位长（dword/qword）\n *  这会不可避免地带来内存损益，为了统一牺牲是值得的。\n *  ********************************************************************************************************************\n */\npublic interface EMCHead extends Pinenut {\n    String          getSignature();\n\n    int             getSignatureLength();\n\n    int             sizeof();\n\n    int             fieldsSize(); // UMCHead (Non-Dynamic) Fields size.\n\n    int             getExtraHeadLength();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/ExtraEncode.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic enum ExtraEncode {\n    Undefined        ( 0x01, \"Undefined\"        ),\n    JSONString       ( 0x02, \"JSONString\"       ),\n    Binary           ( 0x03, \"Binary\"           ), // Bson\n    Prototype        ( 0x04, \"Prototype\"        ), // Prototype Raw Binary\n    Iussum           ( 0x05, \"Iussum\"           ), // Reduced instruction\n    Custom           ( 0xFF, \"Custom\"           );\n\n    private final int value;\n\n    private final String name;\n\n    ExtraEncode( int value, String name ){\n        this.value = value;\n        this.name  = name;\n    }\n\n    public String getName(){\n        return this.name;\n    }\n\n    public int getValue() {\n        return this.value;\n    }\n\n    public byte getByteValue() {\n        return (byte) this.value;\n    }\n\n    public static ExtraEncode asValue( int val ) {\n        for ( ExtraEncode type : ExtraEncode.values() ) {\n            if ( type.getValue() == val ) {\n                return type;\n            }\n        }\n\n        return ExtraEncode.Custom;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/FairChannelPool.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.Queue;\n\npublic interface FairChannelPool extends AsynChannelAllocator {\n    long getMajorWaitTimeout();\n\n    FairChannelPool setMajorWaitTimeout( long nMillisTimeout );\n\n\n    FairChannelPool pushBack( ChannelControlBlock channel );\n\n    ChannelControlBlock pop();\n\n    @Override\n    FairChannelPool setIdleChannel( ChannelControlBlock block );\n\n    @Override\n    ChannelControlBlock nextAsynChannel( long nMillisTimeout, boolean bEager ) ;\n\n    @Override\n    default ChannelControlBlock nextAsynChannel( long nMillisTimeout ) {\n        return this.nextAsynChannel( nMillisTimeout, true );\n    }\n\n    @Override\n    default ChannelControlBlock nextAsynChannel() {\n        return this.nextAsynChannel( 5000 );\n    }\n\n    Queue getMajorQueue();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/GenericEMCBytesDecoder.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\npublic class GenericEMCBytesDecoder implements EMCBytesDecoder {\n    protected boolean isQualified ( byte[] buf, String szSignature ) throws IOException {\n        if ( buf.length < szSignature.length() ) { // Signature size is minimum.\n            throw new StreamTerminateException( \"StreamEndException:[EMCBytesDecoder] Stream is ended.\" );\n        }\n\n        byte[] des = szSignature.getBytes();  // UMC | UMC-C | UMC-BP\n        return buf[ 4 ] ==  des[ 4 ] && buf[ 5 ] ==  des[ 5 ] && buf[ 6 ] ==  des[ 6 ];\n    }\n\n    @Override\n    public UMCHead decode( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException {\n        if ( this.isQualified( buf, UMCHeadV1.ProtocolSignature ) ) {\n            return UMCHeadV1.decode( buf, UMCHeadV1.ProtocolSignature, extraHeadCoder );\n        }\n        else if ( this.isQualified( buf, UMCCHeadV1.ProtocolSignature ) ) {\n            return UMCCHeadV1.decode( buf, UMCCHeadV1.ProtocolSignature, extraHeadCoder );\n        }\n\n        return null;\n    }\n\n    @Override\n    public UMCHead decodeIntegrated( byte[] buf, ExtraHeadCoder extraHeadCoder ) throws IOException {\n        UMCHead head = this.decode( buf, extraHeadCoder );\n\n        byte[] headBuf = new byte[ head.getExtraHeadLength() ];\n        int headSize = head.sizeof();\n        System.arraycopy( buf, headSize, headBuf, 0, head.getExtraHeadLength() );\n\n        if ( buf.length < head.getExtraHeadLength() ) {\n            throw new StreamTerminateException(\"[UMCProtocol] Buffer is not long enough.\");\n        }\n\n        try {\n            Object jo = extraHeadCoder.getDecoder().decode( head, headBuf );\n            AbstractUMCHead.setExtraHeadExplicitly( head, jo );\n        }\n        catch ( JSONException e ) {\n            throw new 
IOException(\" [UMCProtocol] Illegal protocol head.\");\n        }\n\n        return head;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/IdleFirstBalanceStrategy.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.hydra.umc.io.IOLoadBalanceStrategy;\n\npublic class IdleFirstBalanceStrategy implements IOLoadBalanceStrategy {\n    public IdleFirstBalanceStrategy() {\n\n    }\n\n    @Override\n    public boolean matched( Object condition ) {\n        ChannelControlBlock ccb = (ChannelControlBlock) condition;\n        return ccb.getChannelStatus().isIdle();\n    }\n\n    @Override\n    public boolean readPriorityMatched( Object condition ) {\n        return this.matched( condition );\n    }\n\n    @Override\n    public boolean writePriorityMatched( Object condition ) {\n        return this.matched( condition );\n    }\n\n    public IdleFirstBalanceStrategy clone() {\n        IdleFirstBalanceStrategy clone;\n        try {\n            clone = (IdleFirstBalanceStrategy) super.clone();\n        }\n        catch ( CloneNotSupportedException e ) {\n            throw new InternalError(e);\n        }\n\n        return clone;\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/InformMessage.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic interface InformMessage extends UMCMessage {\n    @Override\n    default InformMessage evinceInformMessage() {\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MappedChannelPool.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.Collection;\nimport java.util.Map;\n\npublic interface MappedChannelPool extends ChannelPool {\n    Map getPooledMap();\n\n    @Override\n    default Collection getPooledChannels() {\n        return this.getPooledMap().values();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Medium.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\n\npublic interface Medium extends Pinenut {\n    Object getNativeMessageSource();\n\n    OutputStream getOutputStream();\n\n    InputStream getInputStream();\n\n    default byte[] receive( int nLength ) throws IOException {\n        return this.getInputStream().readNBytes( nLength );\n    }\n\n    default void send   ( byte[] bytes, int off, int n ) throws IOException {\n        this.getOutputStream().write( bytes, off, n );\n    }\n\n    default void send   ( byte[] bytes ) throws IOException {\n        this.getOutputStream().write( bytes, 0, bytes.length );\n    }\n\n    String sourceName();\n\n    void release();\n\n    MessageNodus getMessageNode();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MediumTerminationException.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic class MediumTerminationException extends UMCServiceException {\n    public MediumTerminationException() {\n        super();\n    }\n\n    public MediumTerminationException( String message ) {\n        super(message);\n    }\n\n    public MediumTerminationException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public MediumTerminationException( Throwable cause ) {\n        super(cause);\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Message.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Message extends Pinenut {\n    long getMessageLength();\n\n    default long queryMessageLength(){\n        return this.getMessageLength();\n    }\n\n    void release();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MessageNode.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.system.Hydrogen;\n\npublic interface MessageNode extends Processum, MessageNodus {\n\n    @Override\n    Hydrogen parentSystem();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MessageNodus.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit;\n\npublic interface MessageNodus extends Messagus {\n\n    ExtraHeadCoder       getExtraHeadCoder();\n\n    ErrorMessageAudit    getErrorMessageAudit();\n\n    void                 setErrorMessageAudit( ErrorMessageAudit audit );\n\n    MsgNodeConfig        getMessageNodeConfig();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MessageStereotypes.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MessageStereotypes extends Pinenut {\n    Class<? >  putType();\n\n    Class<? > postBytesType();\n\n    Class<? > postStreamType();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Messagus.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.concurrent.atomic.AtomicInteger;\n\nimport com.pinecone.framework.system.regimentation.Nodus;\n\npublic interface Messagus extends Nodus {\n\n    AtomicInteger LocalNodeIdAllocator = new AtomicInteger( 0 );\n\n    static int nextLocalId() {\n        return MessageNodus.LocalNodeIdAllocator.getAndIncrement();\n    }\n\n    long getMessageNodeId();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Messenger.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umc.wolf.client.ClientConnectArguments;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\npublic interface Messenger extends MessageNode {\n    UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered, long nWaitTime ) throws IOException;\n\n    ClientConnectArguments getConnectionArguments();\n\n    UMCTExpressHandler getAsyncMsgHandler();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MessengerChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\nimport java.util.concurrent.locks.Lock;\n\npublic interface MessengerChannelControlBlock extends ChannelControlBlock {\n    @Override\n    default Messenger getParentMessageNode(){\n        return (Messenger) this.getChannel().getParentMessageNode();\n    }\n\n    Lock              getSynRequestLock();\n\n    UMCMessage        sendSyncMsg( UMCMessage message, boolean bNoneBuffered, long nWaitTime ) throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MsgNodeConfig.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MsgNodeConfig extends Pinenut {\n    long getSyncWaitingMillis();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MsgProtocol.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MsgProtocol extends Pinenut {\n    Medium getMessageSource();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/MultiClientChannelRegistry.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MultiClientChannelRegistry<CID > extends Pinenut {\n    int size();\n\n    void clear();\n\n    boolean isEmpty();\n\n    void register( CID id, ChannelControlBlock controlBlock );\n\n    void deregister( CID id, ChannelControlBlock controlBlock );\n\n    void deregister( CID id );\n\n    ChannelPool getPool( CID id );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Recipient.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.hydra.umc.wolf.server.ServerConnectArguments;\n\npublic interface Recipient extends MessageNode {\n\n    int getMaximumConnections();\n\n    ServerConnectArguments getConnectionArguments();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/RecipientChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic interface RecipientChannelControlBlock extends ChannelControlBlock {\n    @Override\n    default Recipient getParentMessageNode(){\n        return (Recipient) this.getChannel().getParentMessageNode();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/RegisterChannelPool.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic interface RegisterChannelPool extends MappedChannelPool {\n    long getMajorWaitTimeout();\n\n    RegisterChannelPool setMajorWaitTimeout( long nMillisTimeout );\n\n    int getMaximumPoolSize();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/Status.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic enum Status {\n    SwitchingProtocols      ( 101, \"SwitchingProtocols\"  ),\n\n    // For messenger(a.k.a, `Client`) or recipient (a.k.a, `Server`)\n    // The confirmed and successful session\n    OK                      ( 200, \"OK\"                  ),\n\n    // For messenger(a.k.a, `Client`) or recipient (a.k.a, `Server`)\n    // BadRequest or BadResponse\n    BadSession              ( 400, \"BadSession\"          ),\n    Unauthorized            ( 401, \"Unauthorized\"        ),\n    IllegalMessage          ( 402, \"IllegalMessage\"      ),\n    Forbidden               ( 403, \"Forbidden\"           ),\n    MappingNotFound         ( 404, \"MappingNotFound\"     ),\n\n\n    InternalError           ( 500, \"InternalError\"       ),\n    NotImplemented          ( 501, \"NotImplemented\"      ),\n    BadGateway              ( 502, \"BadGateway\"          ),\n    Unavailable             ( 503, \"Unavailable\"         ),\n    GatewayTimeout          ( 504, \"GatewayTimeout\"      ),\n    VersionNotSupported     ( 505, \"VersionNotSupported\" ),\n    TooManyConnections      ( 506, \"TooManyConnections\"  );\n\n\n    private final int value;\n\n    private final String name;\n\n    Status( int value, String name ){\n        this.value = value;\n        this.name  = name;\n    }\n\n    public String getName(){\n        return this.name;\n    }\n\n    public int getValue() {\n        return this.value;\n    }\n\n    public short getShortValue() {\n        return (short) this.value;\n    }\n\n    public static Status asValue( int val ) {\n        for ( Status type : Status.values() ) {\n            if ( type.getValue() == val ) {\n                return type;\n            }\n        }\n        throw new IllegalArgumentException( \"Invalid status value: \" + val );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/StreamTerminateException.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.IOException;\n\npublic class StreamTerminateException extends IOException implements Pinenut {\n    public StreamTerminateException() {\n        super();\n    }\n\n    public StreamTerminateException( String message ) {\n        super(message);\n    }\n\n    public StreamTerminateException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public StreamTerminateException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/SyncFairChannelPool.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic interface SyncFairChannelPool extends FairChannelPool {\n    ChannelControlBlock nextSyncChannel( long nMillisTimeout, boolean bEager ) ;\n\n    default ChannelControlBlock nextSyncChannel( long nMillisTimeout ) {\n        return this.nextSyncChannel( nMillisTimeout, false );\n    }\n\n    default ChannelControlBlock nextSyncChannel() {\n        return this.nextSyncChannel( 5000 );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/TransferMessage.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic interface TransferMessage extends UMCMessage {\n    @Override\n    default TransferMessage evinceTransferMessage() {\n        return this;\n    }\n\n    Object      getBody() ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCCHead.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\npublic interface UMCCHead extends UMCHead {\n    void enableField( int at );\n\n    void disableField( int at );\n\n    void enableField( String fieldName );\n\n    void disableField( String fieldName );\n\n    long getFieldIndexBitmap();\n\n    long evalIndexBitmap();\n\n    void setBodyLength ( long length );\n\n\n    void setExtraHead            ( JSONObject jo           ) ;\n\n    void setExtraHead            ( Map<String,Object > jo  ) ;\n\n    void setExtraHead            ( Object o                ) ;\n\n    void setExtraEncode          ( ExtraEncode encode      ) ;\n\n    UMCCHead applyExHead         ( Map<String, Object > jo ) ;\n\n    void applyExtraHeadCoder     ( ExtraHeadCoder coder    ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCCHeadV1.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.BitSet64;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.framework.util.datetime.compact.CompactTimeUnit;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.JSONString;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\n/**\n *  Pinecone Ursus For Java UMCC[ Uniform Message Control - Compacted ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Uniform Message Control Protocol - Base-Mutable - Compacted [UMC-C]\n *  统一消息控制协议-紧凑变基分协议\n *  **********************************************************\n */\npublic class UMCCHeadV1 extends UMCHeadV1 implements UMCCHead {\n    public static final String     ProtocolSignature = \"UMC-C/\" + UMCHeadV1.ProtocolVersion;\n\n    public static final int        BitmapAt          = 1;\n\n    public static final int        BitmapBytes       = Long.BYTES;\n\n    protected long                 fieldIndexBitmap                           ; // :1 sizeof( int64 ) = 8, Field index-control bitmap.\n\n\n    public static final HeadField FieldSignature       = new HeadField( \"signature\"       , 0, ProtocolSignature.length() );\n    public static final HeadField FieldExtraHeadLength = new HeadField( \"extraHeadLength\" , 1, Integer.BYTES );\n    public static final HeadField FieldExtraEncode     = new HeadField( \"extraEncode\"     , 2, Byte.BYTES    );\n    public static final HeadField FieldBodyLength      = new HeadField( \"bodyLength\"      , 3, Long.BYTES    );\n    public static final HeadField FieldKeepAlive       
= new HeadField( \"keepAlive\"       , 4, Integer.BYTES );\n    public static final HeadField FieldMethod          = new HeadField( \"method\"          , 5, Byte.BYTES    );\n    public static final HeadField FieldStatus          = new HeadField( \"status\"          , 6, Short.BYTES   );\n    public static final HeadField FieldControlBits     = new HeadField( \"controlBits\"     , 7, Integer.BYTES );\n    public static final HeadField FieldIdentityId      = new HeadField( \"identityId\"      , 8, Long.BYTES    );\n    public static final HeadField FieldSessionId       = new HeadField( \"sessionId\"       , 9, Long.BYTES    );\n\n\n    static final HeadField[] HeadFieldsMap   = new HeadField[ UMCHeadV1.HeadFieldsSize ];\n\n    static final HeadField[] HeadFieldsIndex = {\n            FieldSignature,\n            FieldExtraHeadLength,\n            FieldExtraEncode,\n            FieldBodyLength,\n            FieldKeepAlive,\n            FieldMethod,\n            FieldStatus,\n            FieldControlBits,\n            FieldIdentityId,\n            FieldSessionId\n    };\n\n    static {\n        System.arraycopy( HeadFieldsIndex, 0, HeadFieldsMap, 0, HeadFieldsSize );\n        Arrays.sort( HeadFieldsMap, Comparator.comparing(a -> a.name) );\n    }\n\n    public static HeadField searchField( String fieldName ) {\n        int low = 0;\n        int high = HeadFieldsMap.length - 1;\n\n        while ( low <= high ) {\n            int mid = (low + high) >>> 1;\n            int cmp = HeadFieldsMap[ mid ].name.compareTo(fieldName);\n\n            if ( cmp == 0 ) {\n                return HeadFieldsMap[ mid ];\n            }\n            else if ( cmp < 0 ) {\n                low = mid + 1;\n            }\n            else {\n                high = mid - 1;\n            }\n        }\n\n        return null;\n    }\n\n\n    protected void enableDefaultFields() {\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldSignature.index );\n        
this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraHeadLength.index );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraEncode.index );\n    }\n\n    public UMCCHeadV1(  ) {\n        this( UMCCHeadV1.ProtocolSignature );\n    }\n\n    public UMCCHeadV1( String szSignature ) {\n        super( szSignature, UMCMethod.INFORM );\n        this.enableDefaultFields();\n    }\n\n    public UMCCHeadV1( String szSignature, UMCMethod umcMethod ) {\n        super( szSignature, umcMethod, 0 );\n        this.enableDefaultFields();\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldMethod.index );\n    }\n\n    public UMCCHeadV1( String szSignature, UMCMethod umcMethod, long fieldIndexBitmap ) {\n        super( szSignature, umcMethod, 0 );\n        this.fieldIndexBitmap = fieldIndexBitmap;\n    }\n\n\n\n    @Override\n    public int sizeof() {\n        int totalSize = BitmapBytes;\n\n        for ( int i = 0; i < UMCHeadV1.HeadFieldsSize; ++i ) {\n            if ( ( this.fieldIndexBitmap & (1L << i) ) != 0 ) {\n                totalSize += HeadFieldsIndex[ i ].sizeof;\n            }\n        }\n\n        return totalSize;\n    }\n\n    @Override\n    public int fieldsSize() {\n        return BitSet64.existence( this.fieldIndexBitmap );\n    }\n\n    @Override\n    public long getFieldIndexBitmap() {\n        return this.fieldIndexBitmap;\n    }\n\n    @Override\n    public long evalIndexBitmap() {\n        if ( this.nExtraHeadLength > 0 ) {\n            this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraHeadLength.index );\n            this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraEncode.index );\n        }\n\n        if ( this.nBodyLength > 0 ) {\n            this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldBodyLength.index );\n        }\n\n        if ( this.nKeepAlive != -1 ) {\n            this.fieldIndexBitmap = 
BitSet64.setBit( this.fieldIndexBitmap, FieldKeepAlive.index );\n        }\n\n        if ( this.method != null && this.method != UMCMethod.UNDEFINED && this.method != UMCMethod.INFORM ) {\n            this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldMethod.index );\n        }\n\n        if ( this.status != Status.OK ) {\n            this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldStatus.index );\n        }\n\n        if ( this.controlBits != 0 ) {\n            this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldControlBits.index );\n        }\n\n        if ( this.identityId != 0 ) {\n            this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldIdentityId.index );\n        }\n\n        if ( this.sessionId != 0 ) {\n            this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldSessionId.index );\n        }\n\n        return this.fieldIndexBitmap;\n    }\n\n    @Override\n    public void enableField( int at ) {\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, at );\n    }\n\n    @Override\n    public void disableField( int at ) {\n        this.fieldIndexBitmap = BitSet64.clearBit( this.fieldIndexBitmap, at );\n    }\n\n    @Override\n    public void enableField( String fieldName ) {\n        HeadField field = UMCCHeadV1.searchField( fieldName );\n        if ( field == null ) {\n            throw new IllegalArgumentException( fieldName + \" is not existed.\" );\n        }\n\n        this.enableField( field.index );\n    }\n\n    @Override\n    public void disableField( String fieldName ) {\n        HeadField field = UMCCHeadV1.searchField( fieldName );\n        if ( field == null ) {\n            throw new IllegalArgumentException( fieldName + \" is not existed.\" );\n        }\n\n        this.disableField( field.index );\n    }\n\n\n\n\n\n\n\n    protected void enableExtraHead() {\n        this.fieldIndexBitmap = BitSet64.setBit( 
this.fieldIndexBitmap, FieldExtraHeadLength.index );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldExtraEncode.index );\n    }\n\n    @Override\n    public void setExtraHead               ( JSONObject jo           ) {\n        super.setExtraHead( jo );\n        this.enableExtraHead();\n    }\n\n    @Override\n    public void setExtraHead               ( Map<String,Object > jo  ) {\n        super.setExtraHead( jo );\n        this.enableExtraHead();\n    }\n\n    @Override\n    public void setExtraHead               ( Object o                ) {\n        super.setExtraHead( o );\n        this.enableExtraHead();\n    }\n\n    @Override\n    public void setExtraEncode             ( ExtraEncode encode      ) {\n        super.setExtraEncode( encode );\n        this.transApplyExHead();\n    }\n\n    public UMCCHead applyExHead            ( Map<String, Object > jo ) {\n        super.applyExHead( jo );\n        this.enableExtraHead();\n        return this;\n    }\n\n    @Override\n    protected void transApplyExHead        (                         ) {\n        if ( this.dyExtraHead != null && this.extraHeadCoder == null ) {\n            throw new IllegalStateException( \"ExtraHeadCoder is null.\" );\n        }\n\n        super.transApplyExHead();\n        this.enableExtraHead();\n    }\n\n    @Override\n    public void applyExtraHeadCoder        ( ExtraHeadCoder coder    ) {\n        super.applyExtraHeadCoder( coder );\n        this.enableExtraHead();\n    }\n\n\n\n\n\n    @Override\n    public void setBodyLength              ( long length                              ) {\n        super.setBodyLength( length );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldBodyLength.index );\n    }\n\n    @Override\n    public void setKeepAlive               ( int nKeepAliveMills                      ) {\n        super.setKeepAlive( nKeepAliveMills );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, 
FieldKeepAlive.index );\n    }\n\n    @Override\n    public void setKeepAlive               ( int nKeepAlive, CompactTimeUnit timeUnit ) {\n        super.setKeepAlive( nKeepAlive, timeUnit );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldKeepAlive.index );\n    }\n\n    @Override\n    protected void setMethod               ( UMCMethod umcMethod                      ) {\n        super.setMethod( umcMethod );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldMethod.index );\n    }\n\n    @Override\n    public void setStatus                  ( Status status                            ) {\n        super.setStatus( status );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldStatus.index );\n    }\n\n\n\n    @Override\n    public void setControlBits   ( int controlBits       ) {\n        super.setControlBits( controlBits );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldControlBits.index );\n    }\n\n    @Override\n    public void setIdentityId    ( long identityId        ) {\n        super.setIdentityId( identityId );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldIdentityId.index );\n    }\n\n    @Override\n    public void setSessionId     ( long sessionId         ) {\n        super.setSessionId( sessionId );\n        this.fieldIndexBitmap = BitSet64.setBit( this.fieldIndexBitmap, FieldSessionId.index );\n    }\n\n\n    @Override\n    public String toJSONString() {\n        String szExtraHead = this.jsonifyExtraHead();\n\n\n\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"Signature\"        , this.getSignature()                                             ),\n\n                new KeyValue<>( \"FieldIndexBitmap\" , JSONString.wrapRaw(\n                        BitSet64.toIndexJSONString( this.fieldIndexBitmap ) )\n                ),\n\n                new KeyValue<>( 
\"ExtraHeadLength\"  , this.getExtraHeadLength()                                       ),\n                new KeyValue<>( \"ExtraEncode\"      , this.getExtraEncode().getName()                                 ),\n                new KeyValue<>( \"BodyLength\"       , this.getBodyLength()                                            ),\n                new KeyValue<>( \"KeepAlive\"        , this.getKeepAlive()                                             ),\n                new KeyValue<>( \"Method\"           , this.getMethod()                                                ),\n                new KeyValue<>( \"Status\"           , this.getStatus().getName()                                      ),\n                new KeyValue<>( \"ControlBits\"      , \"0x\" + Integer.toUnsignedString( this.getControlBits(),16 )  ),\n                new KeyValue<>( \"IdentityId\"       , this.getIdentityId()                                            ),\n                new KeyValue<>( \"SessionId\"        , this.getSessionId()                                             ),\n                new KeyValue<>( \"ExtraHead\"        , szExtraHead                                                     ),\n        } );\n    }\n\n    @Override\n    public EncodePair bytesEncode( ExtraHeadCoder extraHeadCoder ) {\n        return UMCCHeadV1.encode( this, extraHeadCoder );\n    }\n\n    public static class HeadField {\n        public final String name;\n\n        public final int index;\n\n        public final int sizeof;\n\n        HeadField( String name, int index, int sizeof ) {\n            this.name = name;\n            this.index = index;\n            this.sizeof = sizeof;\n        }\n    }\n\n\n    public static EncodePair encode( UMCCHead umcHead, ExtraHeadCoder extraHeadCoder ) {\n        UMCCHeadV1 head = (UMCCHeadV1) umcHead;\n        head.applyExtraHeadCoder( extraHeadCoder );\n        head.transApplyExHead();\n\n        int extraHeadLength = head.getExtraHeadLength();\n\n        
ByteBuffer byteBuffer = ByteBuffer.allocate( UMCHeadV1.ReadBufferSize + extraHeadLength );\n        byteBuffer.order( UMCHeadV1.BinByteOrder );\n\n        int nBufLength = head.getSignatureLength();\n        byteBuffer.put( head.getSignature().getBytes() );\n\n        byteBuffer.putLong( head.fieldIndexBitmap );\n        nBufLength += Long.BYTES;\n\n        for ( int i = BitmapAt; i < HeadFieldsIndex.length; ++i ) {\n            if ( ( head.fieldIndexBitmap & (1L << i) ) != 0 ) {\n                HeadField field = HeadFieldsIndex[i];\n                switch ( field.index ) {\n                    case 1: { // nExtraHeadLength\n                        byteBuffer.putInt( head.nExtraHeadLength );\n                        nBufLength += Integer.BYTES;\n                        break;\n                    }\n                    case 2: { // extraEncode\n                        byteBuffer.put( head.extraEncode.getByteValue() );\n                        nBufLength += Byte.BYTES;\n                        break;\n                    }\n                    case 3: { // nBodyLength\n                        byteBuffer.putLong( head.nBodyLength );\n                        nBufLength += Long.BYTES;\n                        break;\n                    }\n                    case 4: { // nKeepAlive\n                        byteBuffer.putInt( head.nKeepAlive );\n                        nBufLength += Integer.BYTES;\n                        break;\n                    }\n                    case 5: { // method\n                        byteBuffer.put( head.method.getByteValue() );\n                        nBufLength += Byte.BYTES;\n                        break;\n                    }\n                    case 6: { // status\n                        byteBuffer.putShort( head.status.getShortValue() );\n                        nBufLength += Short.BYTES;\n                        break;\n                    }\n                    case 7: { // controlBits\n                        
byteBuffer.putInt( head.controlBits );\n                        nBufLength += Integer.BYTES;\n                        break;\n                    }\n                    case 8: { // identityId\n                        byteBuffer.putLong( head.identityId );\n                        nBufLength += Long.BYTES;\n                        break;\n                    }\n                    case 9: { // sessionId\n                        byteBuffer.putLong( head.sessionId );\n                        nBufLength += Long.BYTES;\n                        break;\n                    }\n                    default: {\n                        break;\n                    }\n                }\n            }\n        }\n\n        if( head.extraHead == null ) {\n            byteBuffer.put( Bytes.Empty );\n        }\n        else {\n            byteBuffer.put( head.extraHead );\n        }\n        nBufLength += head.getExtraHeadLength();\n\n        return new EncodePair( byteBuffer, nBufLength );\n    }\n\n    public static UMCCHead decode( byte[] buf, String szSignature, ExtraHeadCoder extraHeadCoder ) throws IOException {\n        if ( buf.length < szSignature.length() ) { // Signature size is minimum.\n            throw new StreamTerminateException( \"StreamEndException:[UMC-CProtocol] Stream is ended.\" );\n        }\n\n        int nReadAt = szSignature.length();\n        if ( !Arrays.equals( buf, 0, szSignature.length(), szSignature.getBytes(), 0, szSignature.length() )  ) {\n            throw new IOException( \"[UMC-CProtocol] Illegal protocol signature.\" );\n        }\n\n        UMCCHeadV1 head = new UMCCHeadV1();\n        head.applyExtraHeadCoder( extraHeadCoder );\n\n        head.fieldIndexBitmap = ByteBuffer.wrap( buf, nReadAt, Long.BYTES ).order( BinByteOrder ).getLong();\n        nReadAt += Long.BYTES;\n\n        for ( int i = BitmapAt; i < HeadFieldsIndex.length; ++i ) {\n            if ( ( head.fieldIndexBitmap & (1L << i) ) != 0 ) {\n                HeadField field = 
HeadFieldsIndex[ i ];\n                switch ( field.index ) {\n                    case 1: { // nExtraHeadLength\n                        head.nExtraHeadLength  = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( BinByteOrder ).getInt();\n                        nReadAt += Integer.BYTES;\n                        break;\n                    }\n                    case 2: { // extraEncode\n                        head.extraEncode       = ExtraEncode.asValue( ByteBuffer.wrap( buf, nReadAt, Byte.BYTES ).order( BinByteOrder ).get() );\n                        nReadAt += Byte.BYTES;\n                        break;\n                    }\n                    case 3: { // nBodyLength\n                        head.nBodyLength = ByteBuffer.wrap(buf, nReadAt, Long.BYTES).order( BinByteOrder ).getLong();\n                        nReadAt += Long.BYTES;\n                        break;\n                    }\n                    case 4: { // nKeepAlive\n                        head.nKeepAlive = ByteBuffer.wrap(buf, nReadAt, Integer.BYTES).order( BinByteOrder ).getInt();\n                        nReadAt += Integer.BYTES;\n                        break;\n                    }\n                    case 5: { // method\n                        head.method = UMCMethod.values()[buf[nReadAt]];\n                        nReadAt += Byte.BYTES;\n                        break;\n                    }\n                    case 6: { // status\n                        head.status = Status.asValue(ByteBuffer.wrap(buf, nReadAt, Short.BYTES).order( BinByteOrder ).getShort());\n                        nReadAt += Short.BYTES;\n                        break;\n                    }\n                    case 7: { // controlBits\n                        head.controlBits = ByteBuffer.wrap(buf, nReadAt, Integer.BYTES).order( BinByteOrder ).getInt();\n                        nReadAt += Integer.BYTES;\n                        break;\n                    }\n                    case 8: { // identityId\n   
                     head.identityId = ByteBuffer.wrap(buf, nReadAt, Long.BYTES).order( BinByteOrder ).getLong();\n                        nReadAt += Long.BYTES;\n                        break;\n                    }\n                    case 9: { // sessionId\n                        head.sessionId = ByteBuffer.wrap(buf, nReadAt, Long.BYTES).order( BinByteOrder ).getLong();\n                        nReadAt += Long.BYTES;\n                        break;\n                    }\n                    default: {\n                        break;\n                    }\n                }\n            }\n        }\n\n        return head;\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCChannel.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.wolf.UlfChannelStatus;\n\nimport java.io.IOException;\nimport java.net.SocketAddress;\n\npublic interface UMCChannel extends Pinenut {\n    Thread         getAffiliateThread();\n\n    // Target address.\n    SocketAddress  getAddress();\n\n    SocketAddress  remoteAddress();\n\n    SocketAddress  localAddress();\n\n    void           reconnect() throws IOException;\n\n    void           reconnect( long mils ) throws IOException;\n\n    Object         getNativeHandle();\n\n    ChannelStatus  getChannelStatus();\n\n    void           setChannelStatus( UlfChannelStatus status );\n\n    MessageNode    getParentMessageNode();\n\n    Object         getChannelID() ;\n\n    long           getIdentityID();\n\n    void           release();\n\n    void           close();\n\n    boolean        isShutdown();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCConstants.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic final class UMCConstants {\n\n    public static final long DefaultSyncWaitingMillis = 600000; // 10 * 60 * 1000 [s]\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCException.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UMCException extends Exception implements Pinenut {\n    public UMCException() {\n        super();\n    }\n\n    public UMCException( String message ) {\n        super(message);\n    }\n\n    public UMCException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public UMCException( Throwable cause ) {\n        super(cause);\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCHead.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.datetime.compact.CompactTimeUnit;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\npublic interface UMCHead extends EMCHead {\n    ExtraHeadCoder getExtraHeadCoder();\n\n    UMCMethod       getMethod();\n\n    long            getBodyLength();\n\n    long            getKeepAlive();\n\n    int             getCompactKeepAlive();\n\n    long            getSessionId();\n\n    Status          getStatus();\n\n    ExtraEncode     getExtraEncode();\n\n    int             getControlBits();\n\n    long            getIdentityId();\n\n    byte[]          getExtraHeadBytes();\n\n    Map<String, Object > evalMapExtraHead() ;\n\n    Map<String, Object > getMapExtraHead() ;\n\n    Object getExtraHead();\n\n    Object getExHeaderVal( String key );\n\n    void putExHeaderVal( String key, Object val ) throws IllegalArgumentException;\n\n\n\n    void setStatus        ( Status status                             );\n\n    void setKeepAlive     ( int nKeepAliveMills                       );\n\n    void setKeepAlive     ( int nKeepAlive, CompactTimeUnit timeUnit  );\n\n    void setControlBits   ( int controlBits                           );\n\n    void setIdentityId    ( long identityId                           );\n\n    void setSessionId     ( long sessionId                            );\n\n\n\n\n    void release();\n\n    default AbstractUMCHead inface() {\n        return (AbstractUMCHead) this;\n    }\n\n\n\n\n    class EncodePair {\n        public final ByteBuffer byteBuffer;\n        public final int        bufLength;\n\n        public EncodePair( ByteBuffer byteBuffer, int bufLength ) {\n            this.byteBuffer = byteBuffer;\n            this.bufLength  = bufLength;\n        }\n\n        public byte[] getBytes() {\n            return Arrays.copyOfRange( this.byteBuffer.array(), 0, this.bufLength );\n  
      }\n    }\n\n    EncodePair bytesEncode( ExtraHeadCoder extraHeadCoder ) ;\n\n    default EncodePair bytesEncode() {\n        return this.bytesEncode( this.getExtraHeadCoder() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCHeadV1.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport com.pinecone.framework.system.prototype.ObjectiveBean;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.datetime.compact.CompactTimeUnit;\nimport com.pinecone.framework.util.datetime.compact.CompactTimeUnit32;\nimport com.pinecone.framework.util.datetime.compact.CompactTimestamp32;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\nimport java.io.IOException;\nimport java.nio.ByteBuffer;\nimport java.nio.ByteOrder;\nimport java.util.Arrays;\nimport java.util.Map;\n\npublic class UMCHeadV1 extends AbstractUMCHead implements UMCHead {\n    public static final String     ProtocolVersion   = \"1.1\";\n    public static final String     ProtocolSignature = \"UMC/\" + UMCHeadV1.ProtocolVersion;\n    public static final int        StructBlockSize   = Integer.BYTES + Byte.BYTES + Long.BYTES + Integer.BYTES + Byte.BYTES + Short.BYTES + Integer.BYTES + Long.BYTES + Long.BYTES;\n    public static final int        HeadBlockSize     = UMCHeadV1.ProtocolSignature.length() + UMCHeadV1.StructBlockSize;\n    public static final ByteOrder  BinByteOrder      = ByteOrder.LITTLE_ENDIAN ;// Using x86, C/C++\n    public static final int        HeadFieldsSize    = 10;\n    public static final int        ReadBufferSize    = 64;\n\n\n    protected String                 szSignature                                ; // :0\n    protected int                    nExtraHeadLength  = 2                      ; // :1 sizeof( int32 ) = 4\n    protected ExtraEncode            extraEncode       = ExtraEncode.Undefined  ; // :2 sizeof( ExtraEncode/byte ) = 1\n\n    protected long                   nBodyLength       = 0                      ; // :3 sizeof( int64 ) = 8\n    protected int                    nKeepAlive        = -1                     ; // 
:4 sizeof( int32 ) = 4, CompactTimestamp32, [-1 for forever, 0 for off, others for millis(default, or seconds/hours/etc).]\n    protected UMCMethod              method                                     ; // :5 sizeof( UMCMethod/byte ) = 1\n    protected Status                 status            = Status.OK              ; // :6 sizeof( Status/Short ) = 2\n    protected int                    controlBits       = 0                      ; // :7 sizeof( int32 ) = 4, Custom control bytes.\n    protected long                   identityId        = 0                      ; // :8 sizeof( int64 ) = 8, Client / Node ID\n    protected long                   sessionId         = 0                      ; // :9 sizeof( int64 ) = 8\n\n    protected byte[]                 extraHead         = {}                     ;\n    protected Object                 dyExtraHead                                ;\n    protected ExtraHeadCoder         extraHeadCoder                             ;\n\n\n    public UMCHeadV1(  ) {\n        this( UMCHeadV1.ProtocolSignature, UMCMethod.INFORM );\n    }\n\n    public UMCHeadV1( String szSignature ) {\n        this( szSignature, UMCMethod.INFORM );\n    }\n\n    public UMCHeadV1( String szSignature, int controlBits ) {\n        this( szSignature, UMCMethod.INFORM, controlBits );\n    }\n\n    public UMCHeadV1( String szSignature, UMCMethod umcMethod ) {\n        this( szSignature, umcMethod, 0 );\n    }\n\n    public UMCHeadV1( String szSignature, UMCMethod umcMethod, int controlBits ) {\n        this( szSignature, umcMethod, new LinkedTreeMap<>(), controlBits );\n    }\n\n    public UMCHeadV1( String szSignature, UMCMethod umcMethod, Object ex, int controlBits ) {\n        this.szSignature       = szSignature;\n        this.method            = umcMethod;\n        this.dyExtraHead       = ex;\n        this.controlBits       = controlBits;\n    }\n\n    UMCHeadV1( String szSignature, UMCMethod umcMethod, Map<String,Object > joEx, int controlBits ) {\n        
this( szSignature, umcMethod, (Object) joEx, controlBits );\n    }\n\n    UMCHeadV1( String szSignature, UMCMethod umcMethod, Map<String,Object > joEx ) {\n        this( szSignature, umcMethod, (Object) joEx, 0 );\n    }\n\n\n\n    @Override\n    public int sizeof() {\n        return UMCHeadV1.HeadBlockSize;\n    }\n\n    @Override\n    public int fieldsSize() {\n        return UMCHeadV1.HeadFieldsSize;\n    }\n\n\n\n    @Override\n    protected void setSignature            ( String signature                         ) {\n        this.szSignature = signature;\n    }\n\n    @Override\n    protected void setBodyLength           ( long length                              ) {\n        this.nBodyLength = length;\n    }\n\n    @Override\n    public void setKeepAlive               ( int nKeepAliveMills                      ) {\n        this.nKeepAlive = nKeepAliveMills;\n    }\n\n    @Override\n    public void setKeepAlive               ( int nKeepAlive, CompactTimeUnit timeUnit ) {\n        this.nKeepAlive = CompactTimestamp32.encode( nKeepAlive, (CompactTimeUnit32) timeUnit );\n    }\n\n    @Override\n    protected void setMethod               ( UMCMethod umcMethod                      ) {\n        this.method = umcMethod;\n        if ( this.method == UMCMethod.INFORM ) {\n            this.nBodyLength = 0;\n        }\n    }\n\n    @Override\n    protected void setExtraEncode          ( ExtraEncode encode                       ) {\n        this.extraEncode = encode;\n    }\n\n\n\n    @Override\n    public void setControlBits   ( int controlBits       ) {\n        this.controlBits = controlBits;\n    }\n\n    @Override\n    public void setIdentityId    ( long identityId        ) {\n        this.identityId = identityId;\n    }\n\n    @Override\n    public void setSessionId     ( long sessionId         ) {\n        this.sessionId = sessionId;\n    }\n\n\n\n\n\n    @Override\n    protected void setExtraHead            ( JSONObject jo          ) {\n        this.dyExtraHead = 
jo.getMap();\n    }\n\n    @Override\n    protected void setExtraHead            ( Map<String,Object > jo ) {\n        this.dyExtraHead = jo;\n    }\n\n    @Override\n    protected void setExtraHead            ( Object o               ) {\n        this.dyExtraHead = o;\n        if( o == null ) {\n            this.nExtraHeadLength = 0;\n        }\n    }\n\n    @Override\n    protected void transApplyExHead        (                        ) {\n        if ( this.dyExtraHead != null ) {\n            this.extraHead         = this.extraHeadCoder.getEncoder().encode( this, this.dyExtraHead );\n            this.nExtraHeadLength  = this.extraHead.length;\n        }\n        else {\n            if( this.extraEncode == ExtraEncode.JSONString ) {\n                this.extraHead  = \"{}\".getBytes();\n            }\n            else if( this.extraEncode == ExtraEncode.Prototype ) {\n                this.extraHead         = null;\n                this.nExtraHeadLength  = 0;\n                return;\n            }\n            else if( this.extraEncode == ExtraEncode.Iussum ) {\n                this.extraHead         = new byte[ 0 ];\n                this.nExtraHeadLength  = 0;\n                return;\n            }\n            else {\n                this.dyExtraHead = this.extraHeadCoder.newExtraHead();\n                this.extraHead   = this.extraHeadCoder.getEncoder().encode( this, this.dyExtraHead );\n            }\n        }\n\n        this.nExtraHeadLength  = this.extraHead.length;\n    }\n\n    @Override\n    protected void applyExtraHeadCoder     ( ExtraHeadCoder coder   ) {\n        this.extraHeadCoder = coder;\n\n        if( this.extraEncode == ExtraEncode.Undefined ) {\n            this.extraEncode = coder.getDefaultEncode();\n        }\n    }\n\n\n\n    @Override\n    public void            setStatus ( Status status ) {\n        this.status = status;\n    }\n\n    @Override\n    public ExtraHeadCoder  getExtraHeadCoder() {\n        return this.extraHeadCoder;\n    
}\n\n    @Override\n    public String          getSignature() {\n        return this.szSignature;\n    }\n\n    @Override\n    public int             getSignatureLength() {\n        return this.getSignature().length();\n    }\n\n    @Override\n    public UMCMethod       getMethod() {\n        return this.method;\n    }\n\n    @Override\n    public int             getExtraHeadLength() {\n        return this.nExtraHeadLength;\n    }\n\n    @Override\n    public long            getBodyLength() {\n        return this.nBodyLength;\n    }\n\n    @Override\n    public long            getKeepAlive() {\n        return CompactTimestamp32.toMilliseconds( this.nKeepAlive );\n    }\n\n    @Override\n    public int             getCompactKeepAlive() {\n        return this.nKeepAlive;\n    }\n\n    @Override\n    public long            getSessionId() {\n        return this.sessionId;\n    }\n\n    @Override\n    public Status          getStatus() {\n        return this.status;\n    }\n\n    @Override\n    public ExtraEncode     getExtraEncode() {\n        return this.extraEncode;\n    }\n\n    @Override\n    public int             getControlBits() {\n        return this.controlBits;\n    }\n\n    @Override\n    public long            getIdentityId() {\n        return this.identityId;\n    }\n\n    @Override\n    public byte[]          getExtraHeadBytes() {\n        return this.extraHead ;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Map<String, Object > evalMapExtraHead() {\n        if( this.dyExtraHead instanceof Map ) {\n            return (Map) this.dyExtraHead;\n        }\n        return ( new ObjectiveBean( this.dyExtraHead ) ).toMap();\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Map<String, Object > getMapExtraHead() {\n        if( this.dyExtraHead instanceof Map ) {\n            return (Map) this.dyExtraHead;\n        }\n        return null;\n    }\n\n    @Override\n    public Object getExtraHead() {\n        
return this.dyExtraHead;\n    }\n\n    @Override\n    public void putExHeaderVal( String key, Object val ) throws IllegalArgumentException {\n        if( this.dyExtraHead instanceof Map ) {\n            this.getMapExtraHead().put( key, val );\n        }\n        else {\n            ReflectionUtils.beanSet( this.dyExtraHead, key, val );\n        }\n    }\n\n    @Override\n    public Object getExHeaderVal( String key ) {\n        if( this.dyExtraHead instanceof Map ) {\n            return this.getMapExtraHead().get( key );\n        }\n        else {\n            return ReflectionUtils.beanGet( this.dyExtraHead, key );\n        }\n    }\n\n    protected UMCHead applyExHead( Map<String, Object > jo      ) {\n        if( !( this.dyExtraHead instanceof Map ) && this.dyExtraHead != null ) {\n            throw new IllegalArgumentException( \"Current extra headed is not dynamic.\" );\n        }\n\n        if( this.getMapExtraHead() == null || this.getMapExtraHead().size() == 0 ) {\n            this.setExtraHead( jo );\n        }\n        else {\n            if( jo.size() > this.getMapExtraHead().size() ) {\n                jo.putAll( this.getMapExtraHead() );\n                this.setExtraHead( jo );\n            }\n            else {\n                this.getMapExtraHead().putAll( jo );\n            }\n        }\n        return this;\n    }\n\n    public UMCHead receiveSet( Map<String, Object > joExtraHead ) {\n        this.dyExtraHead = joExtraHead;\n        return this;\n    }\n\n    @Override\n    public void release() {\n        // Help GC\n        this.dyExtraHead = null;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n\n    @Override\n    public EncodePair bytesEncode( ExtraHeadCoder extraHeadCoder ) {\n        return UMCHeadV1.encode( this, extraHeadCoder );\n    }\n\n    public static EncodePair encode( UMCHead umcHead, ExtraHeadCoder extraHeadCoder ) {\n        UMCHeadV1 head = (UMCHeadV1) umcHead;\n        
head.applyExtraHeadCoder( extraHeadCoder );\n        head.transApplyExHead();\n\n        ByteBuffer byteBuffer = ByteBuffer.allocate( UMCHeadV1.ReadBufferSize + head.getExtraHeadLength() );\n        byteBuffer.order( UMCHeadV1.BinByteOrder );\n\n        int nBufLength = head.getSignatureLength();\n        byteBuffer.put( head.getSignature().getBytes() );\n        //byteBuffer.put( (byte) ' ' );\n        //++nBufLength;\n\n        byteBuffer.putInt( head.nExtraHeadLength );\n        nBufLength += Integer.BYTES;\n\n        byteBuffer.put( head.extraEncode.getByteValue() );\n        nBufLength += Byte.BYTES;\n\n\n\n        byteBuffer.putLong( head.nBodyLength );\n        nBufLength += Long.BYTES;\n\n        byteBuffer.putInt( head.nKeepAlive );\n        nBufLength += Integer.BYTES;\n\n        byteBuffer.put( head.method.getByteValue() );\n        nBufLength += Byte.BYTES;\n\n        byteBuffer.putShort( head.status.getShortValue() );\n        nBufLength += Short.BYTES;\n\n\n\n        byteBuffer.putInt( head.controlBits );\n        nBufLength += Integer.BYTES;\n\n        byteBuffer.putLong( head.identityId );\n        nBufLength += Long.BYTES;\n\n        byteBuffer.putLong( head.sessionId );\n        nBufLength += Long.BYTES;\n\n\n\n        if( head.extraHead == null ) {\n            byteBuffer.put( Bytes.Empty );\n        }\n        else {\n            byteBuffer.put( head.extraHead );\n        }\n        nBufLength += head.getExtraHeadLength();\n\n        return new EncodePair( byteBuffer, nBufLength );\n    }\n\n    public static UMCHead decode( byte[] buf, String szSignature, ExtraHeadCoder extraHeadCoder ) throws IOException {\n        int nBufSize = ArchUMCProtocol.basicHeadLength( szSignature );\n\n        if ( buf.length < nBufSize ) {\n            throw new StreamTerminateException( \"StreamEndException:[UMCProtocol] Stream is ended.\" );\n        }\n        int nReadAt = szSignature.length();\n        if ( !Arrays.equals( buf, 0, szSignature.length(), 
szSignature.getBytes(), 0, szSignature.length() )  ) {\n            throw new IOException( \"[UMCProtocol] Illegal protocol signature.\" );\n        }\n\n        UMCHeadV1 head = new UMCHeadV1();\n        head.applyExtraHeadCoder( extraHeadCoder );\n        //nReadAt++; // For ' '\n\n\n        head.nExtraHeadLength  = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( UMCHeadV1.BinByteOrder ).getInt();\n        nReadAt += Integer.BYTES;\n\n        head.extraEncode       = ExtraEncode.asValue( ByteBuffer.wrap( buf, nReadAt, Byte.BYTES ).order( UMCHeadV1.BinByteOrder ).get() );\n        nReadAt += Byte.BYTES;\n\n\n\n        head.nBodyLength       = ByteBuffer.wrap( buf, nReadAt, Long.BYTES ).order( UMCHeadV1.BinByteOrder ).getLong();\n        nReadAt += Long.BYTES;\n\n        head.nKeepAlive       = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( UMCHeadV1.BinByteOrder ).getInt();\n        nReadAt += Integer.BYTES;\n\n        head.method            = UMCMethod.values()[ buf[nReadAt] ];\n        nReadAt += Byte.BYTES;\n\n        head.status            = Status.asValue( ByteBuffer.wrap( buf, nReadAt, Short.BYTES ).order( UMCHeadV1.BinByteOrder ).getShort() );\n        nReadAt += Short.BYTES;\n\n        head.controlBits      = ByteBuffer.wrap( buf, nReadAt, Integer.BYTES ).order( UMCHeadV1.BinByteOrder ).getInt();\n        nReadAt += Integer.BYTES;\n\n        head.identityId       = ByteBuffer.wrap( buf, nReadAt, Long.BYTES ).order( UMCHeadV1.BinByteOrder ).getLong();\n        nReadAt += Long.BYTES;\n\n        head.sessionId        = ByteBuffer.wrap( buf, nReadAt, Long.BYTES ).order( UMCHeadV1.BinByteOrder ).getLong();\n        nReadAt += Long.BYTES;\n\n        return head;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCMessage.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic interface UMCMessage extends Message {\n    UMCHead     getHead();\n\n    default UMCMethod   getMethod(){\n        return this.getHead().getMethod();\n    }\n\n    default Object getExHead() {\n        return this.getHead().getExtraHead();\n    }\n\n    default InformMessage evinceInformMessage() {\n        return null;\n    }\n\n    default TransferMessage evinceTransferMessage() {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCMethod.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic enum UMCMethod {\n    UNDEFINED ( 0x00, \"Undefined\" ),\n    INFORM    ( 0x01, \"Inform\"    ),\n    TRANSFER  ( 0x02, \"Transfer\"  );\n\n    private final int value;\n\n    private final String name;\n\n    UMCMethod( int value, String name ){\n        this.value = value;\n        this.name  = name;\n    }\n\n    public String getName(){\n        return this.name;\n    }\n\n    public int getValue() {\n        return this.value;\n    }\n\n    public byte getByteValue() {\n        return (byte) this.value;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCProtocol.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\n/**\n *  Pinecone Ursus For Java UMCProtocol [ Unified Message Control Protocol ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n */\npublic interface UMCProtocol extends MsgProtocol {\n\n    UMCProtocol applyMessageSource( Medium medium ) ;\n\n    String getVersion();\n\n    String getSignature();\n\n    void release();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCReceiver.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\n\npublic interface UMCReceiver extends UMCProtocol{\n    Object readInformMsg() throws IOException;\n\n    UMCMessage readTransferMsg() throws IOException;\n\n    UMCMessage readTransferMsgBytes() throws IOException;\n\n    UMCMessage readMsg() throws IOException;\n\n    UMCMessage readMsgBytes() throws IOException;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCServiceException.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\npublic class UMCServiceException extends UMCException {\n    public UMCServiceException() {\n        super();\n    }\n\n    public UMCServiceException( String message ) {\n        super(message);\n    }\n\n    public UMCServiceException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public UMCServiceException( Throwable cause ) {\n        super(cause);\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/UMCTransmit.java",
    "content": "package com.pinecone.hydra.umc.msg;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\npublic interface UMCTransmit extends UMCProtocol {\n\n    void sendInformMsg( Object msg ) throws IOException;\n\n    void sendInformMsg( Object msg, Status status ) throws IOException;\n\n    void sendTransferMsg( Object msg, byte[] bytes ) throws IOException;\n\n    void sendTransferMsg( Object msg, byte[] bytes, Status status ) throws IOException;\n\n    default void sendTransferMsg( Object msg, String sz ) throws IOException {\n        this.sendTransferMsg( msg, sz.getBytes() );\n    }\n\n    void sendTransferMsg( Object msg, InputStream is ) throws IOException;\n\n\n    void sendMsg( UMCMessage msg, boolean bNoneBuffered ) throws IOException;\n\n    default void sendMsg( UMCMessage msg ) throws IOException {\n        this.sendMsg( msg, false );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/event/ChannelDataInterceptor.java",
    "content": "package com.pinecone.hydra.umc.msg.event;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelHandleException;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\nimport io.netty.channel.ChannelHandlerContext;\n\npublic interface ChannelDataInterceptor extends Pinenut {\n\n    boolean interceptAfterDataArrived ( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws ChannelHandleException;\n\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/event/ChannelEventHandler.java",
    "content": "package com.pinecone.hydra.umc.msg.event;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\n\nimport io.netty.channel.ChannelHandlerContext;\n\npublic interface ChannelEventHandler extends Pinenut {\n    void afterEventTriggered( ChannelControlBlock block, Object context );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/event/ChannelInactiveHandler.java",
    "content": "package com.pinecone.hydra.umc.msg.event;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelHandleException;\n\nimport io.netty.channel.ChannelHandlerContext;\n\npublic interface ChannelInactiveHandler extends ChannelEventHandler {\n    boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException;\n\n    @Override\n    default void afterEventTriggered( ChannelControlBlock block, Object context ) {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/ExtraHeadCoder.java",
    "content": "package com.pinecone.hydra.umc.msg.extra;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\n\nimport java.util.Map;\n\npublic interface ExtraHeadCoder extends Pinenut {\n    ExtraHeadEncoder       getEncoder();\n\n    ExtraHeadDecoder       getDecoder();\n\n    Map<String, Object >   newExtraHead();\n\n    ExtraEncode            getDefaultEncode();\n\n    void                   setDefaultEncode( ExtraEncode encode );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/ExtraHeadDecoder.java",
    "content": "package com.pinecone.hydra.umc.msg.extra;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.UMCHead;\n\npublic interface ExtraHeadDecoder extends Pinenut {\n    Object decode( UMCHead head, byte[] raw );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/ExtraHeadEncoder.java",
    "content": "package com.pinecone.hydra.umc.msg.extra;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.UMCHead;\n\npublic interface ExtraHeadEncoder extends Pinenut {\n    byte[] encode( UMCHead head, Object jo );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/ExtraHeadMarshalingException.java",
    "content": "package com.pinecone.hydra.umc.msg.extra;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class ExtraHeadMarshalingException extends PineRuntimeException {\n    public ExtraHeadMarshalingException() {\n        super();\n    }\n\n    public ExtraHeadMarshalingException( String message ) {\n        super( message );\n    }\n\n    public ExtraHeadMarshalingException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    public ExtraHeadMarshalingException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/GenericExtraHeadCoder.java",
    "content": "package com.pinecone.hydra.umc.msg.extra;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\n\nimport java.util.Map;\n\npublic class GenericExtraHeadCoder implements ExtraHeadCoder {\n    protected ExtraHeadEncoder encoder;\n    protected ExtraHeadDecoder decoder;\n    protected ExtraEncode      extraEncode;\n\n    public GenericExtraHeadCoder () {\n        this( new GenericExtraHeadEncoder(), new GenericExtraHeadDecoder() );\n    }\n\n    public GenericExtraHeadCoder ( ExtraHeadEncoder encoder, ExtraHeadDecoder decoder ) {\n        this( encoder, decoder, ExtraEncode.JSONString );\n    }\n\n    public GenericExtraHeadCoder ( ExtraHeadEncoder encoder, ExtraHeadDecoder decoder, ExtraEncode extraEncode ) {\n        this.encoder     = encoder;\n        this.decoder     = decoder;\n        this.extraEncode = extraEncode;\n    }\n\n    @Override\n    public ExtraHeadEncoder getEncoder() {\n        return this.encoder;\n    }\n\n    @Override\n    public ExtraHeadDecoder getDecoder() {\n        return this.decoder;\n    }\n\n    @Override\n    public ExtraEncode getDefaultEncode() {\n        return this.extraEncode;\n    }\n\n    @Override\n    public void setDefaultEncode( ExtraEncode encode ) {\n        this.extraEncode = encode;\n    }\n\n    @Override\n    public Map<String, Object > newExtraHead() {\n        return new JSONMaptron();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/GenericExtraHeadDecoder.java",
    "content": "package com.pinecone.hydra.umc.msg.extra;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.ulf.util.bson.UlfJSONDecompiler;\n\nimport java.io.ByteArrayInputStream;\nimport java.util.Map;\n\npublic class GenericExtraHeadDecoder implements ExtraHeadDecoder {\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Object decode( UMCHead head, byte[] raw ) {\n        ExtraEncode encode = head.getExtraEncode();\n        switch ( encode ) {\n            case JSONString: {\n                JSONObject jo = new JSONMaptron( head.evalMapExtraHead(), true );\n                jo.jsonDecode( new String( raw ) );\n                return jo;\n            }\n            case Binary: {\n                ByteArrayInputStream       is = new ByteArrayInputStream( raw );\n                UlfJSONDecompiler decompiler = new UlfJSONDecompiler( is );\n\n                Object o = decompiler.decompile();\n                if( o instanceof JSONObject ) {\n                    return o;\n                }\n                else if( o instanceof Map ) {\n                    return new JSONMaptron( (Map<String, Object >)o, true ) ;\n                }\n\n                throw new ExtraHeadMarshalingException(\n                        \"Illegal decompiler Binary json, requires Map<String, Object > but \" + o.getClass().getSimpleName() + \" found.\"\n                );\n            }\n            case Iussum:\n            case Prototype: {\n                return raw;\n            }\n        }\n\n        throw new ExtraHeadMarshalingException( \"Unsupported encode mode[\" + encode.getName() + \"].\" );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/extra/GenericExtraHeadEncoder.java",
    "content": "package com.pinecone.hydra.umc.msg.extra;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.ulf.util.bson.UlfJSONCompiler;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\npublic class GenericExtraHeadEncoder implements ExtraHeadEncoder {\n    @Override\n    public byte[] encode( UMCHead head, Object raw ) throws ExtraHeadMarshalingException {\n        ExtraEncode encode = head.getExtraEncode();\n        switch ( encode ) {\n            case JSONString: {\n                return JSON.stringify( raw ).getBytes();\n            }\n            case Binary: {\n                UlfJSONCompiler compiler = new UlfJSONCompiler();\n                ByteArrayOutputStream  os = new ByteArrayOutputStream();\n\n                try{\n                    compiler.compile( raw, os );\n                }\n                catch ( IOException e ) {\n                    throw new ExtraHeadMarshalingException( e );\n                }\n\n                return os.toByteArray();\n            }\n            case Iussum:\n            case Prototype: {\n                return (byte[]) raw;\n            }\n        }\n\n        throw new ExtraHeadMarshalingException( \"Unsupported encode mode[\" + encode.getName() + \"].\" );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/handler/ErrorMessageAudit.java",
    "content": "package com.pinecone.hydra.umc.msg.handler;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic interface ErrorMessageAudit extends Pinenut {\n    boolean isErrorMessage( UMCMessage message );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/handler/GenericErrorMessageAudit.java",
    "content": "package com.pinecone.hydra.umc.msg.handler;\n\nimport com.pinecone.hydra.umc.msg.MessageNodus;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic class GenericErrorMessageAudit implements ErrorMessageAudit {\n    protected MessageNodus mMessageNode;\n\n    public GenericErrorMessageAudit( MessageNodus node ) {\n        this.mMessageNode = node;\n    }\n\n    @Override\n    public boolean isErrorMessage( UMCMessage message ) {\n        return message.getHead().getStatus().getValue() >= 500;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/msg/handler/WrappedErrorMessageException.java",
    "content": "package com.pinecone.hydra.umc.msg.handler;\n\nimport com.pinecone.framework.system.PineRuntimeException;\nimport com.pinecone.hydra.umc.msg.Status;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic class WrappedErrorMessageException extends PineRuntimeException {\n    protected Status status;\n\n    protected UMCMessage message;\n\n    public WrappedErrorMessageException( Status status ) {\n        super();\n\n        this.status = status;\n    }\n\n    public WrappedErrorMessageException( String message, Status status  ) {\n        super( message );\n\n        this.status = status;\n    }\n\n    public WrappedErrorMessageException( String message, Throwable cause, Status status ) {\n        super( message, cause );\n\n        this.status = status;\n    }\n\n    public WrappedErrorMessageException( Throwable cause, Status status ) {\n        super(cause);\n\n        this.status = status;\n    }\n\n    public Status getStatus() {\n        return this.status;\n    }\n\n    public UMCMessage getUMCMessage() {\n        return this.message;\n    }\n\n    public void setUMCMessage( UMCMessage message ) {\n        this.message = message;\n    }\n\n    public static WrappedErrorMessageException wrap( UMCHead head ) {\n        Object what = head.getExHeaderVal( \"What\" );\n        if( what instanceof String ) {\n            return new WrappedErrorMessageException( (String) what, head.getStatus() );\n        }\n\n        return new WrappedErrorMessageException( head.getStatus() );\n    }\n\n    public static WrappedErrorMessageException wrap( UMCMessage message ) {\n        WrappedErrorMessageException exception = wrap( message.getHead() );\n        exception.setUMCMessage( message );\n        return exception;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/vita/HeartbeatControl.java",
    "content": "package com.pinecone.hydra.umc.vita;\n\nimport java.io.IOException;\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic interface HeartbeatControl extends Pinenut {\n\n    void registerChannels( Collection<ChannelControlBlock> channels, long intervalMillis ) ;\n\n    void registerChannel( ChannelControlBlock ccb, long intervalMillis ) ;\n\n    void deregisterChannel( ChannelControlBlock ccb ) ;\n\n    void shutdown() ;\n\n    boolean interceptFeedback( ChannelControlBlock block, UMCMessage msg ) throws IOException;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/vita/HeartbeatFeedbackor.java",
    "content": "package com.pinecone.hydra.umc.vita;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic interface HeartbeatFeedbackor extends Pinenut {\n    boolean interceptHeartbeat( ChannelControlBlock block, UMCMessage msg ) throws IOException ;\n\n    void feedback( ChannelControlBlock block, UMCMessage msg ) throws IOException ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/ArchChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.io.IOCounter;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.framework.system.executum.ArchThreadum;\n\nimport java.io.IOException;\nimport java.util.concurrent.BlockingDeque;\nimport java.util.concurrent.LinkedBlockingDeque;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.locks.Lock;\n\npublic abstract class ArchChannelControlBlock extends ArchThreadum implements NettyChannelControlBlock {\n    protected UlfChannel             mChannel;\n    protected MessageNode            mMessageNode;\n\n    // For Load Balance.\n    protected IOCounter              mIOCounter;\n\n    protected boolean                mbForceSyncMode;\n    protected boolean                mbInSyncMode;\n\n    protected UlfMCTransmit          mTransmit;\n    protected UlfMCReceiver          mReceiver;\n\n    protected BlockingDeque<UlfAsyncMsgHandleAdapter > mAsyncMsgHandleQueue = new LinkedBlockingDeque<>();\n\n    protected ArchChannelControlBlock( MessageNode parentNode, UlfChannel channel, boolean bForceSyncMode ) {\n        super( null, parentNode );\n        this.mChannel          = channel;\n        this.mbForceSyncMode   = bForceSyncMode;\n        this.mbInSyncMode      = bForceSyncMode;\n        this.mMessageNode      = parentNode;\n    }\n\n    @Override\n    public ArchChannelControlBlock setThreadAffinity( Thread affinity ) {\n        super.setThreadAffinity( affinity );\n        this.getChannel().setThreadAffinity( affinity );\n        return this;\n    }\n\n    public UlfChannel        getChannel() {\n        return this.mChannel;\n    }\n\n    public IOCounter         getIOCounter() {\n        return this.mIOCounter;\n    }\n\n    public boolean           getInSyncMode() {\n        return this.mbInSyncMode;\n    }\n\n    public UlfMCTransmit     getTransmit() {\n      
  return this.mTransmit;\n    }\n\n    public UlfMCReceiver     getReceiver() {\n        return this.mReceiver;\n    }\n\n    public MessageNode       getParentMessageNode() {\n        return this.mMessageNode;\n    }\n\n\n    @Override\n    public  void              sendMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException {\n        this.getChannel().setChannelStatus( UlfChannelStatus.WAITING_FOR_SEND );\n        this.mTransmit.sendMsg( request, bNoneBuffered );\n        this.getChannel().setChannelStatus( UlfChannelStatus.WAITING_FOR_RECEIVE );\n    }\n\n    protected void           afterConnectionArrive( Medium medium, boolean bRenew, Lock forceSyncLock ) {\n        if( this.mbForceSyncMode ) {\n            forceSyncLock.lock();\n        }\n\n        try{\n            if( bRenew ) {\n                this.mTransmit.applyMessageSource( medium );\n                this.mReceiver.applyMessageSource( medium );\n            }\n            this.mTransmit = new UlfMCTransmit( medium );\n            this.mReceiver = new UlfMCReceiver( medium );\n        }\n        finally {\n            if( this.mbForceSyncMode ) {\n                forceSyncLock.unlock();\n            }\n        }\n    }\n\n    public void              release() {\n        this.mChannel.release();\n        //this.mChannel            = null;\n        this.mIOCounter          = null;\n        this.mTransmit           = null;\n        this.mReceiver           = null;\n    }\n\n    public void              close(){\n        this.mChannel.close();\n    }\n\n    public boolean           isShutdown() {\n        return this.getChannel().isShutdown();\n    }\n\n    public UlfChannelStatus  getChannelStatus() {\n        return this.getChannel().getChannelStatus();\n    }\n\n    @Override\n    public void              kill() {\n        this.close();\n        this.release();\n    }\n\n\n\n    @Override\n    public void                     pushMsgHandle ( UlfAsyncMsgHandleAdapter msgHandle ) {\n        
this.mAsyncMsgHandleQueue.add( msgHandle );\n    }\n\n    @Override\n    public UlfAsyncMsgHandleAdapter pollMsgHandle ( long nWaitMillis ) throws InterruptedException {\n        return this.mAsyncMsgHandleQueue.poll( nWaitMillis, TimeUnit.MICROSECONDS );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/ArchChannelPool.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.wolf.client.MessengerNettyChannelControlBlock;\n\npublic abstract class ArchChannelPool implements ChannelPool {\n    @Override\n    public boolean isAllChannelsTerminated() {\n        if( this.isEmpty() ) {\n            return true;\n        }\n\n        //boolean b = true;\n        for ( Object o : this.getPooledChannels() ){\n            MessengerNettyChannelControlBlock block = (MessengerNettyChannelControlBlock) o;\n            //b = b && block.isShutdown();\n            if( !block.isShutdown() ) {\n                return false;\n            }\n        }\n        //return b;\n        return true;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/ArchUMCChannel.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport io.netty.channel.Channel;\nimport io.netty.channel.ChannelFuture;\nimport io.netty.channel.ChannelId;\n\nimport java.net.SocketAddress;\n\npublic abstract class ArchUMCChannel implements NettyUMCChannel {\n    protected ChannelId                  mChannelID          ;\n    protected long                       mIdentityID         ;\n    protected Thread                     mAffiliateThread    = Thread.currentThread();\n    protected MessageNode                mParentMessageNode  ;\n    protected ChannelFuture              mLastChannelFuture  ;\n    protected Channel                    mChannel            ;\n\n    protected SocketAddress              mAddress            ;\n    protected volatile UlfChannelStatus  mChannelStatus      = UlfChannelStatus.IDLE;\n\n    protected ArchUMCChannel( MessageNode node ) {\n        this.mParentMessageNode  = node;\n    }\n\n    protected ArchUMCChannel( MessageNode node, Channel nativeChannel, SocketAddress address ) {\n        this( node );\n        this.mChannel            = nativeChannel;\n        this.mChannelID          = this.mChannel.id();\n        this.mAddress            = address;\n    }\n\n    protected ArchUMCChannel( MessageNode node, Channel nativeChannel ) {\n        this( node, nativeChannel, null );\n    }\n\n\n\n\n    public ArchUMCChannel    setThreadAffinity( Thread affinity ) {\n        this.mAffiliateThread = affinity;\n        return this;\n    }\n\n    public ArchUMCChannel    bindAffiliateThread( Thread affinity ) {\n        if( this.mAffiliateThread == null ) {\n            return this.setThreadAffinity( affinity );\n        }\n        return this;\n    }\n\n    public synchronized ArchUMCChannel bindThisThread() {\n        return this.bindAffiliateThread( Thread.currentThread() );\n    }\n\n    public ChannelFuture     getLastChannelFuture() {\n        return this.mLastChannelFuture;\n    
}\n\n\n\n    @Override\n    public SocketAddress     getAddress(){\n        return this.mAddress;\n    }\n\n    @Override\n    public SocketAddress     remoteAddress() {\n        return this.mChannel.remoteAddress();\n    }\n\n    @Override\n    public SocketAddress     localAddress() {\n        return this.mChannel.localAddress();\n    }\n\n    @Override\n    public Thread            getAffiliateThread(){\n        return this.mAffiliateThread;\n    }\n\n    @Override\n    public ChannelId         getChannelID() {\n        return this.mChannelID;\n    }\n\n    @Override\n    public long              getIdentityID() {\n        return this.mIdentityID;\n    }\n\n    void                     setIdentityID( long identityID ) {\n        this.mIdentityID = identityID;\n    }\n\n    @Override\n    public Channel           getNativeHandle(){\n        return this.mChannel;\n    }\n\n    @Override\n    public UlfChannelStatus  getChannelStatus() {\n        return this.mChannelStatus;\n    }\n\n    @Override\n    public void              setChannelStatus( UlfChannelStatus status ) {\n        this.mChannelStatus = status;\n    }\n\n    @Override\n    public MessageNode       getParentMessageNode() {\n        return this.mParentMessageNode;\n    }\n\n\n    @Override\n    public void              release() {\n        this.mAffiliateThread    = null;\n        this.mLastChannelFuture  = null;\n//        this.mChannel            = null;\n//        this.mChannelStatus      = null;\n        this.mParentMessageNode  = null;\n    }\n\n    @Override\n    public void              close() {\n        this.setChannelStatus( UlfChannelStatus.WAITING_FOR_SHUTDOWN );\n        this.getNativeHandle().close();\n        this.setChannelStatus( UlfChannelStatus.SHUTDOWN );\n    }\n\n    @Override\n    public boolean           isShutdown() {\n        return this.getChannelStatus().isTerminated() || !this.getNativeHandle().isActive();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/AsyncUlfMedium.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.io.ChannelInputStream;\nimport com.pinecone.hydra.umc.io.ChannelOutputStream;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport io.netty.buffer.ByteBuf;\nimport io.netty.channel.ChannelHandlerContext;\n\nimport java.io.InputStream;\nimport java.io.OutputStream;\n\npublic class AsyncUlfMedium implements Medium {\n    protected ChannelHandlerContext mContext;\n\n    protected ByteBuf               mInBuf;\n\n    protected OutputStream          mOutputStream ;\n\n    protected InputStream           mInputStream;\n\n    protected MessageNode           mMessageNode;\n\n    public AsyncUlfMedium( ChannelHandlerContext context, MessageNode messageNode ) {\n        this.mContext       = context;\n        this.mInBuf         = null;\n        this.mOutputStream  = new ChannelOutputStream( this.mContext );\n        this.mInputStream   = null;\n        this.mMessageNode   = messageNode;\n    }\n\n    public AsyncUlfMedium( ChannelHandlerContext context, ByteBuf byteBuf, MessageNode messageNode ) {\n        this.mContext       = context;\n        this.mInBuf         = byteBuf;\n        this.mOutputStream  = new ChannelOutputStream( this.mContext );\n        this.mInputStream   = new ChannelInputStream( this.mInBuf );\n        this.mMessageNode   = messageNode;\n    }\n\n    @Override\n    public OutputStream getOutputStream(){\n        return this.mOutputStream;\n    }\n\n    @Override\n    public InputStream getInputStream(){\n        return this.mInputStream;\n    }\n\n    @Override\n    public Object getNativeMessageSource(){\n        return this.mContext.channel();\n    }\n\n    @Override\n    public String sourceName(){\n        return \"WolfUMC\";\n    }\n\n    @Override\n    public MessageNode getMessageNode() {\n        return this.mMessageNode;\n    }\n\n    @Override\n    public void release() {\n        this.mContext        = null;\n  
      this.mInBuf          = null;\n        this.mOutputStream   = null;\n        this.mInputStream    = null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/ChannelUtils.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\npublic final class ChannelUtils {\n    public static void setChannelIdentityID( UlfChannel channel, long nIdentityID ) {\n        channel.setIdentityID( nIdentityID );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/GenericUMCByteMessageDecoder.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.UMCHeadV1;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport io.netty.buffer.ByteBuf;\nimport io.netty.channel.ChannelHandlerContext;\nimport io.netty.handler.codec.ByteToMessageDecoder;\nimport com.pinecone.hydra.umc.msg.ArchUMCProtocol;\nimport com.pinecone.hydra.umc.msg.UMCHead;\n\nimport java.util.List;\n\npublic class GenericUMCByteMessageDecoder extends ByteToMessageDecoder {\n    private ByteBuf        cumulation;\n    private ExtraHeadCoder extraHeadCoder;\n    private long           byteSum;\n    private long           bodyBytes;\n    private int            readAt;\n    private int            readBytes; // Each package\n\n    public GenericUMCByteMessageDecoder( ExtraHeadCoder extraHeadCoder ) {\n        this.extraHeadCoder = extraHeadCoder;\n        this.byteSum   = -1;\n        this.bodyBytes = 0;\n        this.readAt    = 0;\n        this.readBytes = 0;\n    }\n\n    private static int countOccurrences( byte[] bfs, byte[] target ) {\n        int count = 0;\n        for ( int i = 0; i <= bfs.length - target.length; ++i ) {\n            boolean match = true;\n            for ( int j = 0; j < target.length; ++j ) {\n                if (bfs[i + j] != target[j]) {\n                    match = false;\n                    break;\n                }\n            }\n            if (match) {\n                count++;\n            }\n        }\n        return count;\n    }\n\n    @Override\n    protected void decode( ChannelHandlerContext ctx, ByteBuf in, List<Object> out ) throws Exception {\n//        ByteBuf bufs = in.copy();\n//        byte[] bfs = new byte[ bufs.readableBytes() ];\n//        bufs.readBytes( bfs );\n//        int occurrences = countOccurrences(bfs, \"UMC/1.1\".getBytes());\n//        int kf = countOccurrences(bfs, \"afd\".getBytes());\n//        if ( kf > 0 ) {\n//            IC += occurrences;\n//            Debug.redfs(IC);\n//        
}\n\n\n        while ( in.readableBytes() > 0 ) {\n            boolean bContinueRead = false;\n            if ( this.byteSum == -1 ) {\n\n                // For debug reference.\n//                if ( in.readableBytes() > 100 ) {\n//                    Debug.traceSyn( in );\n//                }\n\n                int nBufSize = ArchUMCProtocol.basicHeadLength( UMCHeadV1.ProtocolSignature );\n                // Waiting for more data to arrive, and that will be enough to decode the header.\n                if ( in.readableBytes() < nBufSize ) {\n                    return;\n                }\n                this.readBytes = 0;\n                byte[] buf = new byte[ nBufSize ];\n                in.readBytes(buf);\n\n                // For debug reference.\n//                if ( buf[ 0 ] != 85 ) {\n//                    Debug.traceSyn( buf );\n//                }\n\n                UMCHead head = ArchUMCProtocol.onlyReadMsgBasicHead( buf, UMCHeadV1.ProtocolSignature, this.extraHeadCoder );\n                this.bodyBytes = head.getBodyLength();\n                this.byteSum   = nBufSize + head.getExtraHeadLength() + this.bodyBytes;\n                this.readAt    += nBufSize;\n                this.readBytes += nBufSize;\n\n                if ( this.byteSum < 0 ) {\n                    throw new IllegalArgumentException( \"Invalid byteSum calculation: \" + this.byteSum );\n                }\n                bContinueRead = true;\n            }\n\n            if ( bContinueRead ) {\n                int startAt = this.readAt - this.readBytes;\n                in.readerIndex( startAt );\n                this.readAt -= this.readBytes;\n                this.readBytes = 0;\n            }\n            if ( in.readableBytes() >= this.byteSum ) {\n                this.readBytes = (int)this.byteSum;\n                ByteBuf completeMessage = in.readRetainedSlice((int) this.readBytes);\n                this.readAt += this.readBytes;\n\n\n                // For debug 
reference.\n//                byte[] bs = new byte[ (int) this.byteSum ];\n//                ByteBuf byteBuf = completeMessage.copy();\n//                byteBuf.readBytes(bs);\n//                byteBuf.release();\n//                if ( bs[ 0 ] != 85 ) {\n//                    Debug.traceSyn( bs, bContinueRead );\n//                }\n//                head = ArchUMCProtocol.onlyReadMsgBasicHead( bs, UMCHeadV1.ProtocolSignature, this.extraHeadCoder );\n//                Debug.warnSyn( bs, head );\n\n\n                try {\n                    //Debug.bluefs( invokes.getAndIncrement() );\n                    ctx.fireChannelRead(completeMessage);\n                }\n                finally {\n                    completeMessage.release();\n                }\n\n                this.byteSum   = -1;\n                this.bodyBytes = 0;\n                this.readBytes = 0;\n            }\n            else {\n                return;\n            }\n        }\n\n        if ( this.byteSum == -1 ) {\n            this.readAt = 0;\n        }\n\n        // Waiting for more data to arrive.\n//        else {\n//            return;\n//        }\n    }\n\n    private void resetState() {\n        this.byteSum   = -1;\n        this.bodyBytes = 0;\n        this.readAt    = 0;\n        this.readBytes = 0;\n    }\n\n    @Override\n    public void channelInactive( ChannelHandlerContext ctx ) throws Exception {\n        super.channelInactive(ctx);\n        if ( this.cumulation != null ) {\n            this.cumulation.clear();\n            this.cumulation.release();\n            this.cumulation = null;\n        }\n        this.resetState();\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/InternalErrors.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.Status;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\nimport java.io.IOException;\n\npublic final class InternalErrors {\n    public static void sendInternalError( ChannelControlBlock channel, Status errorCode ) throws IOException {\n        UMCMessage errorMsg = new UlfInformMessage( (Object) null );\n        errorMsg.getHead().setStatus( errorCode );\n        channel.sendMsg( errorMsg, true );\n    }\n\n    public static void sendDefaultInternalError( ChannelControlBlock channel ) throws IOException {\n        InternalErrors.sendInternalError( channel, Status.InternalError );\n    }\n\n    public static void sendNotImplemented( ChannelControlBlock channel ) throws IOException {\n        InternalErrors.sendInternalError( channel, Status.NotImplemented );\n    }\n\n    public static void sendBadGateway( ChannelControlBlock channel ) throws IOException {\n        InternalErrors.sendInternalError( channel, Status.BadGateway );\n    }\n\n    public static void sendUnavailable( ChannelControlBlock channel ) throws IOException {\n        InternalErrors.sendInternalError( channel, Status.Unavailable );\n    }\n\n    public static void sendGatewayTimeout( ChannelControlBlock channel ) throws IOException {\n        InternalErrors.sendInternalError( channel, Status.GatewayTimeout );\n    }\n\n    public static void sendVersionNotSupported( ChannelControlBlock channel ) throws IOException {\n        InternalErrors.sendInternalError( channel, Status.VersionNotSupported );\n    }\n\n    public static void sendTooManyConnections( ChannelControlBlock channel ) throws IOException {\n        InternalErrors.sendInternalError( channel, Status.TooManyConnections );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/MCConnectionArguments.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.MsgNodeConfig;\n\npublic interface MCConnectionArguments extends MsgNodeConfig {\n    String getHost();\n\n    void setHost( String host );\n\n    short getPort();\n\n    void setPort( short port );\n\n    int getKeepAliveTimeout();\n\n    void setKeepAliveTimeout( int keepAliveTimeout );\n\n    int getSocketTimeout();\n\n    void setSocketTimeout( int socketTimeout );\n\n    boolean isEnableHeartbeat() ;\n\n    void setHeartbeatState( boolean enable ) ;\n\n    long getHeartbeatInterval();\n\n    void setHeartbeatInterval( long heartbeatIntervalMills );\n\n    @Override\n    default long getSyncWaitingMillis() {\n        return this.getKeepAliveTimeout() * 1000L;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/MCSecurityAuthentication.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MCSecurityAuthentication extends Pinenut {\n    String getUsername();\n\n    void setUsername( String username );\n\n    String getDomain();\n\n    void setDomain( String domain );\n\n    String getPassword();\n\n    void setPassword( String password );\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/MCSecurityToken.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MCSecurityToken extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/NettyChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\n\npublic interface NettyChannelControlBlock extends ChannelControlBlock {\n    @Override\n    NettyUMCChannel     getChannel();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/NettyUMCChannel.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.UMCChannel;\nimport io.netty.channel.Channel;\nimport io.netty.channel.ChannelId;\n\npublic interface NettyUMCChannel extends UMCChannel {\n    @Override\n    Channel        getNativeHandle();\n\n    @Override\n    ChannelId      getChannelID() ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/SharedConnectionArguments.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.wolf.client.ArchAsyncMessenger;\n\npublic abstract class SharedConnectionArguments implements MCConnectionArguments {\n    protected String         mszHost;\n    protected short          mnPort;\n    protected int            mnKeepAliveTimeout;\n    protected int            mnSocketTimeout;\n    protected boolean        mbEnableHeartbeat;\n    protected long           mnHeartbeatInterval;\n\n\n    public SharedConnectionArguments( JSONObject args ) {\n        this.mszHost             = args.optString( \"host\", null );\n        this.mnPort              = (short) args.optInt( \"port\", -1 );\n        this.mnKeepAliveTimeout  = args.optInt( \"KeepAliveTimeout\" );\n        this.mnSocketTimeout     = args.optInt( \"SocketTimeout\", 800 );\n        this.mbEnableHeartbeat   = args.optBoolean( \"EnableHeartbeat\", false );\n        this.mnHeartbeatInterval = args.optLong( \"HeartbeatInterval\", 10000 ); // 10s\n    }\n\n    public SharedConnectionArguments( ArchAsyncMessenger args ) {\n        this( args.getSectionConf() );\n    }\n\n    @Override\n    public String getHost() {\n        return this.mszHost;\n    }\n\n    @Override\n    public void setHost( String host ) {\n        this.mszHost = host;\n    }\n\n    @Override\n    public short getPort() {\n        return this.mnPort;\n    }\n\n    @Override\n    public void setPort( short port ) {\n        this.mnPort = port;\n    }\n\n    @Override\n    public int getKeepAliveTimeout() {\n        return this.mnKeepAliveTimeout;\n    }\n\n    @Override\n    public void setKeepAliveTimeout( int keepAliveTimeout ) {\n        this.mnKeepAliveTimeout = keepAliveTimeout;\n    }\n\n    @Override\n    public int getSocketTimeout() {\n        return this.mnSocketTimeout;\n    }\n\n    @Override\n    public void setSocketTimeout( int socketTimeout ) {\n        this.mnSocketTimeout = 
socketTimeout;\n    }\n\n    @Override\n    public boolean isEnableHeartbeat() {\n        return this.mbEnableHeartbeat;\n    }\n\n    @Override\n    public void setHeartbeatState( boolean enable ) {\n        this.mbEnableHeartbeat = enable;\n    }\n\n    @Override\n    public void setHeartbeatInterval( long heartbeatIntervalMills ) {\n        this.mnHeartbeatInterval = heartbeatIntervalMills;\n    }\n\n    @Override\n    public long getHeartbeatInterval() {\n        return this.mnHeartbeatInterval;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/StandardRemoteUserAuthentication.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\npublic class StandardRemoteUserAuthentication implements MCSecurityAuthentication {\n    protected String mszUsername;\n    protected String mszDomain;\n    protected String mszPassword;\n\n    public StandardRemoteUserAuthentication( String szUsername, String szDomain, String szPassword ) {\n        this.mszUsername   = szUsername;\n        this.mszDomain     = szDomain;\n        this.mszPassword   = szPassword;\n    }\n\n    public StandardRemoteUserAuthentication( String szUsername, String szPassword ) {\n        this( szUsername, \"\", szPassword );\n    }\n\n\n    @Override\n    public String getUsername() {\n        return this.mszUsername;\n    }\n\n    @Override\n    public void setUsername( String username ) {\n        this.mszUsername = username;\n    }\n\n    @Override\n    public String getDomain() {\n        return this.mszDomain;\n    }\n\n    @Override\n    public void setDomain( String domain ) {\n        this.mszDomain = domain;\n    }\n\n    @Override\n    public String getPassword() {\n        return this.mszPassword;\n    }\n\n    @Override\n    public void setPassword( String password ) {\n        this.mszPassword = password;\n    }\n}\n\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfAsyncMsgHandleAdapter.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.AsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\nimport io.netty.channel.ChannelHandlerContext;\n\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\npublic interface UlfAsyncMsgHandleAdapter extends AsyncMsgHandleAdapter {\n    default void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n        this.onSuccessfulMsgReceived( medium, block.getTransmit(), block.getReceiver(), msg, new Object[]{ block, rawMsg } );\n    }\n\n    default void onErrorMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n        this.onErrorMsgReceived( medium, block.getTransmit(), block.getReceiver(), msg, new Object[]{ block, rawMsg } );\n    }\n\n    default void onError( ChannelHandlerContext ctx, Throwable cause ) {\n        this.onError( (Object) ctx, cause );\n    }\n\n    static UlfAsyncMsgHandleAdapter wrap( UMCTExpressHandler handler ) {\n        return new UlfAsyncMsgHandleAdapter() {\n            @Override\n            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n                handler.onSuccessfulMsgReceived( medium, block.getTransmit(), block.getReceiver(), msg, new Object[]{ block, rawMsg } );\n            }\n\n            @Override\n            public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n                handler.onSuccessfulMsgReceived( medium, transmit, receiver, msg, args );\n 
           }\n\n            @Override\n            public void onErrorMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n                handler.onErrorMsgReceived( medium, block.getTransmit(), block.getReceiver(), msg, new Object[]{ block, rawMsg } );\n            }\n\n            @Override\n            public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n                handler.onErrorMsgReceived( medium, transmit, receiver, msg, args );\n            }\n\n            @Override\n            public void onError( ChannelHandlerContext ctx, Throwable cause ) {\n                handler.onError( (Object) ctx, cause );\n            }\n\n            @Override\n            public void onError( Object data, Throwable cause ) {\n                handler.onError( data, cause );\n            }\n        };\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfBytesTransferMessage.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport java.util.Map;\n\nimport com.pinecone.hydra.umc.msg.ArchBytesTransferMessage;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.UMCHead;\n\npublic class UlfBytesTransferMessage extends ArchBytesTransferMessage {\n    public UlfBytesTransferMessage( UMCHead head ) {\n        super( head );\n    }\n\n    public UlfBytesTransferMessage( UMCHead head, byte[] sBytesBody   ) {\n        super( head, sBytesBody );\n    }\n\n    public UlfBytesTransferMessage( UMCHead head, String szStringBody ) {\n        this( head, szStringBody.getBytes() );\n    }\n\n    public UlfBytesTransferMessage( Map<String,Object > joExHead, byte[] sBytesBody, int controlBits ) {\n        super( joExHead, sBytesBody, controlBits );\n    }\n\n    public UlfBytesTransferMessage( Map<String,Object > joExHead, String szStringBody, int controlBits ) {\n        this( joExHead, szStringBody.getBytes(), controlBits );\n    }\n\n    public UlfBytesTransferMessage( Map<String,Object > joExHead, byte[] sBytesBody ) {\n        this( joExHead, sBytesBody, 0 );\n    }\n\n    public UlfBytesTransferMessage( Map<String,Object > joExHead, String szStringBody ) {\n        this( joExHead, szStringBody.getBytes(), 0 );\n    }\n\n\n    public UlfBytesTransferMessage( Object exHead, ExtraEncode encode, byte[] sBytesBody, int controlBits ) {\n        super( exHead, encode, sBytesBody, controlBits );\n    }\n\n    public UlfBytesTransferMessage( Object exHead, ExtraEncode encode, String szStringBody, int controlBits ) {\n        this( exHead, encode, szStringBody.getBytes(), controlBits );\n    }\n\n    public UlfBytesTransferMessage( Object exHead, byte[] sBytesBody ) {\n        this( exHead, ExtraEncode.Prototype, sBytesBody, 0 );\n    }\n\n    public UlfBytesTransferMessage( Object exHead, String szStringBody ) {\n        this( exHead, ExtraEncode.Prototype, szStringBody, 0 );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfChannel.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport io.netty.bootstrap.Bootstrap;\nimport io.netty.channel.Channel;\nimport io.netty.channel.ChannelFuture;\nimport io.netty.channel.ChannelFutureListener;\nimport io.netty.channel.EventLoopGroup;\nimport io.netty.util.AttributeKey;\n\nimport java.io.IOException;\nimport java.net.SocketAddress;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\n\npublic class UlfChannel extends ArchUMCChannel {\n    protected EventLoopGroup             mExecutorGroup      ;\n    protected Bootstrap                  mBootstrap          ;\n\n\n    public UlfChannel( MessageNode node ) {\n        super( node );\n\n        if( node instanceof WolfMCClient) {\n            WolfMCClient messenger   = (WolfMCClient) node;\n            this.mExecutorGroup      = messenger.getEventLoopGroup();\n            this.mBootstrap          = messenger.getBootstrap();\n        }\n    }\n\n    // Auto set address while connection.\n    public UlfChannel( MessageNode node, Channel nativeChannel, @Nullable SocketAddress address ) {\n        super( node, nativeChannel, address );\n    }\n\n    public UlfChannel( MessageNode node, Channel nativeChannel ) {\n        this( node, nativeChannel, null );\n    }\n\n    public EventLoopGroup    getExecutorGroup() {\n        return this.mExecutorGroup;\n    }\n\n    public Bootstrap         getBootstrap() {\n        return this.mBootstrap;\n    }\n\n\n    @Override\n    public void              reconnect( long mils ) throws IOException {\n        if ( this.isShutdown() ) {\n            ChannelFuture future = this.toConnect( this.getAddress() 
).getLastChannelFuture();\n            CompletableFuture<Void> completableFuture = new CompletableFuture<>();\n            future.addListener(new ChannelFutureListener() {\n                @Override\n                public void operationComplete( ChannelFuture channelFuture ) throws Exception {\n                    try {\n                        completableFuture.complete( null );\n                    }\n                    catch (Exception e) {\n                        completableFuture.completeExceptionally( e );\n                    }\n                }\n            });\n\n            try {\n                if ( mils != -1 ) {\n                   future.get( mils, TimeUnit.MILLISECONDS );\n                }\n                else {\n                    future.get();\n                }\n            }\n            catch ( InterruptedException e ) {\n                Thread.currentThread().interrupt();\n                throw new IOException( e );\n            }\n            catch ( TimeoutException | ExecutionException e ) {\n                throw new IOException( e.getCause() );\n            }\n\n\n            try{\n                ( (Slf4jTraceable) this.getParentMessageNode() ).getLogger().info(\n                        \"[ChannelReconnect] <id:`{}`, Addr: `{}`>\", this.getNativeHandle().id(), this.getAddress()\n                );\n            }\n            catch ( ClassCastException ignore ) {\n                // Ignore them.\n            }\n        }\n    }\n\n    @Override\n    public void              reconnect() throws IOException {\n        this.reconnect( -1 );\n    }\n\n    public static void copyChannelAttr( Channel leg, Channel neo, String key ) {\n        Object val = leg.attr( AttributeKey.valueOf( key ) ).get();\n        if ( val != null ) {\n            neo.attr( AttributeKey.valueOf( key ) ).set( val );\n        }\n    }\n\n    public ArchUMCChannel    toConnect( SocketAddress address ) {\n        this.mAddress           = address;\n        
this.mLastChannelFuture = this.getBootstrap().connect( address );\n\n        Channel channel         = this.getLastChannelFuture().channel();\n        if ( this.mChannel != null ) { // Reconnect\n            Object ccb = this.mChannel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).get();\n            channel.attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).set( ccb );\n            WolfMCStandardConstants.copyChannelStandardAttrs( this.mChannel, channel );\n        }\n        this.mChannel           = channel;\n        this.mChannelID         = this.mChannel.id();\n\n        return this;\n    }\n\n    @Override\n    public void release() {\n        super.release();\n\n        this.mExecutorGroup      = null;\n        this.mBootstrap          = null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfChannelStatus.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ChannelStatus;\n\npublic enum UlfChannelStatus implements ChannelStatus {\n    IDLE                     ( 0x00, \"Idle\"                   ),\n    WAITING_FOR_SEND         ( 0x01, \"WaitingSend\"            ),\n    WAITING_FOR_RECEIVE      ( 0x02, \"WaitingReceive\"         ),\n    WAITING_FOR_RECALL_FUN   ( 0x03, \"WaitingRecallFun\"       ),\n    WAITING_THREAD_RESUME    ( 0x04, \"WaitingThreadResume\"    ),\n\n    FORCE_SYNCHRONIZED       ( 0x05, \"ForceSynchronized\"      ),\n    WAITING_FOR_SHUTDOWN     ( 0x06, \"WaitingShutdown\"        ),\n    SHUTDOWN                 ( 0x07, \"Shutdown\"               ),\n\n    WAITING_PASSIVE_SEND     ( 0xA1, \"WaitingPassiveSend\"     ),\n    WAITING_PASSIVE_RECEIVE  ( 0xA2, \"WaitingPassiveReceive\"  ),\n\n    ;\n\n    public static final int PassiveStatusMask = 0xA0;\n\n    private final int value;\n\n    private final String name;\n\n    UlfChannelStatus( int value, String name ){\n        this.value = value;\n        this.name  = name;\n    }\n\n    @Override\n    public String getName(){\n        return this.name;\n    }\n\n    @Override\n    public int getValue() {\n        return this.value;\n    }\n\n    @Override\n    public byte getByteValue() {\n        return (byte) this.value;\n    }\n\n    @Override\n    public boolean isIdle() {\n        return this == UlfChannelStatus.IDLE;\n    }\n\n    @Override\n    public boolean isTerminated() {\n        return this == UlfChannelStatus.WAITING_FOR_SHUTDOWN;\n    }\n\n    @Override\n    public boolean isWaitingForIOCompleted(){\n        return this.value >= UlfChannelStatus.WAITING_FOR_SEND.value && this.value <= UlfChannelStatus.WAITING_FOR_RECEIVE.value;\n    }\n\n    @Override\n    public boolean isWaitingForLocalCompleted(){\n        return this.value >= UlfChannelStatus.WAITING_FOR_RECALL_FUN.value && this.value <= UlfChannelStatus.WAITING_THREAD_RESUME.value;\n    }\n\n    
@Override\n    public boolean isAsynAvailable() {\n        return !this.isTerminated() &&\n                this != UlfChannelStatus.FORCE_SYNCHRONIZED &&\n                this != UlfChannelStatus.WAITING_FOR_SEND &&\n                ( (this.value & PassiveStatusMask) != PassiveStatusMask );\n    }\n\n    @Override\n    public boolean isSyncAvailable() {\n        return !this.isTerminated() || this.isIdle();\n    }\n\n    @Override\n    public String toString() {\n        return this.getName();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfIOLoadBalanceStrategy.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.io.IOLoadBalanceStrategy;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\n\npublic interface UlfIOLoadBalanceStrategy extends IOLoadBalanceStrategy {\n\n    boolean match( ChannelControlBlock ccb );\n\n    UlfIOLoadBalanceStrategy clone();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfIdleFirstBalanceStrategy.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.IdleFirstBalanceStrategy;\n\npublic class UlfIdleFirstBalanceStrategy extends IdleFirstBalanceStrategy implements UlfIOLoadBalanceStrategy {\n    public UlfIdleFirstBalanceStrategy() {\n        super();\n    }\n\n    @Override\n    public boolean match( ChannelControlBlock ccb ) {\n        return ccb.getChannelStatus().isIdle();\n    }\n\n    @Override\n    public boolean matched( Object condition ) {\n        return this.match( (ChannelControlBlock) condition );\n    }\n\n    @Override\n    public UlfIdleFirstBalanceStrategy clone() {\n        return (UlfIdleFirstBalanceStrategy)super.clone();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfInformMessage.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ArchInformMessage;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.UMCHead;\n\nimport java.util.Map;\n\npublic class UlfInformMessage extends ArchInformMessage {\n    public UlfInformMessage( UMCHead head ) {\n        super(head);\n    }\n\n    public UlfInformMessage( Map<String,Object > joExHead, int controlBits ) {\n        super( joExHead, controlBits );\n    }\n\n    public UlfInformMessage( Object protoExHead , int controlBits ) {\n        super( protoExHead, controlBits );\n    }\n\n    public UlfInformMessage( Map<String,Object > joExHead ) {\n        super( joExHead );\n    }\n\n    public UlfInformMessage( Object protoExHead, ExtraEncode encode ) {\n        super( protoExHead, encode );\n    }\n\n    public UlfInformMessage( Object protoExHead ) {\n        super( protoExHead );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfInstructMessage.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ArchUMCMessage;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.InformMessage;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\n\npublic class UlfInstructMessage extends ArchUMCMessage implements InformMessage {\n    public UlfInstructMessage( UMCMethod method, int controlBits ) {\n        super( (Object) null, ExtraEncode.Iussum, method, controlBits );\n    }\n\n    public UlfInstructMessage( int controlBits ) {\n        this( UMCMethod.UNDEFINED, controlBits );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfMCReceiver.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ArchUMCReceiver;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.TransferMessage;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\nimport java.io.IOException;\n\npublic class UlfMCReceiver extends ArchUMCReceiver {\n    public UlfMCReceiver( Medium messageSource ) {\n        super( messageSource );\n    }\n\n    public UMCMessage readTransferMsg( boolean bAllBytes ) throws IOException {\n        UMCHead head = this.readTransferHead();\n\n        TransferMessage message;\n        if( bAllBytes ) {\n            message = new UlfBytesTransferMessage( head );\n        }\n        else {\n            message = new UlfStreamTransferMessage( head );\n        }\n        this.onlyReadTransferBody( message, bAllBytes );\n        return message;\n    }\n\n    @Override\n    public UMCMessage readTransferMsg() throws IOException {\n        return this.readTransferMsg( false );\n    }\n\n    @Override\n    public UMCMessage readTransferMsgBytes() throws IOException {\n        return this.readTransferMsg( true );\n    }\n\n    @Override\n    public UMCMessage readMsg() throws IOException {\n        return this.readMsg( false, UlfMessageStereotypes.Default );\n    }\n\n    @Override\n    public UMCMessage readMsgBytes() throws IOException {\n        return this.readMsg( true, UlfMessageStereotypes.Default );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfMCTransmit.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ArchUMCTransmit;\nimport com.pinecone.hydra.umc.msg.Medium;\n\npublic class UlfMCTransmit extends ArchUMCTransmit {\n    public UlfMCTransmit( Medium messageSource ) {\n        super( messageSource );\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfMessageNode.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.CascadeMessageNode;\nimport com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor;\nimport com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler;\n\npublic interface UlfMessageNode extends CascadeMessageNode {\n    ChannelPool          getChannelPool();\n\n    void                 close();\n\n    UlfMessageNode       registerChannelInactiveHandler( ChannelInactiveHandler handler ) throws IllegalStateException;\n\n    UlfMessageNode       deregisterChannelInactiveHandler( ChannelInactiveHandler handler ) throws IllegalStateException;\n\n    UlfMessageNode       registerArrivedDataInterceptor( ChannelDataInterceptor handler ) throws IllegalStateException;\n\n    UlfMessageNode       deregisterArrivedDataInterceptor( ChannelDataInterceptor handler ) throws IllegalStateException;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfMessageStereotypes.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.MessageStereotypes;\n\npublic class UlfMessageStereotypes implements MessageStereotypes {\n    public static final MessageStereotypes Default = new UlfMessageStereotypes();\n\n    protected Class<? > putType        = UlfInformMessage.class;\n    protected Class<? > postBytesType  = UlfBytesTransferMessage.class;\n    protected Class<? > postStreamType = UlfStreamTransferMessage.class;\n\n    @Override\n    public Class<? > putType() {\n        return this.putType;\n    }\n\n    @Override\n    public Class<? > postBytesType() {\n        return this.postBytesType;\n    }\n\n    @Override\n    public Class<? > postStreamType() {\n        return this.postStreamType;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UlfStreamTransferMessage.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport java.io.InputStream;\nimport java.util.Map;\n\nimport com.pinecone.hydra.umc.msg.ArchStreamTransferMessage;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.UMCHead;\n\npublic class UlfStreamTransferMessage extends ArchStreamTransferMessage {\n    public UlfStreamTransferMessage( UMCHead head ) {\n        super( head );\n    }\n\n    public UlfStreamTransferMessage( UMCHead head, InputStream inStream ) {\n        super( head, inStream );\n    }\n\n    public UlfStreamTransferMessage( Map<String,Object > joExHead, InputStream inStream, int controlBits ) {\n        super( joExHead, inStream, controlBits );\n    }\n\n    public UlfStreamTransferMessage( Map<String,Object > joExHead, InputStream inStream ) {\n        super( joExHead, inStream, 0 );\n    }\n\n    public UlfStreamTransferMessage(Object exHead, ExtraEncode encode, InputStream inStream, int controlBits ) {\n        super( exHead, encode, inStream, controlBits );\n    }\n\n    public UlfStreamTransferMessage( Object exHead, InputStream inStream ) {\n        this( exHead, ExtraEncode.Prototype, inStream, 0 );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/UnsetUlfAsyncMsgHandleAdapter.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\n\nimport io.netty.channel.ChannelHandlerContext;\n\n/**\n * UnsetUlfAsyncMsgHandleAdapter\n * Dummy UlfAsyncMsgHandleAdapter\n */\npublic final class UnsetUlfAsyncMsgHandleAdapter implements UlfAsyncMsgHandleAdapter {\n    private MessageNode mMessageNode;\n    private Logger      mLogger;\n\n    public UnsetUlfAsyncMsgHandleAdapter( MessageNode node ) {\n        this.mMessageNode = node;\n\n        if ( this.mMessageNode instanceof Slf4jTraceable ) {\n            this.mLogger = ((Slf4jTraceable) this.mMessageNode).getLogger();\n        }\n        else {\n            this.mLogger = LoggerFactory.getLogger( this.getClass() );\n        }\n    }\n\n    @Override\n    public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) {\n        this.mLogger.warn( \"Warning, MsgHandleAdapter is unset. Info => {}, {}\", block.getChannel().getChannelID(), msg );\n    }\n\n    @Override\n    public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n        this.mLogger.warn( \"Warning, MsgHandleAdapter is unset. Info => {}\", msg );\n    }\n\n    @Override\n    public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n        this.mLogger.warn( \"Warning, MsgHandleAdapter is unset. 
Info => {}\", msg );\n    }\n\n    @Override\n    public void onErrorMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) {\n        this.mLogger.warn( \"Warning, MsgHandleAdapter is unset. Info => {}\", msg );\n    }\n\n    @Override\n    public void onError( ChannelHandlerContext ctx, Throwable cause ) {\n        this.onError( (Object) ctx, cause );\n    }\n\n    @Override\n    public void onError( Object data, Throwable cause ) {\n        this.mLogger.error( \"UnsetMsgHandleAdapter. Error => {}, {}\", cause.getMessage(), cause.toString() );\n        if( !( cause instanceof Exception ) ) {\n            throw new ProvokeHandleException( cause );\n        }\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfMCInitializationException.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\npublic class WolfMCInitializationException extends WolfMCServiceException {\n    public WolfMCInitializationException() {\n        super();\n    }\n\n    public WolfMCInitializationException( String message ) {\n        super(message);\n    }\n\n    public WolfMCInitializationException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public WolfMCInitializationException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfMCNode.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.regimentation.CascadeNodus;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelHandleException;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.event.ChannelDataInterceptor;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.handler.ErrorMessageAudit;\nimport com.pinecone.hydra.umc.msg.handler.GenericErrorMessageAudit;\nimport com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.locks.ReentrantLock;\n\nimport io.netty.channel.ChannelHandlerContext;\n\n\npublic abstract class WolfMCNode extends WolfNettyServgram implements UlfMessageNode {\n    protected ExtraHeadCoder               mExtraHeadCoder          ;\n    protected final ReentrantLock          mMajorIOLock             = new ReentrantLock();\n    protected ErrorMessageAudit            mErrorMessageAudit       ;\n    protected UlfMessageNode               mParentNode              ;\n    protected Namespace                    mNodeNamespace           ;\n    protected long                         mnMessageNodeId          ;\n\n    protected List<ChannelInactiveHandler> 
mChannelInactiveHandlers ;\n    protected List<ChannelDataInterceptor> mArrivedDataInterceptors ;\n\n    public WolfMCNode( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map<String, Object> joConf, @Nullable ExtraHeadCoder extraHeadCoder ) {\n        super( szName, parentProcess, joConf );\n\n        this.mExtraHeadCoder           = extraHeadCoder;\n        this.mErrorMessageAudit        = new GenericErrorMessageAudit( this );\n        this.mParentNode               = parent;\n        this.mnMessageNodeId           = nodeId;\n        this.mChannelInactiveHandlers  = new ArrayList<>();\n        this.mArrivedDataInterceptors  = new ArrayList<>();\n        this.setTargetingName( szName );\n    }\n\n    public WolfMCNode(long nodeId, String szName, Hydrogen system, Map<String, Object> joConf, @Nullable ExtraHeadCoder extraHeadCoder ) {\n        this( nodeId, szName, system, null, joConf, extraHeadCoder );\n    }\n\n    protected void checkDeregisterHandlerStatus() throws IllegalStateException  {\n        if ( !this.isShutdown() ) {\n            throw new IllegalStateException( \"Service is already running.\" );\n        }\n    }\n\n    @Override\n    public UlfMessageNode registerChannelInactiveHandler( ChannelInactiveHandler handler ) throws IllegalStateException {\n        this.checkDeregisterHandlerStatus();\n        this.mChannelInactiveHandlers.add( handler );\n        return this;\n    }\n\n    @Override\n    public UlfMessageNode deregisterChannelInactiveHandler( ChannelInactiveHandler handler ) throws IllegalStateException {\n        this.checkDeregisterHandlerStatus();\n        this.mChannelInactiveHandlers.remove( handler );\n        return this;\n    }\n\n    @Override\n    public UlfMessageNode registerArrivedDataInterceptor( ChannelDataInterceptor handler ) throws IllegalStateException {\n        this.checkDeregisterHandlerStatus();\n        this.mArrivedDataInterceptors.add( handler );\n        return this;\n    }\n\n    
@Override\n    public UlfMessageNode deregisterArrivedDataInterceptor( ChannelDataInterceptor handler ) throws IllegalStateException {\n        this.checkDeregisterHandlerStatus();\n        this.mArrivedDataInterceptors.remove( handler );\n        return this;\n    }\n\n    protected boolean tryInvokeOrInterceptArrivedData( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws ChannelHandleException {\n        for( ChannelDataInterceptor h : this.mArrivedDataInterceptors ) {\n            if ( h.interceptAfterDataArrived( medium, block, msg, ctx, rawMsg ) ){\n                return true;\n            }\n        }\n\n        return false;\n    }\n\n\n\n    @Override\n    public CascadeNodus parent() {\n        return this.mParentNode;\n    }\n\n    @Override\n    public Namespace getTargetingName() {\n        return this.mNodeNamespace;\n    }\n\n    @Override\n    public void setTargetingName( Namespace name ) {\n        this.mNodeNamespace = name;\n    }\n\n    @Override\n    public ExtraHeadCoder getExtraHeadCoder() {\n        return this.mExtraHeadCoder;\n    }\n\n    @Override\n    public long getMessageNodeId() {\n        return this.mnMessageNodeId;\n    }\n\n    public ReentrantLock getMajorIOLock() {\n        return this.mMajorIOLock;\n    }\n\n    public WolfMCNode apply( Map<String, Object> joConf ) {\n        this.setConfig( joConf );\n\n        try{\n            if( this.mExtraHeadCoder == null ) {\n                String szExtraHeadCoder   = (String) joConf.get( \"ExtraHeadCoder\" );\n                if( StringUtils.isEmpty( szExtraHeadCoder ) ) {\n                    this.mExtraHeadCoder  = new GenericExtraHeadCoder() ;\n                }\n                else {\n                    this.mExtraHeadCoder  = (ExtraHeadCoder) DynamicFactory.DefaultFactory.loadInstance( szExtraHeadCoder, null, null );\n                }\n\n                String szDefaultExtraEncode = (String) joConf.get( 
\"DefaultExtraEncode\" );\n                if( StringUtils.isEmpty( szDefaultExtraEncode ) ) {\n                    this.mExtraHeadCoder.setDefaultEncode( ExtraEncode.JSONString );\n                }\n                else {\n                    this.mExtraHeadCoder.setDefaultEncode( ExtraEncode.valueOf( szDefaultExtraEncode ) );\n                }\n            }\n        }\n        catch ( ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n\n        return this;\n    }\n\n    public abstract WolfMCNode apply( UlfAsyncMsgHandleAdapter fnRecipientMsgHandler );\n\n    public WolfMCNode apply( UMCTExpressHandler handler ){\n        this.apply( UlfAsyncMsgHandleAdapter.wrap( handler ) );\n        return this;\n    }\n\n    @Override\n    public ErrorMessageAudit getErrorMessageAudit() {\n        return this.mErrorMessageAudit;\n    }\n\n    @Override\n    public void setErrorMessageAudit( ErrorMessageAudit audit ) {\n        this.mErrorMessageAudit = audit;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfMCServiceException.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.hydra.umc.msg.UMCServiceException;\n\npublic class WolfMCServiceException extends UMCServiceException {\n    public WolfMCServiceException() {\n        super();\n    }\n\n    public WolfMCServiceException( String message ) {\n        super(message);\n    }\n\n    public WolfMCServiceException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public WolfMCServiceException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfMCStandardConstants.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport io.netty.channel.Channel;\n\npublic abstract class WolfMCStandardConstants {\n    public static final String CB_CONTROL_BLOCK_KEY         = \"ControlBlock\";\n    public static final String CB_ASYNC_MSG_HANDLE_KEY      = \"AsyncMsgHandle\";\n    public static final String CB_ASY_EXCLUSIVE_HANDLE_KEY  = \"AsyncExclusiveHandle\";\n    public static final String CB_EXTERNAL_CHANNEL_KEY      = \"ExternalChannel\";\n\n    public static void copyChannelStandardAttrs( Channel leg, Channel neo ) {\n        UlfChannel.copyChannelAttr( leg, neo, WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY );\n        UlfChannel.copyChannelAttr( leg, neo, WolfMCStandardConstants.CB_ASY_EXCLUSIVE_HANDLE_KEY );\n        UlfChannel.copyChannelAttr( leg, neo, WolfMCStandardConstants.CB_EXTERNAL_CHANNEL_KEY );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/WolfNettyServgram.java",
    "content": "package com.pinecone.hydra.umc.wolf;\n\nimport com.pinecone.framework.system.IrrationalProvokedException;\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.servgram.ArchServgramium;\nimport com.pinecone.framework.system.RedirectRuntimeException;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.umc.msg.UMCException;\nimport com.pinecone.hydra.umc.msg.UMCServiceException;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.locks.ReentrantLock;\n\npublic abstract class WolfNettyServgram extends ArchServgramium {\n    protected JSONObject                mjoSectionConf;\n    protected final Object              mPrimaryThreadJoinMutex         = new Object(); // Joining the primary thread, waiting for client-sub-system terminated.\n    protected final Object              mOuterThreadDetachMutex         = new Object(); // Waiting for primary thread initialized. [Outer refers invoked thead, e.g. 
Usually main-thread]\n\n    protected ReentrantLock             mStateMutex                     = new ReentrantLock();\n\n    public WolfNettyServgram( String szName, Processum parentProcess, Map<String, Object> joConf ) {\n        super( szName, parentProcess );\n\n        this.setConfig( joConf );\n    }\n\n    public JSONObject getSectionConf() {\n        return this.mjoSectionConf;\n    }\n\n    @Override\n    public Hydrogen parentSystem() {\n        return (Hydrogen) super.parentSystem();\n    }\n\n\n\n    public abstract boolean isShutdown() ;\n\n    @Override\n    public abstract boolean isTerminated() ;\n\n    protected void setConfig( Map<String, Object> joConf ) {\n        if( joConf instanceof JSONObject ) {\n            this.mjoSectionConf = (JSONObject) joConf;\n        }\n        else {\n            this.mjoSectionConf = new JSONMaptron( joConf, true );\n        }\n    }\n\n    protected void unlockOuterThreadDetachMutex() {\n        synchronized ( this.mOuterThreadDetachMutex ) {\n            this.mOuterThreadDetachMutex.notify();\n        }\n    }\n\n    protected void preparePrimaryThread( Thread primaryThread ) {\n        primaryThread.setName( ( this.className() + \"-primary-\" + primaryThread.getName() ).toLowerCase() );\n        this.setThreadAffinity( primaryThread );\n    }\n\n    protected void joinOuterThread() {\n        synchronized ( this.mOuterThreadDetachMutex ) {\n            try {\n                this.mOuterThreadDetachMutex.wait();// Waiting for primary thread initialized.\n                // This mutex will not locks the parent thread, if you wish to lock it, adding more locks.\n                // If primary thread successfully executed, do nothing, and goto back to parent thread.\n                // If primary exception thrown, redirected it to parent thread.\n            }\n            catch ( InterruptedException e ) {\n                Thread.currentThread().interrupt();\n                throw new ProvokeHandleException( e 
);\n            }\n        }\n    }\n\n    protected void redirectException2ParentThread( Exception previousException ) throws IOException, UMCServiceException {\n        if( previousException instanceof RuntimeException ) {\n            throw new RedirectRuntimeException( previousException );\n        }\n        else if( previousException instanceof IOException ) {\n            throw (IOException) previousException;\n        }\n        else if( previousException instanceof UMCServiceException ) {\n            throw (UMCServiceException) previousException;\n        }\n        else if( previousException instanceof UMCException ) {\n            throw new UMCServiceException( previousException );\n        }\n        else if( previousException != null ){\n            throw new IrrationalProvokedException( previousException ); // This should never be happened.\n        }\n    }\n\n\n\n    @Override\n    public String toString() {\n        return String.format(\n                \"[object %s(0x%s)<\\uD83D\\uDC3A>]\",\n                this.className() , Integer.toHexString( this.hashCode() )\n        );\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ArchAsyncMessenger.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport io.netty.channel.ChannelHandlerContext;\nimport io.netty.channel.ChannelId;\nimport io.netty.channel.EventLoop;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.umc.msg.AsyncMessenger;\nimport com.pinecone.hydra.umc.msg.ChannelAllocateException;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.MediumTerminationException;\nimport com.pinecone.hydra.umc.msg.Messenger;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.UlfIdleFirstBalanceStrategy;\nimport com.pinecone.hydra.umc.wolf.UlfMessageNode;\nimport com.pinecone.hydra.umc.wolf.WolfMCNode;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport java.util.concurrent.CompletableFuture;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\nimport java.util.concurrent.locks.Lock;\nimport java.util.concurrent.locks.ReentrantLock;\n\npublic abstract class ArchAsyncMessenger extends WolfMCNode implements AsyncMessenger, UlfMessageNode {\n    protected final ReentrantLock                                  mSynRequestLock  = new ReentrantLock();\n    protected ProactiveParallelFairSyncChannelPool<ChannelId >     mChannelPool     ;\n    //protected BlockingDeque<UMCMessage>                            mSyncRetMsgQueue = new LinkedBlockingDeque<>();\n\n    public ArchAsyncMessenger( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ) {\n        super( nodeId, szName, parentProcess, parent, joConf, extraHeadCoder );\n\n        this.mChannelPool   = new ProactiveParallelFairSyncChannelPool<>( this.mSynRequestLock, new 
UlfIdleFirstBalanceStrategy() ); //TODO\n        //this.makeNameAndId();\n    }\n\n    public ArchAsyncMessenger(long nodeId, String szName, Hydrogen system, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ) {\n        this( nodeId, szName, system, null, joConf, extraHeadCoder );\n    }\n\n\n    @Override\n    public ProactiveParallelFairSyncChannelPool   getChannelPool() {\n        return this.mChannelPool;\n    }\n\n    Lock                                      getSynRequestLock() {\n        return this.mSynRequestLock;\n    }\n\n    protected long getSyncWaitingMillis() {\n        return ArchAsyncMessenger.getSyncWaitingMillis( this );\n    }\n\n    UlfAsyncMessengerChannelControlBlock      nextSynChannelCB() throws IOException {\n        UlfAsyncMessengerChannelControlBlock block = (UlfAsyncMessengerChannelControlBlock) this.getChannelPool().nextSyncChannel( this.getChannelPool().getMajorWaitTimeout() * 2 );\n        if( block == null ) {\n            throw new ChannelAllocateException( \"Channel allocate failed.\" );\n        }\n        reconnect( block, this.getSyncWaitingMillis() );\n        return block;\n    }\n\n    UlfAsyncMessengerChannelControlBlock      nextAsyChannelCB() throws IOException  {\n        UlfAsyncMessengerChannelControlBlock block = (UlfAsyncMessengerChannelControlBlock) this.getChannelPool().nextAsynChannel( this.getChannelPool().getMajorWaitTimeout() * 2 );\n        if( block == null ) {\n            throw new ChannelAllocateException( \"Channel allocate failed.\" );\n        }\n        reconnect( block, this.getSyncWaitingMillis() );\n        return block;\n    }\n\n//    BlockingDeque<UMCMessage >                getSyncRetMsgQueue() {\n//        return this.mSyncRetMsgQueue;\n//    }\n\n\n    @Override\n    public UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered, long nWaitTime ) throws IOException {\n        return this.nextSynChannelCB().sendSyncMsg( request, bNoneBuffered, nWaitTime );\n    }\n\n    
@Override\n    public void sendAsynMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException {\n        this.nextAsyChannelCB().sendAsynMsg( request, bNoneBuffered );\n    }\n\n    @Override\n    public void sendAsynMsg( UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException {\n        UlfAsyncMessengerChannelControlBlock cb = this.nextAsyChannelCB();\n        if ( handler != null ) {\n            // If the handler is null, do not set it; otherwise, it will disrupt the subsequent handler-setting pipeline.\n            // Additionally, if there is no-response request, it will not affect the later pipeline.\n            // 如果 handler 为 null 不要设置, 否则破坏后面的设置流水线，且无响应的请求不会影响后面的流水线.\n            cb.pushMsgHandle( handler );\n            //cb.getChannel().getNativeHandle().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).set( handler );\n        }\n        cb.sendAsynMsg( request, bNoneBuffered );\n    }\n\n    protected static void reconnect( ChannelControlBlock block, long mils ) throws IOException {\n        if( block.isShutdown() ) {\n            block.getChannel().reconnect( mils );\n            ( (UlfMessageNode)block.getParentMessageNode() ).getChannelPool().setIdleChannel( block );\n        }\n    }\n\n    protected static long getSyncWaitingMillis( Messenger messenger ) {\n        return messenger.getConnectionArguments().getSyncWaitingMillis();\n    }\n\n    public static void reconnect( ChannelControlBlock block, Messenger messenger ) throws IOException {\n        long mils = ArchAsyncMessenger.getSyncWaitingMillis( messenger );\n        ArchAsyncMessenger.reconnect( block, mils );\n    }\n\n    public static void reconnect( ChannelControlBlock block, Messenger messenger, Object context ) throws IOException, MediumTerminationException {\n        if ( context instanceof ChannelHandlerContext ) {\n            ChannelHandlerContext ctx = (ChannelHandlerContext) context;\n            
EventLoop loop = ctx.channel().eventLoop();\n\n            if ( !loop.isShuttingDown() ) {\n                reconnect( block, messenger );\n            }\n            else {\n                throw new MediumTerminationException( \"Medium has already terminated.\" );\n            }\n        }\n        else {\n            reconnect( block, messenger );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ClientConnectArguments.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport com.pinecone.hydra.umc.wolf.MCConnectionArguments;\n\npublic interface ClientConnectArguments extends MCConnectionArguments {\n    int getParallelChannels();\n\n    void setParallelChannels( int parallelChannels );\n\n    boolean isAutoReconnect();\n\n    void setAutoReconnect( boolean autoReconnect );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ClientConnectionArguments.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.wolf.SharedConnectionArguments;\n\npublic class ClientConnectionArguments extends SharedConnectionArguments implements ClientConnectArguments {\n    protected int            mnParallelChannels;\n\n    protected boolean        mbAutoReconnect;\n\n    public ClientConnectionArguments( JSONObject args ) {\n        super( args );\n        this.mnParallelChannels  = args.optInt( \"ParallelChannels\", 1 );\n        this.mbAutoReconnect     = args.optBoolean( \"AutoReconnect\", false );\n    }\n\n    public ClientConnectionArguments( ArchAsyncMessenger args ) {\n        this( args.getSectionConf() );\n    }\n\n    @Override\n    public int getParallelChannels() {\n        return this.mnParallelChannels;\n    }\n\n    @Override\n    public void setParallelChannels( int parallelChannels ) {\n        this.mnParallelChannels = parallelChannels;\n    }\n\n    @Override\n    public boolean isAutoReconnect() {\n        return this.mbAutoReconnect;\n    }\n\n    @Override\n    public void setAutoReconnect( boolean autoReconnect ) {\n        this.mbAutoReconnect = autoReconnect;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/MessengerNettyChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.wolf.*;\nimport io.netty.channel.Channel;\n\nimport java.io.IOException;\nimport java.util.concurrent.BlockingDeque;\nimport java.util.concurrent.LinkedBlockingDeque;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.locks.Lock;\n\npublic class MessengerNettyChannelControlBlock extends ArchChannelControlBlock implements UlfAsyncMessengerChannelControlBlock {\n    protected ArchAsyncMessenger          mParentMessenger;\n    protected BlockingDeque<UMCMessage >  mSyncRetMsgQueue = new LinkedBlockingDeque<>();\n\n    public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger, UlfChannel channel, boolean bForceSyncMode ) {\n        super( messenger, channel, bForceSyncMode );\n        this.mParentMessenger  = messenger;\n    }\n\n    public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger, Channel nativeChannel, boolean bForceSyncMode ) {\n        this( messenger, new UlfChannel( messenger, nativeChannel ), bForceSyncMode );\n    }\n\n    public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger, Channel nativeChannel ) {\n        this( messenger, nativeChannel, false );\n    }\n\n    public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger, boolean bForceSyncMode ) {\n        this( messenger, new UlfChannel( messenger ), bForceSyncMode );\n    }\n\n    public MessengerNettyChannelControlBlock( ArchAsyncMessenger messenger ) {\n        this( messenger, false );\n    }\n\n\n    protected void                        afterConnectionArrive( Medium medium, boolean bRenew ) {\n        super.afterConnectionArrive( medium, bRenew, this.getSynRequestLock() );\n    }\n\n\n    BlockingDeque<UMCMessage >            getSyncRetMsgQueue() {\n        return 
this.mSyncRetMsgQueue;\n    }\n\n\n    @Override\n    public Lock                           getSynRequestLock() {\n        return this.getParentMessageNode().getSynRequestLock();\n    }\n\n    @Override\n    public ArchAsyncMessenger             getParentMessageNode() {\n        return (ArchAsyncMessenger) super.getParentMessageNode();\n    }\n\n    protected UMCMessage                  onlySendSyncMsg( UMCMessage message, boolean bNoneBuffered, long nWaitTime ) throws IOException {\n        UMCMessage msg;\n\n        this.mTransmit.sendMsg( message, bNoneBuffered );\n\n        try{\n            //msg = this.getParentMessageNode().getSyncRetMsgQueue().poll( nWaitTime, TimeUnit.MILLISECONDS );\n            msg = this.getSyncRetMsgQueue().poll( nWaitTime, TimeUnit.MILLISECONDS );\n\n            if( msg == null ) { // Close channel, preventing server sent messages late which could disrupted the sync deque.\n                try{\n                    this.getChannel().close();\n                    ArchAsyncMessenger.reconnect( this, nWaitTime );\n                }\n                catch ( ProvokeHandleException e ) {\n                    if( e.getCause() instanceof IOException ) {\n                        throw new IOException( e );\n                    }\n                }\n\n                throw new IOException( \"Waiting for receive synchronization message timeout [Max -> \" + nWaitTime + \" millis].\" );\n            }\n        }\n        catch ( InterruptedException e ) {\n            msg = null;\n        }\n\n        return msg;\n    }\n\n    @Override\n    public UMCMessage                     sendSyncMsg( UMCMessage message, boolean bNoneBuffered, long nWaitTime ) throws IOException {\n        if( this.mbForceSyncMode ) {\n            return this.onlySendSyncMsg( message, bNoneBuffered, nWaitTime );\n        }\n        else {\n            this.getSynRequestLock().lock();\n            UMCMessage msg = null;\n            try{\n                this.mbInSyncMode = 
true;\n                this.getChannel().setChannelStatus( UlfChannelStatus.FORCE_SYNCHRONIZED );\n                msg = this.onlySendSyncMsg( message, bNoneBuffered, nWaitTime );\n                this.getParentMessageNode().getChannelPool().setIdleChannel( this ); // There will to set channel status.\n                this.mbInSyncMode = false;\n            }\n            finally {\n                this.getSynRequestLock().unlock();\n            }\n            return msg;\n        }\n    }\n\n    @Override\n    public void                           sendAsynMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException {\n        super.sendMsg( request, bNoneBuffered );\n    }\n\n    @Override\n    public void                           release() {\n        super.release();\n        this.mParentMessenger    = null;\n        this.mSyncRetMsgQueue    = null;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ProactiveParallelFairChannelPool.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.FairChannelPool;\nimport com.pinecone.hydra.umc.msg.MappedChannelPool;\nimport com.pinecone.hydra.umc.msg.UMCChannel;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.hydra.umc.wolf.ArchChannelPool;\nimport com.pinecone.hydra.umc.wolf.UlfChannelStatus;\nimport com.pinecone.hydra.umc.wolf.UlfIOLoadBalanceStrategy;\n\nimport java.util.Map;\nimport java.util.Queue;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\npublic class ProactiveParallelFairChannelPool<ID > extends ArchChannelPool implements FairChannelPool, MappedChannelPool {\n    protected ReentrantReadWriteLock   mPoolIOLock = new ReentrantReadWriteLock();\n\n    protected ChannelControlBlock      mExclusiveSyncChannelCB; // The exclusive channel only for synchronized messages only.\n\n    protected UlfIOLoadBalanceStrategy mLoadBalanceStrategy;\n\n    protected long                     mnMajorWaitTimeout = 5000;\n\n    protected LinkedTreeMap<ID, ChannelControlBlock >  mChannelMapQueue;\n\n    protected LinkedTreeMap<ID, ChannelControlBlock >  mChannelIdleQueue;\n\n    protected final Object             mPullQueryLock     = new Object();\n\n\n    public ProactiveParallelFairChannelPool( UlfIOLoadBalanceStrategy strategy ) {\n        this.mLoadBalanceStrategy  = strategy;\n        this.mChannelMapQueue      = new LinkedTreeMap<>();\n        this.mChannelIdleQueue     = new LinkedTreeMap<>();\n    }\n\n\n    public ProactiveParallelFairChannelPool setExclusiveSyncChannel( ChannelControlBlock exclusiveSyncChannelCB ) {\n        this.mExclusiveSyncChannelCB = exclusiveSyncChannelCB;\n        return this;\n    }\n\n    public UMCChannel getExclusiveSyncChannel() {\n        if( this.mExclusiveSyncChannelCB != null ) {\n            return 
this.mExclusiveSyncChannelCB.getChannel();\n        }\n        return null;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected ID warpKey( Object id ) {\n        return (ID)id;\n    }\n\n    @Override\n    public ChannelControlBlock queryChannelById( Object id ) {\n        return this.mChannelMapQueue.get( this.warpKey( id ) );\n    }\n\n    @Override\n    public void onlyRemove( Object id ) {\n        this.mChannelMapQueue.remove ( this.warpKey( id ) );\n        this.mChannelIdleQueue.remove( this.warpKey( id ) );\n    }\n\n    @Override\n    public long getMajorWaitTimeout() {\n        return this.mnMajorWaitTimeout;\n    }\n\n    @Override\n    public ProactiveParallelFairChannelPool setMajorWaitTimeout( long nMillisTimeout ){\n        this.mnMajorWaitTimeout = nMillisTimeout;\n        return this;\n    }\n\n\n    // [1, 2] -> [1, 2, 3]\n    @Override\n    public ProactiveParallelFairChannelPool pushBack( ChannelControlBlock channel ) {\n        ID id = this.warpKey( channel.getChannel().getChannelID() );\n        this.mChannelMapQueue.put( id, channel );\n        this.mChannelIdleQueue.put( id, channel );\n        return this;\n    }\n\n    // [1, 2, 3] ->[2, 3]\n    public ChannelControlBlock pop() {\n        return this.mChannelMapQueue.pop().getValue();\n    }\n\n    @Override\n    public ChannelControlBlock depriveIdleChannel() {\n        this.mPoolIOLock.writeLock().lock();\n        try{\n            ChannelControlBlock qualified = null;\n            for ( Map.Entry<ID, ChannelControlBlock> kv : this.mChannelMapQueue.entrySet() ) {\n                ChannelControlBlock block = kv.getValue();\n                if( block.getChannelStatus().isIdle() )  {\n                    qualified = block;\n                    break;\n                }\n            }\n\n            if ( qualified != null ) {\n                this.onlyRemove( qualified.getChannel().getChannelID() );\n            }\n\n            return qualified;\n        }\n        finally {\n  
          this.mPoolIOLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public ProactiveParallelFairChannelPool setIdleChannel( ChannelControlBlock block ) {\n        this.mPoolIOLock.writeLock().lock();\n        try{\n            block.getChannel().setChannelStatus( UlfChannelStatus.IDLE );\n            this.mChannelIdleQueue.put( this.warpKey( block.getChannel().getChannelID() ), block );\n            //Debug.trace( this.mChannelIdleQueue, this.mChannelIdleQueue.size(), block );\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n        return this;\n    }\n\n    @Override\n    public ChannelPool add( ChannelControlBlock block ) {\n        this.mPoolIOLock.writeLock().lock();\n        try{\n            this.pushBack( block );\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n        return this;\n    }\n\n    protected ChannelControlBlock queryNextChannel( long nMillisTimeout, boolean bEager, boolean bSync ) {\n        ChannelControlBlock nextChannel     = null;\n\n        this.mPoolIOLock.readLock().lock();\n        try {\n            if( this.mChannelMapQueue.isEmpty() ) {\n                return null;\n            }\n        }\n        finally {\n            this.mPoolIOLock.readLock().unlock();\n        }\n\n        long nLastTime = System.currentTimeMillis();\n        while ( true ) {\n            boolean bIsIdleEmpty = this.mChannelIdleQueue.isEmpty();\n            if( !bIsIdleEmpty ) {\n                // Condition1: If there has an idle, just use it.\n                this.mPoolIOLock.writeLock().lock();\n                try{\n                    bIsIdleEmpty = this.mChannelIdleQueue.isEmpty();\n                    if ( !bIsIdleEmpty ) {\n                        nextChannel = this.mChannelIdleQueue.pop().getValue();\n                    }\n                }\n                finally {\n                    this.mPoolIOLock.writeLock().unlock();\n           
     }\n            }\n\n            if ( nextChannel == null ) {\n                // Condition2: If there are no idles, waiting and found balance channel.\n                // Notice: In asynchronous condition, the producer could produce over-allocated messages and dump them into the queue of one channel so that consumers will mismatch the produced messages.\n                // Using LinkedTreeMapQueue to sift repetitive idle channel and keep the queue.\n                try {\n                    this.mPoolIOLock.readLock().lock();\n                    if( bSync ) {\n                        for ( Map.Entry<ID, ChannelControlBlock> kv : this.mChannelMapQueue.entrySet() ) {\n                            ChannelControlBlock block = kv.getValue();\n                            if( this.mLoadBalanceStrategy.matched( block ) || block.isShutdown() ) {\n                                nextChannel = block;\n                                break;\n                            }\n                        }\n                    }\n                    else {\n                        for ( Map.Entry<ID, ChannelControlBlock> kv : this.mChannelMapQueue.entrySet() ) {\n                            ChannelControlBlock block = kv.getValue();\n                            boolean bFirstStrategyMatched = this.mLoadBalanceStrategy.matched( block );\n                            if( bFirstStrategyMatched || block.getChannelStatus().isAsynAvailable() || block.isShutdown() )  {\n                                nextChannel = block;\n                                break;\n                            }\n                        }\n                    }\n                }\n                finally {\n                    this.mPoolIOLock.readLock().unlock();\n                }\n            }\n\n\n            if( nextChannel != null ) {\n                this.mPoolIOLock.writeLock().lock();\n                try{\n                    ID id = this.warpKey( nextChannel.getChannel().getChannelID() );\n       
             this.mChannelMapQueue.remove( id );\n                    this.mChannelMapQueue.put( id, nextChannel ); // push back to queue tail\n                }\n                finally {\n                    this.mPoolIOLock.writeLock().unlock();\n                }\n                break;\n            }\n\n            if( !bEager ) {\n                try{\n                    this.mPullQueryLock.wait( 10 );\n                }\n                catch ( InterruptedException e ) {\n                    // Just return null.\n                    break;\n                }\n            }\n\n            if( nMillisTimeout > 0 && System.currentTimeMillis() - nLastTime > nMillisTimeout ) {\n                break;\n            }\n        }\n        return nextChannel;\n    }\n\n    @Override\n    public ChannelControlBlock nextAsynChannel( long nMillisTimeout, boolean bEager ) {\n        return this.queryNextChannel( nMillisTimeout, bEager, false );\n    }\n\n    @Override\n    public ChannelControlBlock nextAsynChannel( long nMillisTimeout ) {\n        return this.nextAsynChannel( nMillisTimeout, true );\n    }\n\n    @Override\n    public ChannelControlBlock nextAsynChannel() {\n        return this.nextAsynChannel( this.mnMajorWaitTimeout );\n    }\n\n    @Override\n    public boolean isEmpty() {\n        this.mPoolIOLock.readLock().lock();\n        try {\n            return this.mChannelMapQueue.isEmpty();\n        }\n        finally {\n            this.mPoolIOLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    public int size() {\n        this.mPoolIOLock.readLock().lock();\n        try {\n            return this.mChannelMapQueue.size();\n        }\n        finally {\n            this.mPoolIOLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    public void clear() {\n        this.mPoolIOLock.writeLock().lock();\n        try {\n            for( ChannelControlBlock block : this.mChannelMapQueue.values() ) {\n                block.close();\n         
       block.release();\n            }\n\n            this.mChannelMapQueue.clear();\n            this.mChannelIdleQueue.clear();\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public Map getPooledMap() {\n        return this.mChannelMapQueue;\n    }\n\n    @Override\n    public Queue getMajorQueue() {\n        return this.mChannelMapQueue.toQueue();\n    }\n\n    @Override\n    public void remove( ChannelControlBlock ccb ) {\n        this.mPoolIOLock.writeLock().lock();\n        try {\n            ID id = this.warpKey( ccb.getChannel().getChannelID() );\n            this.onlyRemove( id );\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public void deactivate( ChannelControlBlock ccb ) {\n        this.mPoolIOLock.writeLock().lock();\n        try {\n            ID id = this.warpKey( ccb.getChannel().getChannelID() );\n            if( !ccb.getChannel().isShutdown() ) {\n                ccb.close();\n                ccb.release();\n                this.onlyRemove( id );\n            }\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public ChannelControlBlock terminate( Object id ) throws InterruptedException {\n        this.mPoolIOLock.writeLock().lock();\n        ChannelControlBlock block = null;\n        try{\n            block = this.queryChannelById( id );\n            if( block != null ) {\n                block.close();\n                block.release();\n            }\n            this.onlyRemove( id );\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n        return block;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/ProactiveParallelFairSyncChannelPool.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport java.util.concurrent.locks.Lock;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.SyncFairChannelPool;\nimport com.pinecone.hydra.umc.wolf.UlfIOLoadBalanceStrategy;\n\npublic class ProactiveParallelFairSyncChannelPool<ID > extends ProactiveParallelFairChannelPool<ID > implements SyncFairChannelPool {\n    protected Lock mSynRequestLock;\n\n    public ProactiveParallelFairSyncChannelPool( Lock synRequestLock, UlfIOLoadBalanceStrategy strategy ) {\n        super(strategy);\n        this.mSynRequestLock            = synRequestLock;\n    }\n\n    @Override\n    public ChannelControlBlock nextSyncChannel( long nMillisTimeout, boolean bEager ) {\n        this.mSynRequestLock.lock();\n\n        if( this.mExclusiveSyncChannelCB != null ) {\n            return this.mExclusiveSyncChannelCB;\n        }\n        else {\n            ChannelControlBlock cb = null;\n            try{\n                cb = this.queryNextChannel( nMillisTimeout, bEager, true );\n            }\n            finally {\n                this.mSynRequestLock.unlock();\n            }\n            return cb;\n        }\n    }\n\n    @Override\n    public ChannelControlBlock nextSyncChannel( long nMillisTimeout ) {\n        return this.nextSyncChannel( nMillisTimeout, true );\n    }\n\n    @Override\n    public ChannelControlBlock nextSyncChannel() {\n        return this.nextSyncChannel( this.mnMajorWaitTimeout );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/UlfAsyncMessengerChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport com.pinecone.hydra.umc.msg.AsyncMessengerChannelControlBlock;\nimport com.pinecone.hydra.umc.wolf.NettyChannelControlBlock;\nimport com.pinecone.hydra.umc.wolf.UlfChannel;\n\npublic interface UlfAsyncMessengerChannelControlBlock extends AsyncMessengerChannelControlBlock, NettyChannelControlBlock {\n    @Override\n    UlfChannel getChannel();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/UlfClient.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.event.ChannelEventHandler;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.UlfMessageNode;\n\npublic interface UlfClient extends UlfMessageNode {\n\n    ClientConnectArguments getConnectionArguments();\n\n    UMCMessage sendSyncMsg( UMCMessage request ) throws IOException;\n\n    UMCMessage sendSyncMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException ;\n\n    void       sendAsynMsg( UMCMessage request ) throws IOException ;\n\n    void       sendAsynMsg( UMCMessage request, UlfAsyncMsgHandleAdapter handler ) throws IOException;\n\n    UlfClient  registerChannelConnectedHandler  ( ChannelEventHandler handler ) throws IllegalStateException ;\n\n    UlfClient  deregisterChannelConnectedHandler( ChannelEventHandler handler ) throws IllegalStateException ;\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/client/WolfMCClient.java",
    "content": "package com.pinecone.hydra.umc.wolf.client;\n\nimport io.netty.bootstrap.Bootstrap;\nimport io.netty.buffer.ByteBuf;\n\nimport io.netty.channel.EventLoopGroup;\nimport io.netty.channel.ChannelHandlerContext;\nimport io.netty.channel.ChannelOption;\nimport io.netty.channel.ChannelInitializer;\nimport io.netty.channel.ChannelFuture;\nimport io.netty.channel.ChannelInboundHandlerAdapter;\nimport io.netty.channel.ChannelFutureListener;\nimport io.netty.channel.nio.NioEventLoopGroup;\nimport io.netty.channel.socket.SocketChannel;\nimport io.netty.channel.socket.nio.NioSocketChannel;\nimport io.netty.handler.timeout.ReadTimeoutHandler;\nimport io.netty.util.AttributeKey;\n\nimport com.pinecone.framework.system.IrrationalProvokedException;\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.umc.msg.Messagus;\nimport com.pinecone.hydra.umc.msg.UMCServiceException;\nimport com.pinecone.hydra.umc.msg.event.ChannelEventHandler;\nimport com.pinecone.hydra.umc.wolf.AsyncUlfMedium;\nimport com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler;\nimport com.pinecone.hydra.umc.wolf.ChannelUtils;\nimport com.pinecone.hydra.umc.wolf.GenericUMCByteMessageDecoder;\nimport com.pinecone.hydra.umc.wolf.MCSecurityAuthentication;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.UlfChannel;\nimport com.pinecone.hydra.umc.wolf.UlfChannelStatus;\nimport com.pinecone.hydra.umc.wolf.UlfMCReceiver;\nimport com.pinecone.hydra.umc.wolf.UlfMessageNode;\nimport com.pinecone.hydra.umc.wolf.UnsetUlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.WolfMCInitializationException;\nimport com.pinecone.hydra.umc.wolf.WolfMCStandardConstants;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport 
com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\nimport java.io.IOException;\n\nimport java.net.InetSocketAddress;\nimport java.net.UnknownHostException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.TimeUnit;\n\n\n/**\n *  Pinecone Ursus For Java WolfClient [ Wolf, Uniform Message Control Protocol Client ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family.\n *  Uniform Message Control Protocol (UMC)\n *    UMC is a simple TCP/IP-based binary transmission protocol.\n *    It supports methods similar to PUT/POST (HTTP), which are designed to fulfill uniform message control.\n *\n *  Uniform Message Control Protocol for WolfMC Service [Client/Server] (Ulf UMC)\n *  *****************************************************************************************\n */\npublic class WolfMCClient extends ArchAsyncMessenger implements UlfClient {\n    protected EventLoopGroup                       mExecutorGroup;\n    protected Bootstrap                            mBootstrap;\n\n    protected ClientConnectArguments               mConnectionArguments;\n    protected MCSecurityAuthentication             mSecurityAuthentication; //TODO\n\n    protected UlfAsyncMsgHandleAdapter             mPrimeAsyncMessageHandler = new UnsetUlfAsyncMsgHandleAdapter( this ); // For all channels.\n\n    protected List<ChannelEventHandler>            mChannelConnectedHandlers = new ArrayList<>();\n\n    public WolfMCClient( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ){\n        super( nodeId, szName, parentProcess, parent, joConf, extraHeadCoder );\n\n        this.apply( joConf );\n    }\n\n    
public WolfMCClient( String szName, Processum parentProcess, UlfMessageNode parent, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ){\n        super( Messagus.nextLocalId(), szName, parentProcess, parent, joConf, extraHeadCoder );\n\n        this.apply( joConf );\n    }\n\n    public WolfMCClient( long nodeId, String szName, Processum parentProcess, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ){\n        this( nodeId, szName, parentProcess, null, joConf, extraHeadCoder );\n    }\n\n    public WolfMCClient( String szName, Processum parentProcess, Map<String, Object>  joConf, ExtraHeadCoder extraHeadCoder ){\n        this( Messagus.nextLocalId(), szName, parentProcess, null, joConf, extraHeadCoder );\n    }\n\n    public WolfMCClient( long nodeId, String szName, Processum parentProcess, Map<String, Object>  joConf ){\n        this( nodeId, szName, parentProcess, joConf, null );\n    }\n\n    public WolfMCClient( String szName, Processum parentProcess, Map<String, Object>  joConf ){\n        this( Messagus.nextLocalId(), szName, parentProcess, joConf, null );\n    }\n\n    public WolfMCClient( long nodeId, String szName, UlfMessageNode parent, Processum parentProcess, Map<String, Object>  joConf ){\n        this( nodeId, szName, parentProcess, parent, joConf, null );\n    }\n\n    public WolfMCClient( String szName, UlfMessageNode parent, Processum parentProcess, Map<String, Object>  joConf ){\n        this( Messagus.nextLocalId(), szName, parentProcess, parent, joConf, null );\n    }\n\n    protected WolfMCClient( Builder builder ){\n        this( builder.nodeId, builder.szName, builder.parentProcess, builder.parent, builder.joConf, builder.extraHeadCoder );\n    }\n\n\n    @Override\n    public UlfClient                      registerChannelConnectedHandler( ChannelEventHandler handler ) throws IllegalStateException {\n        this.checkDeregisterHandlerStatus();\n        this.mChannelConnectedHandlers.add( handler );\n        return this;\n  
  }\n\n    @Override\n    public UlfClient                      deregisterChannelConnectedHandler( ChannelEventHandler handler ) throws IllegalStateException {\n        this.checkDeregisterHandlerStatus();\n        this.mChannelConnectedHandlers.remove( handler );\n        return this;\n    }\n\n    @Override\n    public WolfMCClient                   apply( Map<String, Object>  joConf ) {\n        super.apply( joConf );\n        this.mConnectionArguments = new ClientConnectionArguments( this.getSectionConf() );\n\n        return this;\n    }\n\n    @Override\n    public WolfMCClient                   apply( UlfAsyncMsgHandleAdapter fnAsyncMessageAdapter ) {\n        this.mPrimeAsyncMessageHandler = fnAsyncMessageAdapter;\n\n        return this;\n    }\n\n    @Override\n    public UMCTExpressHandler             getAsyncMsgHandler() {\n        return this.mPrimeAsyncMessageHandler;\n    }\n\n    @Override\n    public ClientConnectArguments         getConnectionArguments() {\n        return this.mConnectionArguments;\n    }\n\n    @Override\n    public ClientConnectArguments         getMessageNodeConfig() {\n        return this.getConnectionArguments();\n    }\n\n    public EventLoopGroup                 getEventLoopGroup() {\n        return this.mExecutorGroup;\n    }\n\n    public Bootstrap                      getBootstrap() {\n        return this.mBootstrap;\n    }\n\n    public int                            getParallelChannels() {\n        return this.getConnectionArguments().getParallelChannels();\n    }\n\n    protected void                        clear(){\n        this.mChannelPool.clear();\n    }\n\n    @Override\n    public void                           close() throws ProvokeHandleException {\n        this.mStateMutex.lock();\n        try {\n            if( this.mExecutorGroup != null ) {\n                this.mExecutorGroup.shutdownGracefully();\n                this.clear();\n                this.mExecutorGroup = null;\n            }\n        }\n        
finally {\n            this.mStateMutex.unlock();\n        }\n\n        try {\n            synchronized ( this.mPrimaryThreadJoinMutex ) {\n                WolfMCClient.this.mPrimaryThreadJoinMutex.notify();\n            }\n        }\n        catch ( IllegalMonitorStateException e ) {\n            throw new ProvokeHandleException( \"IllegalMonitorStateException [WolfMCClient::close], this exception has been redirected to parent thread.\", e );\n        }\n    }\n\n    @Override\n    public void                           kill() {\n        try {\n            this.close();\n        }\n        catch ( ProvokeHandleException e ) {\n            super.kill(); // Kill master thread forcefully.\n            this.clear();\n        }\n    }\n\n    @Override\n    public boolean                        isShutdown() {\n        if ( this.mExecutorGroup == null ) {\n            return true;\n        }\n        return this.mExecutorGroup.isShutdown();\n    }\n\n    @Override\n    public boolean                        isTerminated() {\n        if ( this.mExecutorGroup == null ) {\n            return true;\n        }\n        return this.mExecutorGroup.isTerminated();\n    }\n\n    protected void                        notifyChannelConnected( ChannelControlBlock block, ChannelHandlerContext ctx ) {\n        for( ChannelEventHandler h : this.mChannelConnectedHandlers ) {\n            h.afterEventTriggered( block, ctx );\n        }\n    }\n\n    protected MessengerNettyChannelControlBlock syncSpawnSoloChannel() throws IOException, UMCServiceException {\n        MessengerNettyChannelControlBlock ccb = null;\n        ccb                                   = new MessengerNettyChannelControlBlock( this );\n        ChannelFuture future                  = ccb.getChannel().toConnect(\n                new InetSocketAddress( this.getConnectionArguments().getHost(), this.getConnectionArguments().getPort() )\n        ).getLastChannelFuture();\n        UlfChannel channel = ccb.getChannel();\n        
channel.getNativeHandle().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).set( ccb );\n        ChannelUtils.setChannelIdentityID( channel, this.mnMessageNodeId );\n\n        this.getTaskManager().add( ccb );\n\n        future.addListener(new ChannelFutureListener() {\n            @Override\n            public void operationComplete( ChannelFuture channelFuture ) throws Exception {\n                synchronized ( WolfMCClient.this.mPrimaryThreadJoinMutex ) {\n//                    if ( WolfMCClient.this.isShutdown() ) {\n//                        WolfMCClient.this.mShutdown = !channelFuture.isSuccess();\n//                    }\n                    WolfMCClient.this.mPrimaryThreadJoinMutex.notify();\n                }\n            }\n        });\n        //channel.closeFuture().sync();\n        this.getChannelPool().pushBack( ccb );\n\n        synchronized ( this.mPrimaryThreadJoinMutex ) {\n            try {\n                this.mPrimaryThreadJoinMutex.wait( this.getConnectionArguments().getSocketTimeout() );\n                if( WolfMCClient.this.isShutdown() ) {\n                    throw new UnknownHostException( \"Connect failed with '\" + this.getConnectionArguments().getHost() + \":\" + this.getConnectionArguments().getPort() + \"'\" );\n                }\n            }\n            catch ( InterruptedException e ) {\n                Thread.currentThread().interrupt();\n                throw new WolfMCInitializationException( e );\n            }\n        }\n\n        this.notifyChannelConnected( ccb, null );\n        return ccb;\n    }\n\n    protected void                        syncSpawnChannels() throws IOException, UMCServiceException {\n        int n = this.getConnectionArguments().getParallelChannels();\n\n        for ( int i = 0; i < n; i++ ) {\n            MessengerNettyChannelControlBlock block = this.syncSpawnSoloChannel();\n            this.infoLifecycle( String.format( \"Channel%d(%s)\", i, 
block.getChannel().getChannelID() ), \"Spawned\" );\n        }\n    }\n\n    protected void                        invokeChannelOwnedOnError( ChannelHandlerContext ctx, Throwable cause ) {\n        try {\n            UlfAsyncMsgHandleAdapter handle = (UlfAsyncMsgHandleAdapter)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).get();\n            if( handle == null ) {\n                ChannelControlBlock ccb = (ChannelControlBlock)ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY ) ).get();\n                handle = ccb.pollMsgHandle( ArchAsyncMessenger.getSyncWaitingMillis( this ) );\n                if( handle == null ) {\n                    handle = WolfMCClient.this.mPrimeAsyncMessageHandler;\n                }\n            }\n            handle.onError( ctx, cause );\n        }\n        catch ( InterruptedException e ) {\n            Thread.currentThread().interrupt();\n        }\n    }\n\n    protected void                        handleArrivedMessage( UlfAsyncMsgHandleAdapter handle, Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n        if( this.getErrorMessageAudit().isErrorMessage( msg ) ) {\n            handle.onErrorMsgReceived( medium, block, msg, ctx, msg );\n        }\n        else {\n            handle.onSuccessfulMsgReceived( medium, block, msg, ctx, msg );\n        }\n    }\n\n    protected void                        shutdownIfAllChannelDetached ( MessengerNettyChannelControlBlock ccb ) {\n        if ( !WolfMCClient.this.getConnectionArguments().isAutoReconnect() ) {\n            if( WolfMCClient.this.getChannelPool().isAllChannelsTerminated() ) {\n                try{\n                    WolfMCClient.this.getLogger().warn( \"<AutoReconnection is disabled> All channels are terminated, client terminating.\" );\n                    WolfMCClient.this.close();\n                }\n                
catch ( ProvokeHandleException e ) {\n                    throw new IrrationalProvokedException( e ); // Those should never have happened.\n                }\n\n                return;\n            }\n\n            WolfMCClient.this.getChannelPool().deactivate( ccb );\n            WolfMCClient.this.getMajorIOLock().lock();\n            try{\n                WolfMCClient.this.getTaskManager().erase( ccb );\n            }\n            finally {\n                WolfMCClient.this.getMajorIOLock().unlock();\n            }\n        }\n    }\n\n    protected void                        initNettySubsystem() throws IOException, UMCServiceException {\n        this.mExecutorGroup = new NioEventLoopGroup();\n        this.mBootstrap     = new Bootstrap();\n        Bootstrap bootstrap = this.mBootstrap;\n        bootstrap.group  ( this.mExecutorGroup    );\n        bootstrap.channel( NioSocketChannel.class );\n        bootstrap.option ( ChannelOption.CONNECT_TIMEOUT_MILLIS, this.getConnectionArguments().getSocketTimeout() );\n        bootstrap.handler( new ChannelInitializer<SocketChannel>() {\n            @Override\n            protected void initChannel( SocketChannel sc ) throws Exception {\n                sc.pipeline().addLast( new ReadTimeoutHandler( WolfMCClient.this.getConnectionArguments().getKeepAliveTimeout(), TimeUnit.SECONDS ) );\n                sc.pipeline().addLast( new GenericUMCByteMessageDecoder( WolfMCClient.this.getExtraHeadCoder() ) );\n                sc.pipeline().addLast( new ChannelInboundHandlerAdapter (){\n                    @Override\n                    public void channelActive( ChannelHandlerContext ctx ) throws Exception {\n                        super.channelActive(ctx);\n\n                        //UlfChannelControlBlock channel = WolfMCClient.this.getChannelPool().queryChannelById( ctx.channel().id() );\n                        MessengerNettyChannelControlBlock channel = (MessengerNettyChannelControlBlock)ctx.channel().attr(\n               
                 AttributeKey.valueOf(WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY)\n                        ).get();\n\n                        channel.afterConnectionArrive(\n                                new AsyncUlfMedium( ctx, null, WolfMCClient.this ),  false\n                        );\n                        channel.setThreadAffinity( Thread.currentThread() );\n                        synchronized ( WolfMCClient.this.mPrimaryThreadJoinMutex ) {\n                            WolfMCClient.this.mPrimaryThreadJoinMutex.notify();\n                        }\n                    }\n\n                    @Override\n                    public void channelRead( ChannelHandlerContext ctx, Object msg ) throws Exception {\n                        Medium medium          = new AsyncUlfMedium( ctx, (ByteBuf) msg, WolfMCClient.this );\n                        UlfMCReceiver receiver = new UlfMCReceiver( medium );\n                        UMCMessage message     = receiver.readMsg();\n\n                        MessengerNettyChannelControlBlock channelControlBlock = (MessengerNettyChannelControlBlock)ctx.channel().attr(\n                                AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY )\n                        ).get();\n\n\n                        //Debug.trace( channelControlBlock.getChannel().getChannelID() );\n                        if( channelControlBlock.getChannelStatus() == UlfChannelStatus.FORCE_SYNCHRONIZED ){\n                            channelControlBlock.getSyncRetMsgQueue().add( message );\n                            //WolfMCClient.this.mSyncRetMsgQueue.add( message );\n                        }\n                        else {\n                            if ( !WolfMCClient.this.tryInvokeOrInterceptArrivedData( medium, channelControlBlock, message, ctx, msg ) ) {\n                                UlfAsyncMsgHandleAdapter handle = (UlfAsyncMsgHandleAdapter)ctx.channel().attr(\n                                        
AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY )\n                                ).get();\n                                if ( handle == null ) {\n                                    handle = channelControlBlock.pollMsgHandle( WolfMCClient.this.getSyncWaitingMillis() ); // Try pipeline.\n                                }\n\n                                if( handle != null ) {\n                                    WolfMCClient.this.handleArrivedMessage( handle, medium, channelControlBlock, message, ctx, msg );\n\n                                    // Preserving binding-status for exclusive handler-binding channel.\n                                    Object dyAsynExclusiveHandle = ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASY_EXCLUSIVE_HANDLE_KEY ) ).get();\n                                    if ( dyAsynExclusiveHandle == null || !(Boolean) dyAsynExclusiveHandle ){\n                                        ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_ASYNC_MSG_HANDLE_KEY ) ).set( null ); // For another channel to reset, likes ajax.\n                                    }\n                                }\n                                else {\n                                    WolfMCClient.this.handleArrivedMessage( WolfMCClient.this.mPrimeAsyncMessageHandler, medium, channelControlBlock, message, ctx, msg );\n                                }\n                            }\n\n                            Object dyExternalChannel = ctx.channel().attr( AttributeKey.valueOf( WolfMCStandardConstants.CB_EXTERNAL_CHANNEL_KEY ) ).get();\n                            if ( dyExternalChannel == null || !(Boolean) dyExternalChannel ){\n                                WolfMCClient.this.getChannelPool().setIdleChannel( channelControlBlock );\n                            }\n                        }\n\n                        medium.release();\n                        medium = new AsyncUlfMedium( ctx, 
null, WolfMCClient.this );\n                        channelControlBlock.afterConnectionArrive( medium,  true );\n                    }\n\n                    @Override\n                    public void channelInactive( ChannelHandlerContext ctx ) throws Exception {\n                        MessengerNettyChannelControlBlock ccb = (MessengerNettyChannelControlBlock)ctx.channel().attr(\n                                AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY )\n                        ).get();\n\n                        if ( !WolfMCClient.this.mChannelInactiveHandlers.isEmpty() ) {\n                            boolean bBlocked = false;\n                            for ( ChannelInactiveHandler handler : WolfMCClient.this.mChannelInactiveHandlers ) {\n                                if ( handler.afterChannelInactive( ccb, ctx ) ) {\n                                    bBlocked = true;\n                                }\n                            }\n\n                            if ( bBlocked ) {\n                                WolfMCClient.this.shutdownIfAllChannelDetached( ccb );\n                                return;\n                            }\n                        }\n\n                        WolfMCClient.this.shutdownIfAllChannelDetached( ccb );\n                    }\n\n                    @Override\n                    public void exceptionCaught( ChannelHandlerContext ctx, Throwable cause ) throws Exception {\n                        WolfMCClient.this.invokeChannelOwnedOnError( ctx, cause );\n                    }\n                } );\n            }\n\n            @Override\n            public void exceptionCaught( ChannelHandlerContext ctx, Throwable cause ) throws Exception {\n                WolfMCClient.this.invokeChannelOwnedOnError( ctx, cause );\n            }\n        });\n\n        this.syncSpawnChannels();\n        this.infoLifecycle( \"Wolf<\\uD83D\\uDC3A>::initNettySubsystem\", \"Ready\" );\n    }\n\n    public 
void                           connect() throws IOException, UMCServiceException {\n        this.mStateMutex.lock();\n\n        try{\n            if( this.isShutdown() ) {\n                this.initNettySubsystem(); // Exception thrown and truncating next detach-mutex-release, redirecting to primary thread.\n            }\n        }\n        finally {\n            this.mStateMutex.unlock();\n            WolfMCClient.this.unlockOuterThreadDetachMutex();  // This lock shouldn`t be released in `finally`, waiting for primary thread to process.\n        }\n\n        synchronized ( this.mPrimaryThreadJoinMutex ) {\n            try {\n                this.mPrimaryThreadJoinMutex.wait( ); // Join the primary thread.\n            }\n            catch ( InterruptedException e ) {\n                Thread.currentThread().interrupt();\n                throw new WolfMCInitializationException( e );\n            }\n        }\n    }\n\n    @Override\n    public void                           execute() throws UMCServiceException {\n        if ( !this.isShutdown() ) {\n            this.mLogger.info( \"WolfMCClient [{}:{}] is already started. 
<Pass>\", this.getName(), this.hashCode() );\n            return;\n        }\n\n        Exception[] lastException = new Exception[] { null };\n        Thread primaryThread      = new Thread( new Runnable() {\n            @Override\n            public void run() {\n                WolfMCClient.this.getTaskManager().notifyExecuting( WolfMCClient.this );\n                try{\n                    WolfMCClient.this.connect();\n                }\n                catch ( Exception e ) {\n                    lastException[0] = e;\n                    WolfMCClient.this.kill();\n                }\n                finally {\n                    WolfMCClient.this.getTaskManager().notifyFinished( WolfMCClient.this );\n                    WolfMCClient.this.unlockOuterThreadDetachMutex();\n                }\n            }\n        });\n\n        this.preparePrimaryThread( primaryThread );\n        primaryThread.start();\n\n        this.joinOuterThread();\n\n        try {\n            this.redirectException2ParentThread( lastException[0] );\n        }\n        catch ( IOException e ) {\n            throw new WolfMCInitializationException( e );\n        }\n    }\n\n    @Override\n    public UMCMessage                     sendSyncMsg( UMCMessage request ) throws IOException {\n        return this.sendSyncMsg( request, false );\n    }\n\n    @Override\n    public UMCMessage                     sendSyncMsg( UMCMessage request, boolean bNoneBuffered ) throws IOException {\n        return this.sendSyncMsg( request, bNoneBuffered, this.getConnectionArguments().getSyncWaitingMillis() );\n    }\n\n    @Override\n    public void                           sendAsynMsg( UMCMessage request ) throws IOException {\n        this.sendAsynMsg( request, false );\n    }\n\n    @Override\n    public void                           sendAsynMsg( UMCMessage request, UlfAsyncMsgHandleAdapter handler ) throws IOException {\n        this.sendAsynMsg( request, false, handler );\n    }\n\n\n\n\n    public 
static class Builder {\n        private long                nodeId = -1;\n        private String              szName;\n        private Processum           parentProcess;\n        private UlfMessageNode      parent;\n        private Map<String, Object> joConf;\n        private ExtraHeadCoder      extraHeadCoder;\n\n        public Builder setNodeId( long nodeId ) {\n            this.nodeId = nodeId;\n            return this;\n        }\n\n        public Builder setName( String szName ) {\n            this.szName = szName;\n            return this;\n        }\n\n        public Builder setParentProcess( Processum parentProcess ) {\n            this.parentProcess = parentProcess;\n            return this;\n        }\n\n        public Builder setParent( UlfMessageNode parent ) {\n            this.parent = parent;\n            return this;\n        }\n\n        public Builder setJoConf( Map<String, Object> joConf ) {\n            this.joConf = joConf;\n            return this;\n        }\n\n        public Builder setExtraHeadCoder( ExtraHeadCoder extraHeadCoder ) {\n            this.extraHeadCoder = extraHeadCoder;\n            return this;\n        }\n\n        public WolfMCClient build() {\n            this.validate();\n            return new WolfMCClient(this);\n        }\n\n        private void validate() {\n            if ( this.szName == null || this.szName.isEmpty() ) {\n                long nId = this.nodeId;\n                if ( nId == -1 ) {\n                    nId = System.nanoTime();\n                }\n                this.szName = WolfMCClient.class.getSimpleName() + \"_\" + nId;\n            }\n            if ( this.joConf == null ) {\n                throw new IllegalArgumentException( \"Configuration (Conf) cannot be null\" );\n            }\n        }\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/AbstractTimerTask.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport io.netty.util.Timeout;\nimport io.netty.util.TimerTask;\n\n/**\n * @Description\n * @Author welsir\n * @Date 2024/6/11 23:34\n */\npublic abstract class AbstractTimerTask implements TimerTask {\n\n    @Override\n    public void run( Timeout timeout ) {\n//        Collection<UlfRecipientChannelControlBlock > allChannels = NettyServerChannelRecordPool.getAllChannels();\n//        for ( UlfRecipientChannelControlBlock channel : allChannels ) {\n//            if (!channel.isShutdown()) {\n//                doTask(channel);\n//            }\n//        }\n    }\n\n    protected abstract void doTask( ChannelControlBlock channel );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/IdleChannelTimerTask.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\n\n/**\n * @Description\n * @Author welsir\n * @Date 2024/6/11 23:35\n */\npublic class IdleChannelTimerTask extends AbstractTimerTask {\n\n    private final int idleTimeout;\n\n    public IdleChannelTimerTask( int idleTimeout ) {\n        this.idleTimeout = idleTimeout;\n    }\n\n    @Override\n    protected void doTask( ChannelControlBlock channel ) {\n        try {\n            if(channel.isShutdown()){\n                return;\n            }\n            long now = System.currentTimeMillis();\n            boolean isReadTimeout = isReadTimeout(channel, now);\n            boolean isWriteTimeout = isWriteTimeout(channel, now);\n\n            if (isReadTimeout || isWriteTimeout) {\n                Debug.echo(\"连接超时，尝试关闭连接....\");\n                channel.close();\n                //NettyServerChannelRecordPool.removeChannel(channel);\n            }\n        }\n        catch (Throwable t){\n            throw new RuntimeException(t);\n        }\n    }\n\n    protected boolean isReadTimeout( ChannelControlBlock channel, long now ) {\n        Long lastRead = lastRead(channel);\n        return lastRead != null && now - lastRead > idleTimeout;\n    }\n\n    protected boolean isWriteTimeout( ChannelControlBlock channel, long now ) {\n        Long lastWrite = lastWrite(channel);\n        return lastWrite != null && now - lastWrite > idleTimeout;\n    }\n\n    public Long lastRead( ChannelControlBlock channel ){\n        return 0L;\n        //return channel.getAttribute( IdleChannelHandler.KEY_READ_TIMESTAMP, Long.class );\n    }\n\n    public Long lastWrite( ChannelControlBlock channel ){\n        return 0L;\n        //return channel.getAttribute( IdleChannelHandler.KEY_WRITE_TIMESTAMP, Long.class );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/PassiveRegisterChannelPool.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.RegisterChannelPool;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.wolf.ArchChannelPool;\nimport com.pinecone.hydra.umc.wolf.InternalErrors;\nimport com.pinecone.hydra.umc.wolf.UlfIOLoadBalanceStrategy;\n\nimport java.io.IOException;\nimport java.util.Map;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\n/**\n * @Description PassiveRegisterChannelPool\n * @Author DragonKing, welsir\n * @Date 2024/6/30 16.41\n */\npublic class PassiveRegisterChannelPool<ID > extends ArchChannelPool implements RegisterChannelPool {\n    protected LinkedTreeMap<ID, ChannelControlBlock >    mChannelMapPool;\n    protected UlfIOLoadBalanceStrategy                   mLoadBalanceStrategy;\n    protected final int                                  mnMaximumPoolSize   ;\n    protected long                                       mnMajorWaitTimeout = 5000;\n    protected WolfMCServer                               mRecipient;\n    protected ReentrantReadWriteLock                     mPoolIOLock = new ReentrantReadWriteLock();\n\n    public PassiveRegisterChannelPool( WolfMCServer recipient, UlfIOLoadBalanceStrategy strategy, int nMaximumPoolSize ) {\n        this.mRecipient            = recipient;\n        this.mLoadBalanceStrategy  = strategy;\n        this.mChannelMapPool       = new LinkedTreeMap<>();\n        this.mnMaximumPoolSize     = nMaximumPoolSize;\n    }\n\n    protected ChannelControlBlock addChannel( ChannelControlBlock channel ){\n        try {\n            this.mPoolIOLock.writeLock().lock();\n            if( this.size() >= this.mnMaximumPoolSize ){\n                try{\n                    InternalErrors.sendTooManyConnections( channel );\n                    
channel.close();\n                }\n                catch ( IOException e ) {\n                    throw new ProxyProvokeHandleException( e );\n                }\n                return null;\n            }\n            ID channelId = this.warpKey( channel.getChannel().getChannelID() ) ;\n            this.mChannelMapPool.put( channelId, channel );\n            return channel;\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public ChannelControlBlock depriveIdleChannel() {\n        throw new UnsupportedOperationException( \"Method `depriveIdleChannel` is inapplicable for `PassiveRegisterChannelPool`.\" );\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected ID warpKey( Object id ) {\n        return (ID)id;\n    }\n\n    @Override\n    public ChannelControlBlock queryChannelById( Object id ) {\n        return this.mChannelMapPool.get( this.warpKey( id ) );\n    }\n\n    @Override\n    public void onlyRemove( Object id ) {\n        this.mChannelMapPool.remove( this.warpKey( id ) );\n    }\n\n    @Override\n    public int size() {\n        return this.mChannelMapPool.size();\n    }\n\n    @Override\n    public void clear() {\n        this.mPoolIOLock.writeLock().lock();\n        try{\n            for( ChannelControlBlock block : this.mChannelMapPool.values() ) {\n                block.close();\n                block.release();\n            }\n\n            this.mChannelMapPool.clear();\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mChannelMapPool.isEmpty();\n    }\n\n    @Override\n    public Map getPooledMap() {\n        return this.mChannelMapPool;\n    }\n\n\n    @Override\n    public void remove(ChannelControlBlock ccb) {\n        this.mPoolIOLock.writeLock().lock();\n        try {\n            ID id = this.warpKey( ccb.getChannel().getChannelID() );\n        
    this.onlyRemove( id );\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public void deactivate( ChannelControlBlock ccb ) {\n        this.mPoolIOLock.writeLock().lock();\n        try {\n            ID id = this.warpKey( ccb.getChannel().getChannelID() );\n            if( !ccb.getChannel().isShutdown() ) {\n                ccb.close();\n                ccb.release();\n            }\n            this.onlyRemove( id );\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public ChannelControlBlock terminate( Object id ) throws InterruptedException {\n        this.mPoolIOLock.writeLock().lock();\n        ChannelControlBlock block;\n        try {\n            block = this.queryChannelById( id );\n            if( block != null ) {\n                block.close();\n                block.release();\n                this.onlyRemove( id );\n            }\n        }\n        finally {\n            this.mPoolIOLock.writeLock().unlock();\n        }\n        return block;\n    }\n\n    @Override\n    public long getMajorWaitTimeout() {\n        return this.mnMajorWaitTimeout;\n    }\n\n    @Override\n    public PassiveRegisterChannelPool setMajorWaitTimeout( long nMillisTimeout ){\n        this.mnMajorWaitTimeout = nMillisTimeout;\n        return this;\n    }\n\n    @Override\n    public int getMaximumPoolSize() {\n        return this.mnMaximumPoolSize;\n    }\n\n    @Override\n    public PassiveRegisterChannelPool setIdleChannel( ChannelControlBlock block ) {\n        this.addChannel( block ); // TODO\n        return this;\n    }\n\n    @Override\n    public ChannelPool add( ChannelControlBlock block ) {\n        this.addChannel( block );\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/RecipientNettyChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.wolf.ArchChannelControlBlock;\nimport com.pinecone.hydra.umc.wolf.UlfChannel;\n\nimport io.netty.channel.Channel;\n\nimport java.util.concurrent.locks.Lock;\n\npublic class RecipientNettyChannelControlBlock extends ArchChannelControlBlock implements UlfRecipientChannelControlBlock {\n    protected WolfMCServer mParentRecipient;\n\n    public RecipientNettyChannelControlBlock( WolfMCServer recipient, UlfChannel channel, boolean bForceSyncMode ) {\n        super( recipient, channel, bForceSyncMode );\n        this.mParentRecipient  = recipient;\n    }\n\n    public RecipientNettyChannelControlBlock( WolfMCServer recipient, Channel nativeChannel, boolean bForceSyncMode ) {\n        this( recipient, new UlfChannel( recipient, nativeChannel ), bForceSyncMode );\n    }\n\n    public RecipientNettyChannelControlBlock( WolfMCServer recipient, Channel nativeChannel ) {\n        this( recipient, nativeChannel, false );\n    }\n\n    public RecipientNettyChannelControlBlock( WolfMCServer recipient, boolean bForceSyncMode ) {\n        this( recipient, new UlfChannel( recipient ), bForceSyncMode );\n    }\n\n    public RecipientNettyChannelControlBlock( WolfMCServer recipient ) {\n        this( recipient, false );\n    }\n\n    @Override\n    public WolfMCServer getParentMessageNode() {\n        return (WolfMCServer) super.getParentMessageNode();\n    }\n\n    protected void      afterConnectionArrive( Medium medium, boolean bRenew ) {\n        super.afterConnectionArrive( medium, bRenew, this.getSynRequestLock() );\n    }\n\n\n    protected Lock      getSynRequestLock() {\n        return this.getParentMessageNode().getSynRequestLock();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/ServerConnectArguments.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\nimport com.pinecone.hydra.umc.wolf.MCConnectionArguments;\n\npublic interface ServerConnectArguments extends MCConnectionArguments {\n    int getMaximumClients() ;\n\n    void setMaximumClients( int mnMaximumClients );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/ServerConnectionArguments.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.wolf.SharedConnectionArguments;\nimport com.pinecone.hydra.umc.wolf.client.ArchAsyncMessenger;\n\npublic class ServerConnectionArguments extends SharedConnectionArguments implements ServerConnectArguments {\n    protected int mnMaximumClients; // 0 <= for unlimited clients\n\n    public ServerConnectionArguments( JSONObject args ) {\n        super( args );\n        this.mnMaximumClients  = args.optInt( \"MaximumClients\", 0 );\n    }\n\n    public ServerConnectionArguments( ArchAsyncMessenger args ) {\n        this( args.getSectionConf() );\n    }\n\n    @Override\n    public int getMaximumClients() {\n        return this.mnMaximumClients;\n    }\n\n    @Override\n    public void setMaximumClients( int mnMaximumClients ) {\n        this.mnMaximumClients = mnMaximumClients;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/UlfRecipientChannelControlBlock.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\nimport com.pinecone.hydra.umc.msg.RecipientChannelControlBlock;\nimport com.pinecone.hydra.umc.wolf.NettyChannelControlBlock;\nimport com.pinecone.hydra.umc.wolf.UlfChannel;\n\npublic interface UlfRecipientChannelControlBlock extends RecipientChannelControlBlock, NettyChannelControlBlock {\n    @Override\n    UlfChannel getChannel();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/UlfServer.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\nimport com.pinecone.hydra.umc.msg.Recipient;\nimport com.pinecone.hydra.umc.msg.event.ChannelEventHandler;\nimport com.pinecone.hydra.umc.wolf.UlfMessageNode;\nimport com.pinecone.hydra.umc.wolf.WolfMCNode;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\n\npublic interface UlfServer extends UlfMessageNode, Recipient {\n    WolfMCNode apply( UMCTExpressHandler handler );\n\n    UlfServer registerDataArrivedEventHandlers( ChannelEventHandler handler ) throws IllegalStateException;\n\n    UlfServer deregisterDataArrivedEventHandlers( ChannelEventHandler handler ) throws IllegalStateException;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umc/wolf/server/WolfMCServer.java",
    "content": "package com.pinecone.hydra.umc.wolf.server;\n\nimport io.netty.bootstrap.ServerBootstrap;\nimport io.netty.buffer.ByteBuf;\nimport io.netty.channel.Channel;\nimport io.netty.channel.ChannelFuture;\nimport io.netty.channel.ChannelFutureListener;\nimport io.netty.channel.ChannelHandlerContext;\nimport io.netty.channel.ChannelId;\nimport io.netty.channel.ChannelInboundHandlerAdapter;\nimport io.netty.channel.ChannelInitializer;\nimport io.netty.channel.ChannelOption;\nimport io.netty.channel.EventLoopGroup;\nimport io.netty.channel.nio.NioEventLoopGroup;\nimport io.netty.channel.socket.SocketChannel;\nimport io.netty.channel.socket.nio.NioServerSocketChannel;\nimport io.netty.handler.timeout.ReadTimeoutHandler;\nimport io.netty.util.AttributeKey;\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.msg.Messagus;\nimport com.pinecone.hydra.umc.msg.RecipientChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.UMCServiceException;\nimport com.pinecone.hydra.umc.msg.event.ChannelEventHandler;\nimport com.pinecone.hydra.umc.wolf.AsyncUlfMedium;\nimport com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler;\nimport com.pinecone.hydra.umc.wolf.ChannelUtils;\nimport com.pinecone.hydra.umc.wolf.GenericUMCByteMessageDecoder;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.UlfIdleFirstBalanceStrategy;\nimport com.pinecone.hydra.umc.wolf.UlfMCReceiver;\nimport com.pinecone.hydra.umc.wolf.UlfMessageNode;\nimport com.pinecone.hydra.umc.wolf.UnsetUlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.WolfMCInitializationException;\nimport com.pinecone.hydra.umc.wolf.WolfMCNode;\nimport com.pinecone.hydra.umc.wolf.WolfMCStandardConstants;\nimport 
com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\n\nimport java.io.IOException;\nimport java.lang.reflect.InvocationTargetException;\nimport java.net.BindException;\nimport java.net.InetSocketAddress;\nimport java.net.SocketAddress;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.locks.Lock;\nimport java.util.concurrent.locks.ReentrantLock;\n\n/**\n *  Pinecone Ursus For Java WolfServer [ Wolf, Uniform Message Control Protocol Server ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family.\n *  Uniform Message Control Protocol (UMC)\n *    UMC is a simple TCP/IP-based binary transmission protocol.\n *    It supports methods similar to PUT/POST (HTTP), which are designed to fulfill uniform message control.\n *\n *  Uniform Message Control Protocol for WolfMC Service [Client/Server] (Ulf UMC)\n *  *****************************************************************************************\n */\npublic class WolfMCServer extends WolfMCNode implements UlfServer {\n    protected ServerConnectArguments                          mConnectionArguments        ;\n\n    protected EventLoopGroup                                  mMasterEventGroup           ;\n    protected EventLoopGroup                                  mWorkersEventGroup          ;\n    protected ServerBootstrap                                 mBootstrap                  ;\n    protected ChannelFuture       
                            mPrimaryBindFuture          ;\n    protected SocketAddress                                   mPrimaryBindAddress         ;\n    protected PassiveRegisterChannelPool<ChannelId >          mChannelPool                ;\n\n    protected UlfAsyncMsgHandleAdapter                        mRecipientMsgHandler        ;\n    protected List<ChannelEventHandler >                      mDataArrivedEventHandlers   ;\n\n    private final ReentrantLock                               mSynRequestLock      = new ReentrantLock(); // For inner purposes.\n\n    public WolfMCServer( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ) {\n        super( nodeId, szName, parentProcess, parent, joConf, extraHeadCoder );\n        this.mDataArrivedEventHandlers = new ArrayList<>();\n        this.apply( joConf );\n    }\n\n    public WolfMCServer( String szName, Processum parentProcess, UlfMessageNode parent, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ) {\n        this( Messagus.nextLocalId(), szName, parentProcess, parent, joConf, extraHeadCoder );\n    }\n\n    public WolfMCServer( long nodeId, String szName, Processum parentProcess, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ) {\n        this( nodeId, szName, parentProcess, null, joConf, extraHeadCoder );\n    }\n\n    public WolfMCServer( String szName, Processum parentProcess, Map<String, Object> joConf, ExtraHeadCoder extraHeadCoder ) {\n        this( Messagus.nextLocalId(), szName, parentProcess, null, joConf, extraHeadCoder );\n    }\n\n    public WolfMCServer( long nodeId, String szName, Processum parentProcess, UlfMessageNode parent, Map<String, Object> joConf ) {\n        this( nodeId, szName, parentProcess, parent, joConf, null );\n    }\n\n    public WolfMCServer( String szName, Processum parentProcess, UlfMessageNode parent, Map<String, Object> joConf ) {\n        this( Messagus.nextLocalId(), 
szName, parentProcess, parent, joConf, null );\n    }\n\n    public WolfMCServer( long nodeId, String szName, Processum parentProcess, Map<String, Object> joConf ) {\n        this( nodeId, szName, parentProcess, null, joConf );\n    }\n\n    public WolfMCServer( String szName, Processum parentProcess, Map<String, Object> joConf ) {\n        this( Messagus.nextLocalId(), szName, parentProcess, null, joConf );\n    }\n\n    protected WolfMCServer( Builder builder ){\n        this( builder.nodeId, builder.szName, builder.parentProcess, builder.parent, builder.joConf, builder.extraHeadCoder );\n    }\n\n\n    @Override\n    public WolfMCServer apply( Map<String, Object> conf ) {\n        super.apply( conf );\n        JSONObject joConf = this.getSectionConf();\n\n        this.mConnectionArguments = new ServerConnectionArguments( joConf );\n        this.mChannelPool         = new PassiveRegisterChannelPool<>(\n                this, new UlfIdleFirstBalanceStrategy(), joConf.optInt( \"MaximumConnections\", (int)1e7 )\n        );\n\n        try{\n            String szRecipientMsgHandler   = joConf.optString( \"RecipientMsgHandler\" );\n            if( StringUtils.isEmpty( szRecipientMsgHandler ) ) {\n                this.mRecipientMsgHandler  = new UnsetUlfAsyncMsgHandleAdapter( this ) ;\n            }\n            else {\n                this.mRecipientMsgHandler  = (UlfAsyncMsgHandleAdapter) DynamicFactory.DefaultFactory.loadInstance(\n                        szRecipientMsgHandler, null, null\n                );\n            }\n        }\n        catch ( ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n\n        return this;\n    }\n\n    @Override\n    public WolfMCServer apply( UlfAsyncMsgHandleAdapter fnRecipientMsgHandler ) {\n        this.mRecipientMsgHandler = fnRecipientMsgHandler;\n\n        return this;\n    
}\n\n    @Override\n    public UlfServer registerDataArrivedEventHandlers( ChannelEventHandler handler ) throws IllegalStateException {\n        this.checkDeregisterHandlerStatus();\n        this.mDataArrivedEventHandlers.add( handler );\n        return this;\n    }\n\n    @Override\n    public UlfServer deregisterDataArrivedEventHandlers( ChannelEventHandler handler ) throws IllegalStateException {\n        this.checkDeregisterHandlerStatus();\n        this.mDataArrivedEventHandlers.remove( handler );\n        return this;\n    }\n\n\n    protected void notifyDataArrivedEventHandlers( RecipientChannelControlBlock block, ChannelHandlerContext ctx ) {\n        for( ChannelEventHandler h : this.mDataArrivedEventHandlers ) {\n            h.afterEventTriggered( block, ctx );\n        }\n    }\n\n    @Override\n    public int getMaximumConnections() {\n        return this.mChannelPool.getMaximumPoolSize();\n    }\n\n    @Override\n    public void close() throws ProvokeHandleException {\n        this.mStateMutex.lock();\n        try {\n            if( this.mMasterEventGroup != null ) {\n                this.mMasterEventGroup.shutdownGracefully();\n                this.mMasterEventGroup = null;\n                //this.clear();\n                //this.mShutdown = true;\n            }\n\n            if( this.mWorkersEventGroup != null ) {\n                this.mWorkersEventGroup.shutdownGracefully();\n            }\n        }\n        finally {\n            this.mStateMutex.unlock();\n        }\n\n        try {\n            synchronized ( this.mPrimaryThreadJoinMutex ) {\n                WolfMCServer.this.mPrimaryThreadJoinMutex.notify();\n            }\n        }\n        catch ( IllegalMonitorStateException e ) {\n            throw new ProvokeHandleException( \"IllegalMonitorStateException [WolfMCClient::close], this exception has been redirected to parent thread.\", e );\n        }\n    }\n\n    @Override\n    public void  kill() {\n        try{\n            
this.close();\n        }\n        catch ( ProvokeHandleException e ) {\n            super.kill(); // Kill master thread forcefully.\n            //this.clear();\n        }\n    }\n\n    @Override\n    public boolean isShutdown() {\n        if ( this.mMasterEventGroup == null ) {\n            return true;\n        }\n        return this.mMasterEventGroup.isShutdown();\n    }\n\n    @Override\n    public boolean isTerminated() {\n        if ( this.mMasterEventGroup == null ) {\n            return true;\n        }\n        return this.mMasterEventGroup.isTerminated();\n    }\n\n    protected void handleArrivedMessage(UlfAsyncMsgHandleAdapter handle, Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n        if( this.getErrorMessageAudit().isErrorMessage( msg ) ) {\n            handle.onErrorMsgReceived( medium, block, msg, ctx, msg );\n        }\n        else {\n            handle.onSuccessfulMsgReceived( medium, block, msg, ctx, msg );\n        }\n    }\n\n\n    protected void initNettySubsystem() throws IOException, UMCServiceException {\n        this.mMasterEventGroup    = new NioEventLoopGroup();\n        this.mWorkersEventGroup   = new NioEventLoopGroup();\n        this.mBootstrap           = new ServerBootstrap();\n\n        this.mBootstrap.group   ( this.mMasterEventGroup , this.mWorkersEventGroup );\n        this.mBootstrap.channel ( NioServerSocketChannel.class );\n        this.mBootstrap.option  ( ChannelOption.SO_BACKLOG, 1024 );\n        this.mBootstrap.childHandler( new ChannelInitializer<SocketChannel>() {\n            @Override\n            protected void initChannel( SocketChannel sc ) throws Exception {\n                sc.pipeline().addLast( new ReadTimeoutHandler( 1000, TimeUnit.SECONDS ) );\n                sc.pipeline().addLast( new GenericUMCByteMessageDecoder( WolfMCServer.this.getExtraHeadCoder() ) );\n\n                sc.pipeline().addLast( new 
ChannelInboundHandlerAdapter(){\n                    @Override\n                    public void channelActive( ChannelHandlerContext ctx ) throws Exception {\n                        super.channelActive(ctx);\n\n                        RecipientNettyChannelControlBlock ccb = new RecipientNettyChannelControlBlock(\n                                WolfMCServer.this, ctx.channel(), false\n                        );\n\n                        ccb.getChannel().getNativeHandle().attr(\n                                AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY )\n                        ).set( ccb );\n\n\n                        Channel channel = ctx.channel();\n                        SocketAddress remote = channel.remoteAddress();\n                        String ipInfo = \"??\";\n                        if ( remote instanceof InetSocketAddress ) {\n                            InetSocketAddress inet = (InetSocketAddress) remote;\n                            String ip   = inet.getAddress().getHostAddress();\n                            int port    = inet.getPort();\n\n                            ipInfo = ip + \":\" + port;\n                        }\n                        WolfMCServer.this.getLogger().info( \"[MessengerConnected] <id:`{}`, ip: `{}`>\", ctx.channel().id(), ipInfo );\n\n                        ccb.afterConnectionArrive(\n                                new AsyncUlfMedium( ctx, null, WolfMCServer.this ),  false\n                        );\n                        ccb.setThreadAffinity( Thread.currentThread() );\n                        WolfMCServer.this.getTaskManager().add( ccb );\n                        WolfMCServer.this.mChannelPool.setIdleChannel( ccb );\n                    }\n\n                    @Override\n                    public void channelRead( ChannelHandlerContext ctx, Object msg ) throws Exception {\n                        Medium medium = new AsyncUlfMedium( ctx, (ByteBuf) msg, WolfMCServer.this );\n               
         UlfMCReceiver receiver = new UlfMCReceiver( medium );\n                        UMCMessage message = receiver.readMsg();\n\n                        RecipientNettyChannelControlBlock channelControlBlock = (RecipientNettyChannelControlBlock)ctx.channel().attr(\n                                AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY )\n                        ).get();\n                        ChannelUtils.setChannelIdentityID( channelControlBlock.getChannel(), message.getHead().getIdentityId() );\n\n                        if ( !WolfMCServer.this.tryInvokeOrInterceptArrivedData( medium, channelControlBlock, message, ctx, msg ) ) {\n                            WolfMCServer.this.handleArrivedMessage(\n                                    WolfMCServer.this.mRecipientMsgHandler, medium, channelControlBlock, message, ctx, msg\n                            );\n\n                            WolfMCServer.this.notifyDataArrivedEventHandlers( channelControlBlock, ctx );\n                        }\n\n                        medium.release();\n                        medium = new AsyncUlfMedium( ctx, null, WolfMCServer.this );\n                        channelControlBlock.afterConnectionArrive( medium,  true );\n                    }\n\n                    @Override\n                    public void channelInactive( ChannelHandlerContext ctx ) throws Exception {\n                        RecipientNettyChannelControlBlock ccb = (RecipientNettyChannelControlBlock)ctx.channel().attr(\n                                AttributeKey.valueOf( WolfMCStandardConstants.CB_CONTROL_BLOCK_KEY )\n                        ).get();\n\n                        if ( !WolfMCServer.this.mChannelInactiveHandlers.isEmpty() ) {\n                            boolean bBlocked = false;\n                            for ( ChannelInactiveHandler handler : WolfMCServer.this.mChannelInactiveHandlers ) {\n                                if ( handler.afterChannelInactive( ccb, ctx ) ) 
{\n                                    bBlocked = true;\n                                }\n                            }\n\n                            if ( bBlocked ) {\n                                return;\n                            }\n                        }\n\n                        WolfMCServer.this.mChannelPool.deactivate( ccb );\n                        WolfMCServer.this.getMajorIOLock().lock();\n                        try{\n                            WolfMCServer.this.getTaskManager().erase( ccb );\n                        }\n                        finally {\n                            WolfMCServer.this.getMajorIOLock().unlock();\n                        }\n                        WolfMCServer.this.getLogger().info( \"[MessengerDetached] <id:`{}`>\", ctx.channel().id() );\n                    }\n\n                    @Override\n                    public void exceptionCaught( ChannelHandlerContext ctx, Throwable cause ) throws Exception {\n                        WolfMCServer.this.mRecipientMsgHandler.onError( ctx, cause );\n                    }\n                } );\n            }\n\n            @Override\n            public void exceptionCaught( ChannelHandlerContext ctx, Throwable cause ) throws Exception {\n                WolfMCServer.this.mRecipientMsgHandler.onError( ctx, cause );\n            }\n        });\n\n        String szHost           = this.getConnectionArguments().getHost();\n        short  nPort            = this.getConnectionArguments().getPort();\n        if( StringUtils.isEmpty( szHost ) ) {\n            this.mPrimaryBindAddress = new InetSocketAddress( nPort );\n        }\n        else {\n            this.mPrimaryBindAddress = new InetSocketAddress( szHost, nPort );\n        }\n        this.mPrimaryBindFuture = this.mBootstrap.bind( this.mPrimaryBindAddress );\n\n        this.mPrimaryBindFuture.addListener( new ChannelFutureListener() {\n            @Override\n            public void operationComplete( ChannelFuture 
channelFuture ) throws Exception {\n                synchronized ( WolfMCServer.this.mPrimaryThreadJoinMutex ) {\n//                    if ( WolfMCServer.this.isShutdown() ) {\n//                        WolfMCServer.this.mShutdown = !channelFuture.isSuccess();\n//                    }\n                    WolfMCServer.this.mPrimaryThreadJoinMutex.notify();\n                }\n            }\n        } );\n\n        synchronized ( this.mPrimaryThreadJoinMutex ) {\n            try {\n                this.mPrimaryThreadJoinMutex.wait( this.getConnectionArguments().getSocketTimeout() );\n                if( this.isShutdown() ) {\n                    throw new BindException( String.format( \"%s [Serve], binding `%s` compromised.\", this.className(), this.mPrimaryBindAddress.toString() ) );\n                }\n            }\n            catch ( InterruptedException e ) {\n                Thread.currentThread().interrupt();\n                throw new WolfMCInitializationException( e );\n            }\n        }\n\n        /*try {\n            this.mPrimaryBindFuture.channel().closeFuture().sync();\n        }\n        catch ( InterruptedException e ) {\n            throw new RuntimeException(e);\n        }*/\n    }\n\n    public void serve() throws UMCServiceException {\n        this.mStateMutex.lock();\n\n        try{\n            if( this.isShutdown() ) {\n                try {\n                    this.initNettySubsystem(); // Exception thrown and truncating next detach-mutex-release, redirecting to primary thread.\n                }\n                catch ( IOException e ) {\n                    throw new WolfMCInitializationException( e );\n                }\n            }\n        }\n        finally {\n            this.mStateMutex.unlock();\n            WolfMCServer.this.unlockOuterThreadDetachMutex();  // This lock shouldn`t be released in `finally`, waiting for primary thread to process.\n        }\n\n        synchronized ( this.mPrimaryThreadJoinMutex ) {\n         
   try {\n                this.mPrimaryThreadJoinMutex.wait(); // Join the primary thread.\n            }\n            catch ( InterruptedException e ) {\n                Thread.currentThread().interrupt();\n            }\n        }\n    }\n\n    @Override\n    public void execute() throws UMCServiceException {\n        if ( !this.isShutdown() ) {\n            this.mLogger.info( \"WolfMCServer [{}:{}] is already started. <Pass>\", this.getName(), this.hashCode() );\n            return;\n        }\n\n        Exception[] lastException = new Exception[] { null };\n        Thread primaryThread      = new Thread( new Runnable() {\n            @Override\n            public void run() {\n                WolfMCServer.this.getTaskManager().notifyExecuting( WolfMCServer.this );\n                try{\n                    WolfMCServer.this.serve();\n                }\n                catch ( Exception e ) {\n                    lastException[0] = e;\n                    WolfMCServer.this.kill();\n                }\n                finally {\n                    WolfMCServer.this.getTaskManager().notifyFinished( WolfMCServer.this );\n                    WolfMCServer.this.unlockOuterThreadDetachMutex();\n                }\n            }\n        });\n\n        this.preparePrimaryThread( primaryThread );\n        primaryThread.start();\n\n        this.joinOuterThread();\n        if( !this.isShutdown() ) {\n            this.infoLifecycle( String.format( \"Wolf<\\uD83D\\uDC3A>::BindServer(%s)\", this.mPrimaryBindAddress.toString() ), \"Ready\" );\n        }\n\n        try {\n            this.redirectException2ParentThread( lastException[0] );\n        }\n        catch ( IOException e ) {\n            throw new WolfMCInitializationException( e );\n        }\n    }\n\n    protected Lock getSynRequestLock() {\n        return this.mSynRequestLock;\n    }\n\n    @Override\n    public ServerConnectArguments getConnectionArguments() {\n        return this.mConnectionArguments;\n    }\n\n  
  @Override\n    public ServerConnectArguments getMessageNodeConfig() {\n        return this.getConnectionArguments();\n    }\n\n    @Override\n    public ChannelPool getChannelPool() {\n        return null;\n    }\n\n\n    public static class Builder {\n        private long                nodeId = -1;\n        private String              szName;\n        private Processum           parentProcess;\n        private UlfMessageNode      parent;\n        private Map<String, Object> joConf;\n        private ExtraHeadCoder      extraHeadCoder;\n\n        public Builder setNodeId( long nodeId ) {\n            this.nodeId = nodeId;\n            return this;\n        }\n\n        public Builder setName( String szName ) {\n            this.szName = szName;\n            return this;\n        }\n\n        public Builder setParentProcess( Processum parentProcess ) {\n            this.parentProcess = parentProcess;\n            return this;\n        }\n\n        public Builder setParent( UlfMessageNode parent ) {\n            this.parent = parent;\n            return this;\n        }\n\n        public Builder setJoConf( Map<String, Object> joConf ) {\n            this.joConf = joConf;\n            return this;\n        }\n\n        public Builder setExtraHeadCoder( ExtraHeadCoder extraHeadCoder ) {\n            this.extraHeadCoder = extraHeadCoder;\n            return this;\n        }\n\n        public WolfMCServer build() {\n            this.validate();\n            return new WolfMCServer( this );\n        }\n\n        private void validate() {\n            if ( this.szName == null || this.szName.isEmpty() ) {\n                long nId = this.nodeId;\n                if ( nId == -1 ) {\n                    nId = System.nanoTime();\n                }\n                this.szName = WolfMCServer.class.getSimpleName() + \"_\" + nId;\n            }\n            if ( this.joConf == null ) {\n                throw new IllegalArgumentException( \"Configuration (Conf) cannot be null\" 
);\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/AddressMapping.java",
    "content": "package com.pinecone.hydra.umct;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.METHOD, ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface AddressMapping {\n    String name() default \"\";\n\n    String[] value() default {};\n\n    boolean relative() default true; // Only for methods.\n\n    UMCMethod[] method() default {};\n\n    boolean selfMappable() default true; // True for using self-method name if value is not given.\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchMessagelet.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.JSONMaptron;\n\nimport java.io.IOException;\nimport java.util.Map;\n\npublic abstract class ArchMessagelet implements Messagelet {\n    protected Map<String, Object >      mConfig;\n    protected ArchMessagram             mMessagelet;\n    protected UMCConnection mMsgPackage;\n\n    protected UMCTransmit               mUMCTransmit;\n    protected UMCReceiver               mUMCReceiver;\n\n    public ArchMessagelet( UMCConnection msgPackage, ArchMessagram servtron ) {\n        this.mMsgPackage   = msgPackage;\n        this.mMessagelet   = servtron;\n        this.mConfig       = new JSONMaptron(); //TODO\n    }\n\n    protected abstract Map<String, Object > $_MSG();\n\n    @Override\n    public MessageDeliver getMessageDeliver() {\n        return this.getMessagePackage().getDeliver();\n    }\n\n    @Override\n    public UMCConnection getMessagePackage() {\n        return this.mMsgPackage;\n    }\n\n    @Override\n    public UMCTransmit getTransmit(){\n        return this.mUMCTransmit;\n    }\n\n    @Override\n    public UMCReceiver getReceiver(){\n        return this.mUMCReceiver;\n    }\n\n    @Override\n    public ArchMessagram getMessagelet() {\n        return this.mMessagelet;\n    }\n\n    protected void sendDefaultConfirmResponse() throws IOException {\n        JSONObject jo = new JSONMaptron();\n        jo.put( \"Messagelet\", \"ReceiveConfirm\" );\n        this.getTransmit().sendInformMsg( jo );\n    }\n\n    @Override\n    public Map<String, Object > getConfig() {\n        return this.mConfig;\n    }\n\n    @Override\n    public abstract void dispatch();\n\n    @Override\n    public String serviceName() {\n        return this.className();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchMessagram.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.hydra.servgram.ArchServgramium;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.framework.system.executum.Processum;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.Map;\n\npublic abstract class ArchMessagram extends ArchServgramium implements Messagram {\n    public static final String DefaultServiceKey = \"__NAME__\";\n\n    protected Map<String, MessageExpress > mExpresses;\n    protected Map<String, Object >         mProtoConfig;\n\n\n    public ArchMessagram( String szName, Processum parent, Map<String, Object > config ) {\n        super( szName, parent );\n\n        this.mExpresses   = new LinkedTreeMap<>();\n        this.mProtoConfig = config;\n    }\n\n    @Override\n    public Map<String, Object > getProtoConfig() {\n        return this.mProtoConfig;\n    }\n\n    @Override\n    public Messagram addExpress( MessageExpress express ) {\n        this.mExpresses.put( express.getName(), express );\n        return this;\n    }\n\n    @Override\n    public MessageExpress getExpressByName( String name ) {\n        return this.mExpresses.get( name );\n    }\n\n    @Override\n    public Messagram removeExpress( String name ) {\n        this.mExpresses.remove( name );\n        return this;\n    }\n\n    public abstract String getLetsNamespace() ;\n\n    protected Messagelet contriveByClassName( String szClassName, UMCConnection UMCConnection) {\n        Messagelet obj = null;\n        try {\n            Class<?> pVoid = Class.forName( szClassName );\n            try{\n                Constructor<?> constructor = pVoid.getConstructor( UMCConnection.class, ArchMessagram.class );\n                obj = (Messagelet) constructor.newInstance(UMCConnection, this );\n            }\n            catch (NoSuchMethodException | InvocationTargetException e1){\n                
e1.printStackTrace();\n            }\n        }\n        catch ( ClassNotFoundException | IllegalAccessException | InstantiationException e ){\n            return null;\n        }\n\n        return obj;\n    }\n\n    public Messagelet contriveByScheme( String szSchemeName, UMCConnection UMCConnection) throws IllegalArgumentException {\n        String szClassName = this.getLetsNamespace() + szSchemeName ;\n        Messagelet obj = this.contriveByClassName( szClassName, UMCConnection);\n        if( obj == null ){\n            throw new IllegalArgumentException( \"[Messagelet] Fantasy scheme with no crew member.\" );\n        }\n        return obj;\n    }\n\n    @Override\n    public Hydrogen parentSystem() {\n        return (Hydrogen) super.parentSystem();\n    }\n\n    @Override\n    public void execute() {\n        this.infoLifecycle( \"toSeek\", \"Can do !\" );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchMsgDeliver.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.unit.trie.TrieSegmentor;\nimport com.pinecone.framework.unit.trie.UniTrieMaptron;\n\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.hydra.express.Package;\nimport com.pinecone.hydra.umc.msg.Status;\nimport com.pinecone.hydra.umc.msg.UMCHead;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umct.decipher.HeaderDecipher;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.function.Supplier;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic abstract class ArchMsgDeliver implements MessageDeliver {\n    protected String                                      mszName;\n    protected MessageExpress                              mExpress;\n    protected MessageJunction                             mJunction;\n    protected Map<String, MessageHandler>                 mRoutingTable;\n    protected HeaderDecipher                              mHeaderDecipher;\n    protected String                                      mszServicePathKey;\n    protected Logger                                      mLogger;\n\n    public ArchMsgDeliver( String szName, MessageExpress express, HeaderDecipher headerDecipher, String szServicePathKey, Supplier<Map<String, MessageHandler>> routingTableSupplier ) {\n        this.mszName           = szName;\n        this.mExpress          = express;\n        this.mJunction         = this.mExpress.getJunction();\n        this.mHeaderDecipher   = headerDecipher;\n        this.mszServicePathKey = szServicePathKey;\n        this.mLogger           = LoggerFactory.getLogger( this.getClass() );\n        this.mRoutingTable     = routingTableSupplier.get();\n    }\n\n    public ArchMsgDeliver( String szName, MessageExpress express, HeaderDecipher headerDecipher, String szServicePathKey ) {\n        this( 
szName, express, headerDecipher, szServicePathKey, HashMap::new );\n    }\n\n    public ArchMsgDeliver( String szName, MessageExpress express, HeaderDecipher headerDecipher, String szServicePathKey, boolean bUsingTrie ) {\n        this( szName, express, headerDecipher, szServicePathKey, () -> {\n            if ( bUsingTrie ) {\n                return new UniTrieMaptron<>( HashMap::new, new TrieSegmentor() {\n                    @Override\n                    public String[] segments( String szPathKey ) {\n                        return szPathKey.split( \"\\\\.|\\\\/\" );\n                    }\n\n                    @Override\n                    public String getSeparator() {\n                        return StringUtils.FOLDER_SEPARATOR;\n                    }\n                });\n            }\n            else {\n                return new HashMap<>();\n            }\n        });\n    }\n\n    @Override\n    public String getServiceKeyword() {\n        return this.mszServicePathKey;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public MessageExpress  getExpress() {\n        return this.mExpress;\n    }\n\n    public MessageJunction getJunction(){\n        return this.mJunction;\n    }\n\n    @Override\n    public Map<String, MessageHandler> getRoutingTable() {\n        return this.mRoutingTable;\n    }\n\n    @Override\n    public void registerHandler( String addr, MessageHandler controller ){\n        this.mRoutingTable.put( addr, controller );\n    }\n\n\n    protected UMCConnection wrap( Package that ) {\n        return (UMCConnection) that;\n    }\n\n    protected abstract void prepareDispatch( Package that ) throws IOException;\n\n    protected abstract boolean sift( Package that );\n\n    protected boolean isMyJob( Package that, String szServiceKey ) {\n        return szServiceKey != null;\n    }\n\n    protected UMCMessage processResponse( UMCMessage request, UMCMessage response ) {\n     
   MessageExpress me = this.getExpress();\n        try{\n            UMCTExpress ue = (UMCTExpress) me;\n            return ue.processResponse( request, response );\n        }\n        catch ( ClassCastException e ) {\n            return response;\n        }\n    }\n\n    protected void messageDispatch( Package that ) throws IOException, ServiceException {\n        boolean bDenialService = false;\n\n        try{\n            UMCConnection connection  = this.wrap( that );\n            UMCMessage request        = connection.getMessage();\n\n            if ( request.getHead().getStatus() != Status.OK ) {\n                throw new ServiceInternalException( \"Error response.\" );\n            }\n\n            if( this.sift( that ) ) {\n                connection.getTransmit().sendInformMsg( Bytes.Empty, Status.IllegalMessage );\n                return;\n            }\n\n            UMCHead head                = request.getHead();\n            Object  exHead              = head.getExtraHead();\n            String szAddr               = this.mHeaderDecipher.getServicePath( exHead );\n            if( szAddr == null ) {\n                this.mHeaderDecipher.sendIllegalMessage( connection );\n                return;\n            }\n\n            MessageHandler controller = this.mRoutingTable.get( szAddr );\n            if( controller != null ) {\n                connection.entrust( this );\n\n                Object[] args;\n                if( controller.isArgsIndexed() ) {\n                    args = this.mHeaderDecipher.values( exHead, controller.getArgumentsDescriptor(), controller.getArgumentTemplate() ).toArray();\n                }\n                else {\n                    List<String > keys = controller.getArgumentsKey();\n                    args = this.mHeaderDecipher.evals( exHead, controller.getArgumentsDescriptor(), keys, controller.getArgumentTemplate() );\n                }\n\n                try {\n                    Object ret = controller.invoke( args 
);\n                    UMCMessage response = this.mHeaderDecipher.assembleReturnMsg( ret, controller.getReturnDescriptor() );\n                    connection.getTransmit().sendMsg( this.processResponse( request, response ) );\n                }\n                catch ( Exception e ) {\n                    this.mLogger.warn( \"MessageDeliver has handled an invocation exception, what => \", e );\n                    this.mHeaderDecipher.sendInternalError( connection );\n                }\n            }\n            else {\n                if ( this.mJunction != null ) {\n                    this.doMessagelet( szAddr, that );\n                }\n\n                bDenialService = true;\n            }\n        }\n        catch ( RuntimeException e ) {\n            throw new ServiceInternalException( e );\n        }\n\n        if ( bDenialService ) {\n            throw new DenialServiceException( \"It's none of my business.\" );\n        }\n    }\n\n    protected abstract void doMessagelet( String szMessagelet, Package that ) ;\n\n    @Override\n    public void toDispatch( Package that ) throws IOException, ServiceException {\n        this.prepareDispatch( that );\n        this.messageDispatch( that );\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchMsgExpress.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.express.Deliver;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\nimport java.util.concurrent.locks.ReadWriteLock;\n\n/**\n *  Pinecone Ursus For Java MessageExpress [Archetype]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  MessageExpress vs Messenger\n *  Messenger is the abstract producer of domestic.\n *  Messenger is the message reproducer, who import commodities(Messages) from abroad (inbound, the real producer).\n *\n *  MessageExpress is the deliver center, that deliver the package(Message) through deliver man to the consumer(e.g. Messagelet).\n *  [Server -> send message -> (out)] -> [(in) -> Messenger -> Express -> Deliver -> Messagelet ]\n *  *****************************************************************************************\n */\npublic abstract class ArchMsgExpress implements MessageExpress, Slf4jTraceable {\n    protected String           mszName      ;\n    protected MessageJunction  mJunction    ;\n    protected Logger           mLogger      ;\n    protected Map<String, MessageDeliver > mDeliverPool = new LinkedHashMap<>();\n    protected ReadWriteLock                mPoolLock    = new ReentrantReadWriteLock();\n\n    public ArchMsgExpress( String name, MessageJunction junction, Logger logger ) {\n        this.mszName      = name;\n        this.mLogger      = logger;\n        this.mJunction    = junction;\n\n        if( this.mszName == null ){\n            this.mszName = this.className();\n        }\n    }\n\n    public ArchMsgExpress( String name, MessageJunction junction ) {\n        this( name, junction, null );\n    
    this.mLogger = LoggerFactory.getLogger( this.className() );\n    }\n\n    public ArchMsgExpress( ArchMessagram messagram ) {\n        this( null, messagram );\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public MessageJunction getJunction() {\n        return this.mJunction;\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    protected ReadWriteLock        getPoolLock() {\n        return this.mPoolLock;\n    }\n\n    protected Map<String, MessageDeliver > getDeliverPool() {\n        return this.mDeliverPool;\n    }\n\n    protected MessageDeliver wrap( Deliver deliver ) {\n        return (MessageDeliver) deliver;\n    }\n\n    protected abstract MessageDeliver spawn( String szName );\n\n    @Override\n    public MessageDeliver   recruit ( String szName ) {\n        if( this.getDeliverPool().containsKey( szName ) ) {\n            return this.getDeliverPool().get( szName );\n        }\n        MessageDeliver deliver = this.spawn( szName );\n        this.register( deliver );\n        return deliver;\n    }\n\n    @Override\n    public ArchMsgExpress   register    ( Deliver deliver ) {\n        this.getDeliverPool().put( deliver.getName(), this.wrap( deliver ) );\n        return this;\n    }\n\n    @Override\n    public ArchMsgExpress   fired       ( Deliver deliver ) {\n        this.getDeliverPool().remove( deliver.getName(), this.wrap( deliver ) );\n        return this;\n    }\n\n    @Override\n    public MessageDeliver getDeliver    ( String szName ) {\n        return this.getDeliverPool().get( szName );\n    }\n\n    public ArchMsgExpress   syncRegister( Deliver deliver ) {\n        this.getPoolLock().writeLock().lock();\n        try{\n            this.register( deliver );\n        }\n        finally {\n            this.getPoolLock().writeLock().unlock();\n        }\n        return this;\n    }\n\n    public ArchMsgExpress   syncFired   ( Deliver 
deliver ) {\n        this.getPoolLock().writeLock().lock();\n        try{\n            this.fired( deliver );\n        }\n        finally {\n            this.getPoolLock().writeLock().unlock();\n        }\n        return this;\n    }\n\n    @Override\n    public boolean hasOwnDeliver( Deliver deliver ) {\n        return this.hasOwnDeliver( deliver.getName() );\n    }\n\n    @Override\n    public boolean hasOwnDeliver( String deliverName ) {\n        return this.getDeliverPool().containsKey( deliverName );\n    }\n\n    @Override\n    public int size() {\n        return this.getDeliverPool().size();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ArchUMCConnection.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.express.Deliver;\n\npublic abstract class ArchUMCConnection implements UMCConnection {\n    protected MessageDeliver  mDeliver;\n    protected Medium          mMessageSource;\n    protected UMCMessage      mUMCMessage;\n    protected UMCTransmit     mUMCTransmit;\n    protected UMCReceiver     mUMCReceiver;\n\n    public ArchUMCConnection( Medium medium, UMCMessage message, UMCTransmit transmit, UMCReceiver receiver ) {\n        this.mMessageSource = medium;\n        this.mUMCMessage    = message;\n        this.mUMCTransmit   = transmit;\n        this.mUMCReceiver   = receiver;\n    }\n\n    @Override\n    public MessageDeliver getDeliver() {\n        return this.mDeliver;\n    }\n\n    @Override\n    public UMCMessage getMessage() {\n        return this.mUMCMessage;\n    }\n\n    @Override\n    public UMCTransmit getTransmit() {\n        return this.mUMCTransmit;\n    }\n\n    @Override\n    public UMCReceiver getReceiver() {\n        return this.mUMCReceiver;\n    }\n\n    @Override\n    public Medium getMessageSource() {\n        return this.mMessageSource;\n    }\n\n    @Override\n    public ArchUMCConnection entrust( Deliver deliver ) {\n        this.mDeliver = (MessageDeliver)deliver;\n        return this;\n    }\n\n    @Override\n    public void release() {\n        this.mMessageSource.release();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/DenialServiceException.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class DenialServiceException extends ServiceException implements Pinenut {\n    public DenialServiceException() {\n        super();\n    }\n\n    public DenialServiceException( String message ) {\n        super(message);\n    }\n\n    public DenialServiceException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public DenialServiceException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/DuplexExpress.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelPool;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.uma.AsynMsgHandler;\n\npublic interface DuplexExpress extends UMCTExpress {\n\n    void afterChannelInactive( ChannelControlBlock controlBlock ) ;\n\n    ChannelPool getPoolByClientId( long clientId ) ;\n\n    void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, UlfAsyncMsgHandleAdapter handler ) throws IOException;\n\n    void sendAsynMsg( long clientId, UMCMessage request, boolean bNoneBuffered, AsynMsgHandler handler ) throws IOException;\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/GenericMessagramScanner.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.ClassScopeNSProtocolIteratorsFactory;\nimport com.pinecone.framework.util.lang.NSProtocolIteratorsFactoryAdapter;\nimport com.pinecone.ulf.util.lang.GenericPreloadClassInspector;\nimport com.pinecone.ulf.util.lang.HierarchyClassInspector;\nimport com.pinecone.ulf.util.lang.PooledClassCandidateScanner;\n\nimport javassist.ClassPool;\n\npublic class GenericMessagramScanner extends PooledClassCandidateScanner implements MessagramScanner {\n    protected HierarchyClassInspector mClassInspector     ;\n\n    public GenericMessagramScanner     ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory, ClassPool classPool ) {\n        super( searchScope, classLoader, iteratorsFactory, classPool );\n\n        this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool );\n    }\n\n    public GenericMessagramScanner     ( ClassScope searchScope, ClassLoader classLoader, ClassPool classPool ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), classPool );\n    }\n\n    public GenericMessagramScanner     ( ClassScope searchScope, ClassLoader classLoader ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), ClassPool.getDefault() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/IlleagalResponseException.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class IlleagalResponseException extends ServiceException implements Pinenut {\n    public IlleagalResponseException() {\n        super();\n    }\n\n    public IlleagalResponseException( String message ) {\n        super(message);\n    }\n\n    public IlleagalResponseException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public IlleagalResponseException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/IntegratedMessagram.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\n\nimport java.util.Map;\n\npublic class IntegratedMessagram extends ArchMessagram implements Messagram {\n    protected String                          mszExpressFactory;\n    protected Map<String, Object >            mExpressesConf;\n    protected DynamicFactory                  mExpressFactory;\n\n    public IntegratedMessagram(String szName, Processum parent, Map<String, Object > config ) {\n        super( szName, parent, config );\n\n        this.prepareConfig();\n        this.prepareExpresses();\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected void prepareConfig() {\n        this.mszExpressFactory = (String) this.getProtoConfig().get( \"ExpressFactory\" );\n        this.mExpressesConf    = (Map) this.getProtoConfig().get( \"Expresses\" );\n    }\n\n    protected void prepareExpresses() {\n        if( StringUtils.isEmpty( this.mszExpressFactory ) ) {\n            this.mExpressFactory = new GenericDynamicFactory( this.parentSystem().getTaskManager().getClassLoader() );\n        }\n        else {\n            this.mExpressFactory = (DynamicFactory) DynamicFactory.DefaultFactory.optLoadInstance(\n                    this.mszExpressFactory, null, new Object[] { this.parentSystem().getTaskManager().getClassLoader() }\n            );\n        }\n\n        try{\n            if( this.mExpressesConf != null ) {\n                for( Map.Entry<String, Object > kv : this.mExpressesConf.entrySet() ) {\n                    Object v = kv.getValue();\n                    Map map = (Map) v;\n\n                    String szEngine = (String) map.get( \"Engine\" );\n                    Object node = 
this.mExpressFactory.loadInstance( szEngine, null, new Object[] { kv.getKey(), this } );\n                    if( node instanceof MessageExpress ){\n                        MessageExpress express = (MessageExpress) node;\n                        this.mExpresses.put( express.getName(), express );\n                    }\n                    else {\n                        throw new IllegalArgumentException( \"Illegal message express engine, should be `MessageExpress`: \" + szEngine );\n                    }\n                }\n            }\n        }\n        catch ( Exception e ) {\n            throw new ProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public Map<String, Object > getExpressesConfig() {\n        return this.mExpressesConf;\n    }\n\n    @Override\n    public String getLetsNamespace() {\n        return \"\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/InvokeEntity.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface InvokeEntity extends Pinenut {\n    String getAddress();\n\n    String getEntityName();\n\n    /**\n     * Invocation Path | 调用实体路径\n     * Address + EntityName\n     * e.g. Package + MethodName     => com.dragonking.method\n     * e.g. Namespace + FunctionName => std::printf\n     * e.g. Mapping + TargetName     => /admin/audit/methodName\n     */\n    String getInvocationPath();\n\n    List<Class<?> > getParameters();\n\n    /**\n     * Invoked Interface (defaulted, [::function], anonymous global namespace)\n     * Usually a class, struct, namespace or interface\n     */\n    Object getInvokeIface();\n\n    /**\n     * Invoked Entity\n     * Usually a method, function or apis\n     */\n    Object getInvokeEntity();\n\n    Object invoke( Object... args ) throws Exception;\n\n    default Object call( Object... args ) {\n        try{\n            return this.invoke( args );\n        }\n        catch ( Exception e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    void execute( Object... args ) throws Exception;\n\n    default void enforce( Object... args ) {\n        try{\n            this.execute( args );\n        }\n        catch ( Exception e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/JSONLetMsgDeliver.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.express.Package;\nimport com.pinecone.hydra.umct.decipher.JSONHeaderDecipher;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.function.Supplier;\n\npublic class JSONLetMsgDeliver extends ArchMsgDeliver {\n\n    public JSONLetMsgDeliver( String name, MessageExpress express ) {\n        this( name, express, ArchMessagram.DefaultServiceKey );\n    }\n\n    public JSONLetMsgDeliver( String name, MessageExpress express, String szServiceKey, Supplier<Map<String, MessageHandler>> routingTableSupplier ) {\n        super( name, express, new JSONHeaderDecipher( szServiceKey ), szServiceKey, routingTableSupplier );\n    }\n\n    public JSONLetMsgDeliver( String name, MessageExpress express, String szServiceKey ) {\n        this( name, express, szServiceKey, HashMap::new );\n    }\n\n    public JSONLetMsgDeliver( MessageExpress express ) {\n        this( ProtoletMsgDeliver.class.getSimpleName(), express );\n    }\n\n    @Override\n    protected void prepareDispatch( Package that ) throws IOException {\n\n    }\n\n    @Override\n    protected boolean sift( Package that ) {\n        return false;\n    }\n\n    @Override\n    protected void doMessagelet( String szMessagelet, Package that ) {\n        if ( this.getJunction() instanceof ArchMessagram ) {\n            ( (ArchMessagram)this.getJunction() ).contriveByScheme( szMessagelet, (UMCConnection) that ).dispatch();\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessageDeliver.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.express.Package;\n\nimport java.io.IOException;\nimport java.util.Map;\n\npublic interface MessageDeliver extends Deliver {\n\n    MessageExpress  getExpress();\n\n    void toDispatch( Package that ) throws IOException, ServiceException;\n\n    String  getServiceKeyword();\n\n    Map<String, MessageHandler> getRoutingTable();\n\n    void registerHandler( String addr, MessageHandler controller );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessageExpress.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.express.Express;\n\n/**\n *  Pinecone Ursus For Java UMCT Message Express [ Uniform Message Control Transmit ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Uniform Message Control Transmit Integrated Model - Express\n *  统一消息控制与数据传输一体化模型 - 总线分发调度器\n *  **********************************************************\n */\n\npublic interface MessageExpress extends Express {\n\n    String getName();\n\n    MessageJunction getJunction();\n\n    MessageDeliver  recruit     ( String szName );\n\n    MessageExpress  register    ( Deliver deliver );\n\n    MessageExpress  fired       ( Deliver deliver );\n\n    MessageDeliver  getDeliver  ( String szName );\n\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessageHandler.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport java.util.List;\n\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.framework.system.functions.Function;\n\npublic interface MessageHandler extends Function {\n    String getAddressMapping();\n\n    @Override\n    Object invoke( Object... args ) throws Exception;\n\n    List<String > getArgumentsKey();\n\n    default boolean isArgsIndexed() {\n        return this.getArgumentsKey() == null;\n    }\n\n    Object getReturnDescriptor();\n\n    String getReturnGenericLabel();\n\n    Object getArgumentsDescriptor();\n\n    FieldEntity[] getArgumentTemplate();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessageJunction.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.servgram.Servgram;\n\npublic interface MessageJunction extends Servgram {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/Messagelet.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Map;\n\npublic interface Messagelet extends Pinenut {\n    UMCMessage getReceivedMessage();\n\n    UMCTransmit getTransmit();\n\n    UMCReceiver getReceiver();\n\n    MessageDeliver getMessageDeliver();\n\n    UMCConnection getMessagePackage();\n\n    ArchMessagram getMessagelet();\n\n    Map<String, Object > getConfig();\n\n    Object     getLetLocal();\n\n    void dispatch();\n\n    void terminate();\n\n    String serviceName();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/Messagram.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport java.util.Map;\n\npublic interface Messagram extends MessageJunction {\n    Messagram addExpress( MessageExpress express );\n\n    MessageExpress getExpressByName( String name );\n\n    Messagram removeExpress(  String name  );\n\n    Map<String, Object > getProtoConfig();\n\n    Map<String, Object > getExpressesConfig();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/MessagramScanner.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.util.lang.ClassScanner;\n\npublic interface MessagramScanner extends ClassScanner {\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ProtoletMsgDeliver.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport java.io.IOException;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.function.Supplier;\n\nimport com.pinecone.hydra.express.Package;\nimport com.pinecone.hydra.umct.decipher.PrototypeDecipher;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.hydra.umct.husky.machinery.PMCTContextMachinery;\n\npublic class ProtoletMsgDeliver extends ArchMsgDeliver {\n\n    protected CompilerEncoder      mCompilerEncoder;\n\n    protected PMCTContextMachinery mPMCTContextMachinery;\n\n    public ProtoletMsgDeliver( String name, MessageExpress express, PMCTContextMachinery machinery, CompilerEncoder encoder ) {\n        this( name, express, ArchMessagram.DefaultServiceKey, machinery, encoder );\n    }\n\n    public ProtoletMsgDeliver( String name, MessageExpress express, String szServiceKey, PMCTContextMachinery machinery, CompilerEncoder encoder, Supplier<Map<String, MessageHandler>> routingTableSupplier ) {\n        super( name, express, new PrototypeDecipher( szServiceKey, encoder, machinery.getFieldProtobufDecoder() ), szServiceKey, routingTableSupplier );\n        this.mCompilerEncoder = encoder;\n        this.mPMCTContextMachinery = machinery;\n    }\n\n    public ProtoletMsgDeliver( String name, MessageExpress express, String szServiceKey, PMCTContextMachinery machinery, CompilerEncoder encoder ) {\n        this( name, express, szServiceKey, machinery, encoder, HashMap::new );\n    }\n\n    public ProtoletMsgDeliver( MessageExpress express, PMCTContextMachinery machinery, CompilerEncoder encoder ) {\n        this( ProtoletMsgDeliver.class.getSimpleName(), express, machinery, encoder );\n    }\n\n    @Override\n    protected void prepareDispatch( Package that ) throws IOException {\n\n    }\n\n    @Override\n    protected boolean sift( Package that ) {\n        return false;\n    }\n\n    @Override\n    protected void doMessagelet( String szMessagelet, Package that 
) {\n        if ( this.getJunction() instanceof ArchMessagram ) {\n            ( (ArchMessagram)this.getJunction() ).contriveByScheme( szMessagelet, (UMCConnection) that ).dispatch();\n        }\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ServiceException.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.IOException;\n\npublic class ServiceException extends IOException implements Pinenut {\n    public ServiceException() {\n        super();\n    }\n\n    public ServiceException( String message ) {\n        super(message);\n    }\n\n    public ServiceException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ServiceException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/ServiceInternalException.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class ServiceInternalException extends ServiceException implements Pinenut {\n    public ServiceInternalException() {\n        super();\n    }\n\n    public ServiceInternalException( String message ) {\n        super(message);\n    }\n\n    public ServiceInternalException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ServiceInternalException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UMCConnection.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.express.Package;\n\npublic interface UMCConnection extends Package {\n    MessageDeliver getDeliver();\n\n    UMCMessage getMessage();\n\n    UMCTransmit getTransmit();\n\n    UMCReceiver getReceiver();\n\n    Medium getMessageSource();\n\n    @Override\n    default String  getConsignee() {\n        Object e = this.getMessage().getHead().getExHeaderVal( this.getDeliver().getServiceKeyword() );\n        if( e instanceof String ) {\n            return (String) e;\n        }\n        return e.toString();\n    }\n\n    UMCConnection entrust(Deliver deliver );\n\n    void release();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UMCTExpress.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\npublic interface UMCTExpress extends MessageExpress, UMCTExpressHandler {\n    UMCMessage processResponse( UMCMessage request, UMCMessage response );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UMCTExpressHandler.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\n\npublic interface UMCTExpressHandler extends Pinenut {\n    default void onSuccessfulMsgReceived ( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n\n    }\n\n    default void onErrorMsgReceived      ( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n\n    }\n\n    default void onError                 ( Object data, Throwable cause ) {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UMCTNode.java",
    "content": "package com.pinecone.hydra.umct;\n\npublic interface UMCTNode extends MessageJunction {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UlfConnection.java",
    "content": "package com.pinecone.hydra.umct;\nimport com.pinecone.hydra.umc.msg.*;\n\npublic class UlfConnection extends ArchUMCConnection {\n    protected Object[]  mArguments;\n\n    UlfConnection( Medium medium, UMCMessage message, UMCTransmit transmit, UMCReceiver receiver ) {\n        super( medium, message, transmit, receiver );\n    }\n\n\n    public UlfConnection( Medium medium, UMCMessage message, UMCTransmit transmit, UMCReceiver receiver, Object[] args ) {\n        this( medium, message, transmit, receiver );\n        this.mArguments = args;\n    }\n\n    public Object[] getExArguments() {\n        return this.mArguments;\n    }\n\n    @Override\n    public void release() {\n        super.release();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/UlfMessageHandler.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport java.util.List;\n\nimport com.pinecone.framework.lang.field.FieldEntity;\n\npublic class UlfMessageHandler implements MessageHandler {\n    @Override\n    public String getAddressMapping() {\n        return null;\n    }\n\n    @Override\n    public Object invoke( Object... args ) throws Exception {\n        return null;\n    }\n\n    @Override\n    public List<String > getArgumentsKey() {\n        return null;\n    }\n\n    @Override\n    public Object getReturnDescriptor() {\n        return null;\n    }\n\n    @Override\n    public String getReturnGenericLabel() {\n        return null;\n    }\n\n    @Override\n    public Object getArgumentsDescriptor() {\n        return null;\n    }\n\n    @Override\n    public FieldEntity[] getArgumentTemplate() {\n        return new FieldEntity[0];\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/WolfMCExpress.java",
    "content": "package com.pinecone.hydra.umct;\n\nimport java.util.Map;\n\nimport org.slf4j.Logger;\n\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umct.husky.HuskyServiceErrorMessages;\n\n/**\n *  Pinecone Ursus For Java Hydra Ulfar, Wolf Express\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n */\npublic class WolfMCExpress extends ArchMsgExpress implements UMCTExpress {\n    public WolfMCExpress( String name, MessageJunction messagram, Logger logger ) {\n        super( name, messagram, logger );\n    }\n\n    public WolfMCExpress( String name, MessageJunction messagram ) {\n        super( name, messagram );\n    }\n\n    public WolfMCExpress( MessageJunction messagram ) {\n        this( null, messagram );\n    }\n\n    @Override\n    protected MessageDeliver spawn( String szName ) { // TODO\n        if( szName.equals( \"Messagelet\" ) ) {\n            return new JSONLetMsgDeliver( this );\n        }\n        return null;\n    }\n\n\n    @Override\n    public UMCMessage processResponse( UMCMessage request, UMCMessage response ) {\n        return response;\n    }\n\n    @Override\n    public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n        UlfConnection connection = new UlfConnection(  medium, msg, transmit, receiver, args );\n        this.onSuccessfulMsgReceived( connection, args );\n    }\n\n    protected void onSuccessfulMsgReceived( UMCConnection connection, Object[] args ) throws Exception {\n        int c = 0;\n        for( Map.Entry<String, MessageDeliver > kv : this.mDeliverPool.entrySet() ) {\n            try{\n                MessageDeliver deliver = 
kv.getValue();\n                deliver.toDispatch( connection );\n            }\n            catch ( DenialServiceException e ) {\n                // Just continue.\n                // 你不干有的是人干.\n                ++c;\n            }\n        }\n\n        if( c == this.mDeliverPool.size() ) {\n            connection.getTransmit().sendMsg( HuskyServiceErrorMessages.HCTP_MAPPING_NOT_FOUND );\n        }\n\n        connection.release();\n    }\n\n    @Override\n    public void onErrorMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n\n    }\n\n    @Override\n    public void onError( Object ctx, Throwable cause ) {\n        if( cause instanceof Exception ) {\n            this.getLogger().error( \"Express error, {}, {}\" , cause.getMessage(), cause.toString(), cause );\n        }\n        else {\n            throw new ProvokeHandleException( cause );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/bind/ArgParam.java",
    "content": "package com.pinecone.hydra.umct.bind;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\nimport org.springframework.core.annotation.AliasFor;\n\n@Target({ElementType.PARAMETER})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface ArgParam {\n    @AliasFor(\"name\")\n    String value() default \"\";\n\n    @AliasFor(\"value\")\n    String name() default \"\";\n\n    boolean required() default true;\n\n    String defaultValue() default \"\\n\\t\\t\\n\\t\\t\\n\\ue000\\ue001\\ue002\\n\\t\\t\\t\\t\\n\";\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/decipher/HeaderDecipher.java",
    "content": "package com.pinecone.hydra.umct.decipher;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.List;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umct.UMCConnection;\nimport com.pinecone.hydra.umct.husky.HuskyServiceErrorMessages;\n\npublic interface HeaderDecipher extends Pinenut {\n    Object eval( Object that, @Nullable Object descriptor, String key );\n\n    default String evalString( Object that, @Nullable Object descriptor, String key ) {\n        return this.eval( that, descriptor, key ).toString();\n    }\n\n    default Number evalNumber( Object that, @Nullable Object descriptor, String key ) {\n        Object o = this.eval( that, descriptor, key );\n        if( o instanceof Number ) {\n            return (Number) o;\n        }\n        return null;\n    }\n\n    Collection<Object > values( Object that, @Nullable Object descriptor, @Nullable Object argTpl );\n\n    Object[] evals( Object that, @Nullable Object descriptor, List<String > keys, @Nullable Object argTpl );\n\n    String getServicePath( Object that );\n\n    default void sendIllegalMessage( UMCConnection connection ) throws IOException {\n        connection.getTransmit().sendMsg( HuskyServiceErrorMessages.HCTP_ILLEGAL_MESSAGE );\n    }\n\n    default void sendInternalError( UMCConnection connection ) throws IOException {\n        connection.getTransmit().sendMsg( HuskyServiceErrorMessages.HCTP_INTERNAL_ERROR );\n    }\n\n    UMCMessage assembleReturnMsg( Object that, Object descriptor ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/decipher/JSONHeaderDecipher.java",
    "content": "package com.pinecone.hydra.umct.decipher;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.wolf.UlfInformMessage;\n\npublic class JSONHeaderDecipher implements HeaderDecipher {\n    protected String mszServicePathKey;\n\n    public JSONHeaderDecipher( String szServicePathKey ) {\n        this.mszServicePathKey = szServicePathKey;\n    }\n\n    @Override\n    public String getServicePath( Object that ) {\n        return this.evalString( that, null, this.mszServicePathKey );\n    }\n\n    @Override\n    public Object eval( Object that, Object descriptor, String key ) {\n        return ( (Map) that ).get( key );\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    public Collection<Object > values( Object that, Object descriptor, Object argTpl ) {\n        return ( (Map) that ).values();\n    }\n\n    @Override\n    public Object[] evals( Object that, Object descriptor, List<String> keys, Object argTpl ) {\n        Map map = (Map) that;\n        Object[] ret = new Object[ keys.size() ];\n        int i = 0;\n        for( String k : keys ) {\n            ret[ i ] = map.get( k );\n            ++i;\n        }\n        return ret;\n    }\n\n    @Override\n    public UMCMessage assembleReturnMsg( Object that, Object descriptor ) {\n        if ( that instanceof UMCMessage ) {\n            return (UMCMessage) that;\n        }\n        if ( that == null ) {\n            return new UlfInformMessage( null, ExtraEncode.JSONString );\n        }\n\n        JSONObject jo = new JSONMaptron();\n        jo.put( \"__RESPONSE__\", that );\n        return new UlfInformMessage( jo ); // TODO, Transfer\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/decipher/PrototypeDecipher.java",
    "content": "package com.pinecone.hydra.umct.decipher;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\n\nimport com.google.protobuf.DescriptorProtos;\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.google.protobuf.InvalidProtocolBufferException;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.umc.msg.ExtraEncode;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.wolf.UlfInformMessage;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\n\npublic class PrototypeDecipher implements HeaderDecipher {\n    protected volatile static Descriptors.Descriptor PathDescriptor = null;\n\n    protected static Descriptors.FieldDescriptor PathFieldDescriptor = null;\n\n    protected String               mszServicePathKey;\n\n    protected CompilerEncoder      mCompilerEncoder;\n\n    protected FieldProtobufDecoder mFieldProtobufDecoder;\n\n    public PrototypeDecipher( String szServicePathKey, CompilerEncoder encoder, FieldProtobufDecoder decoder ) {\n        this.mszServicePathKey     = szServicePathKey;\n        this.mCompilerEncoder      = encoder;\n        this.mFieldProtobufDecoder = decoder;\n    }\n\n    @Override\n    public String getServicePath( Object that ) {\n        try{\n            Descriptors.Descriptor des = PrototypeDecipher.getPathDescriptor( this.mszServicePathKey );\n            if ( ! 
( that instanceof byte[] ) ) {\n                Debug.warnSyn( that );\n            }\n            DynamicMessage ms = DynamicMessage.parseFrom( des, (byte[]) that );\n            return (String) ms.getField( PrototypeDecipher.PathFieldDescriptor );\n        }\n        catch ( InvalidProtocolBufferException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public Object eval( Object that, Object descriptor, String key ) {\n        byte[] data = (byte[]) that;\n        Descriptors.Descriptor messageDescriptor = (Descriptors.Descriptor) descriptor;\n\n        try {\n            DynamicMessage message = DynamicMessage.parseFrom( messageDescriptor, data );\n            Descriptors.FieldDescriptor fieldDescriptor = messageDescriptor.findFieldByName( key );\n\n            if ( fieldDescriptor == null ) {\n                throw new IllegalArgumentException( \"Field '\" + key + \"' not found in the descriptor\" );\n            }\n\n            return message.getField(fieldDescriptor);\n        }\n        catch ( InvalidProtocolBufferException e ) {\n            throw new IllegalArgumentException( e );\n        }\n    }\n\n    @Override\n    public Collection<Object > values( Object that, Object descriptor, Object argTpl ) {\n        byte[] data = (byte[]) that;\n        Descriptors.Descriptor messageDescriptor = (Descriptors.Descriptor) descriptor;\n\n        try {\n            DynamicMessage message = DynamicMessage.parseFrom( messageDescriptor, data );\n            Collection<Object > fieldValues = new ArrayList<>();\n\n            Object[] decodes = this.mFieldProtobufDecoder.decodeValues(\n                    (FieldEntity[]) argTpl, messageDescriptor, message, this.mCompilerEncoder.getExceptedKeys(), this.mCompilerEncoder.getOptions()\n            );\n\n            int i = 0;\n            for ( Object val : decodes ) {\n                if ( i != 0 ) {\n                    fieldValues.add( val );\n                }\n  
              ++i;\n            }\n            return fieldValues;\n        }\n        catch ( InvalidProtocolBufferException e ) {\n            throw new IllegalArgumentException( e );\n        }\n    }\n\n    @Override\n    public Object[] evals( Object that, Object descriptor, List<String> keys, Object argTpl ) {\n        byte[] data = (byte[]) that;\n        Descriptors.Descriptor messageDescriptor = (Descriptors.Descriptor) descriptor;\n\n        try {\n            DynamicMessage message = DynamicMessage.parseFrom( messageDescriptor, data );\n            Object[] results = new Object[ keys.size() ];\n\n            Object[] decodes = this.mFieldProtobufDecoder.decodeValues(\n                    (FieldEntity[]) argTpl, messageDescriptor, message, this.mCompilerEncoder.getExceptedKeys(), this.mCompilerEncoder.getOptions()\n            );\n\n            for ( int i = 1; i < keys.size(); ++i ) {\n                String key = keys.get(i);\n                Descriptors.FieldDescriptor fieldDescriptor = messageDescriptor.findFieldByName(key);\n\n                if ( fieldDescriptor == null ) {\n                    results[i] = null; // Field not found\n                }\n                else {\n                    results[i] = decodes[i];\n                }\n            }\n\n            return results;\n        }\n        catch ( InvalidProtocolBufferException e ) {\n            throw new IllegalArgumentException( e );\n        }\n    }\n\n    @Override\n    public UMCMessage assembleReturnMsg( Object that, Object descriptor ) {\n        if ( that instanceof UMCMessage ) {\n            return (UMCMessage) that;\n        }\n        if ( that == null ) {\n            return new UlfInformMessage( null, ExtraEncode.Prototype );\n        }\n\n        Descriptors.Descriptor retDes = (Descriptors.Descriptor) descriptor;\n        DynamicMessage retMsg = this.mCompilerEncoder.getEncoder().encode( retDes, that, this.mCompilerEncoder.getExceptedKeys(), 
this.mCompilerEncoder.getOptions() );\n        return new UlfInformMessage( retMsg.toByteArray() ); // TODO, Transfer\n    }\n\n    public static Descriptors.Descriptor getPathDescriptor( String fieldName ) {\n        if ( PrototypeDecipher.PathDescriptor == null ) {\n            synchronized ( PrototypeDecipher.class ) {\n                if ( PrototypeDecipher.PathDescriptor == null ) {\n                    PrototypeDecipher.PathDescriptor      = PrototypeDecipher.createPathDescriptor( \"PathDescriptor\", fieldName );\n                    PrototypeDecipher.PathFieldDescriptor = PrototypeDecipher.PathDescriptor.findFieldByName( fieldName );\n                }\n            }\n        }\n\n        return PrototypeDecipher.PathDescriptor;\n    }\n\n    public static Descriptors.Descriptor createPathDescriptor( String messageName, String fieldName ) {\n        try {\n            DescriptorProtos.FieldDescriptorProto fieldDescriptorProto = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                    .setName(fieldName)\n                    .setNumber(1)\n                    .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING)\n                    .build();\n\n            DescriptorProtos.DescriptorProto descriptorProto = DescriptorProtos.DescriptorProto.newBuilder()\n                    .setName(messageName)\n                    .addField(fieldDescriptorProto)\n                    .build();\n\n            DescriptorProtos.FileDescriptorProto fileDescriptorProto = DescriptorProtos.FileDescriptorProto.newBuilder()\n                    .setName(messageName + \".proto\")\n                    .addMessageType(descriptorProto)\n                    .build();\n\n            Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(\n                    fileDescriptorProto, new Descriptors.FileDescriptor[]{});\n\n            return fileDescriptor.findMessageTypeByName(messageName);\n        }\n        catch ( 
Descriptors.DescriptorValidationException e ) {\n            throw new ProxyProvokeHandleException( \"Failed to create descriptor\", e );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/AddressedEntity.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface AddressedEntity extends Pinenut {\n    /**\n     * Full Name / Path\n     */\n    String getInterceptedPath();\n\n    /**\n     * Function / Method / Mapping\n     */\n    String getInterceptorName();\n\n    /**\n     * Namespace / Domain / Package\n     */\n    String getAddressPath();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/ArchAddressedEntity.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\npublic abstract class ArchAddressedEntity implements MessagePackage {\n    protected String mszInterceptedPath;\n\n    public ArchAddressedEntity( String szInterceptedPath ) {\n        this.mszInterceptedPath = szInterceptedPath;\n    }\n\n    @Override\n    public String getInterceptedPath() {\n        return this.mszInterceptedPath;\n    }\n\n    @Override\n    public String getInterceptorName() {\n        String[] debris = this.mszInterceptedPath.split(StandardPathSeparator);\n        if( debris.length > 1 ) {\n            return debris [ debris.length - 1 ];\n        }\n        return this.mszInterceptedPath;\n    }\n\n    @Override\n    public String getAddressPath() {\n        String interceptor = this.getInterceptorName();\n        int lastIndexof = this.mszInterceptedPath.lastIndexOf( interceptor );\n        if( lastIndexof > 0 ) {\n            return this.mszInterceptedPath.substring( 0, lastIndexof );\n        }\n        return this.mszInterceptedPath;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/ArchRequestPackage.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\npublic abstract class ArchRequestPackage extends ArchAddressedEntity {\n    public ArchRequestPackage( String szInterceptedPath ) {\n        super( szInterceptedPath );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/ArchResponsePackage.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\npublic abstract class ArchResponsePackage extends ArchAddressedEntity {\n    public ArchResponsePackage( String szInterceptedPath ) {\n       super( szInterceptedPath );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/HuskyCTPConstants.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\n/**\n *  Pinecone Ursus For Java Wolf-Husky Control Transmission Protocol\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Bean Nuts Walnut Ulfhedinn Wolves/Ulfar Family.\n *  HCTP is an archetypal implementation of the Uniform Message Control Transmission Protocol (UMCT)\n *  哈士奇控制传输协议（HCTP）是统一消息控制传输协议（UMCT）的典型实现\n *  *****************************************************************************************\n */\npublic final class HuskyCTPConstants {\n\n    public static final String HCTP_DUP_PASSIVE_CHANNEL_KEY      = \"HCTPPassiveChannel\";\n\n    public static final int HCTP_CONTROL_MASK                   = 0xFFB00000;\n\n    public static final int HCTP_DUP_CONTROL_MASK               = 0xFFBEA000;\n\n    public static final int HCTP_DUP_CONTROL_REGISTER           = HCTP_DUP_CONTROL_MASK | 0x00000001;\n\n    public static final int HCTP_DUP_CONTROL_ALIVE              = HCTP_DUP_CONTROL_MASK | 0x00000002;\n\n    public static final int HCTP_DUP_CONTROL_PASSIVE_REQUEST    = HCTP_DUP_CONTROL_MASK | 0x00000010;\n\n    public static final int HCTP_DUP_CONTROL_PASSIVE_RESPONSE   = HCTP_DUP_CONTROL_MASK | 0x00000011;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/HuskyServiceErrorMessages.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.hydra.umc.msg.Status;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.wolf.UlfInformMessage;\n\npublic final class HuskyServiceErrorMessages {\n    \n    public static final UMCMessage HCTP_INTERNAL_ERROR ;\n\n    public static final UMCMessage HCTP_ILLEGAL_MESSAGE ;\n\n    public static final UMCMessage HCTP_MAPPING_NOT_FOUND ;\n\n\n    static {\n        HCTP_INTERNAL_ERROR = new UlfInformMessage( Bytes.Empty );\n        HCTP_INTERNAL_ERROR.getHead().setStatus( Status.InternalError );\n\n        HCTP_ILLEGAL_MESSAGE = new UlfInformMessage( Bytes.Empty );\n        HCTP_INTERNAL_ERROR.getHead().setStatus( Status.IllegalMessage );\n\n        HCTP_MAPPING_NOT_FOUND = new UlfInformMessage( Bytes.Empty );\n        HCTP_INTERNAL_ERROR.getHead().setStatus( Status.MappingNotFound );\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/Interceptor.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\npublic interface Interceptor extends AddressedEntity {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/MessagePackage.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\npublic interface MessagePackage extends AddressedEntity {\n    String StandardPathSeparator = \"\\\\.|\\\\/|::\";\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/RequestPackage.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\npublic interface RequestPackage extends MessagePackage {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/ResponsePackage.java",
    "content": "package com.pinecone.hydra.umct.husky;\n\npublic interface ResponsePackage extends MessagePackage {\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ArchIfaceCompiler.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.List;\n\nimport com.pinecone.hydra.umct.stereotype.Iface;\nimport com.pinecone.hydra.umct.stereotype.IfaceUtils;\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\n\npublic abstract class ArchIfaceCompiler extends ArchIfaceInspector implements IfaceCompiler {\n\n    public ArchIfaceCompiler(ClassPool classPool, ClassLoader classLoader ) {\n        super( classPool, classLoader );\n    }\n\n    protected IfaceMethodSignature resolveMethodSignature( CtMethod method ) {\n        try{\n            Class<? >[] parameters = this.getParameters( method );\n\n            CtClass retType;\n            try {\n                retType = method.getReturnType();\n            }\n            catch ( NotFoundException e ) {\n                retType = null;\n            }\n\n            Class<? > returnType;\n            if ( retType != null ) {\n                returnType = this.reinterpretClass( retType.getName() );\n            }\n            else {\n                returnType = null;\n            }\n\n            String[] parameterTypes = ArchIfaceCompiler.evalGenericParameterTypes( method );\n            String   returnGType    = ArchIfaceCompiler.evalGenericReturnType( method );\n\n            return new IfaceMethodSignature(\n                    parameters, parameterTypes, returnType, returnGType\n            );\n        }\n        catch ( ClassNotFoundException e ) {\n            throw new CompileException( e );\n        }\n    }\n\n    protected MethodDigest compile ( ClassDigest classDigest, CtMethod method ) {\n        try {\n\n            IfaceMethodSignature signature = this.resolveMethodSignature( method );\n\n\n            Class<? >[] parameters  = signature.getParameters();\n            Class<? 
>   returnType  = signature.getReturnType();\n            String[] parameterTypes = signature.getParameterGenericTypes();\n            String   returnGType    = signature.getReturnGenericType();\n\n            MethodDigest ret = new GenericMethodDigest(\n                    classDigest, this.getIfaceMethodName( method ), method.getName(), parameters, parameterTypes, returnType, returnGType, null\n            );\n\n            List<IfaceParamsDigest> ifaceParamsDigests = this.inspectArgIfaceParams( ret, method );\n            ret.apply(ifaceParamsDigests);\n            return ret;\n        }\n        catch ( ClassNotFoundException e ) {\n            throw new CompileException( e );\n        }\n    }\n\n    protected String evalLogicClassName( String className ) throws NotFoundException {\n        String szLogicClassName = className;\n        CtClass ctClass = this.mClassPool.get( className );\n        if ( ctClass != null ) {\n            Iface cIface     = this.getAnnotation( ctClass, Iface.class );\n            String szLogicCN = IfaceUtils.queryIfaceLogicClassName( cIface );\n            if ( szLogicCN != null ) {\n                szLogicClassName = szLogicCN;\n            }\n        }\n        return szLogicClassName;\n    }\n\n    @Override\n    public ClassDigest compile( Class<?> clazz, boolean bAsIface ) {\n        return this.compile( clazz.getName(), bAsIface );\n    }\n\n    @Override\n    public ClassDigest compile( String className, boolean bAsIface ) {\n        try {\n            List<CtMethod > ifaceMethods = this.inspect( className, bAsIface );\n            if ( ifaceMethods.isEmpty() ) {\n                return null;\n            }\n\n            String szLogicClassName = this.evalLogicClassName( className );\n            ClassDigest classDigest = new GenericClassDigest( szLogicClassName, className );\n            for ( CtMethod ctMethod : ifaceMethods ) {\n                MethodDigest methodDigest = this.compile( classDigest, ctMethod );\n          
      classDigest.addMethod( methodDigest );\n            }\n\n            return classDigest;\n        }\n        catch ( NotFoundException e ) {\n            throw new CompileException( e );\n        }\n    }\n\n    public static final class IfaceMethodSignature {\n        protected final Class<?>[] mParameters;\n        protected final String[]   mParameterGenericTypes;\n\n        protected final Class<?>   mReturnType;\n        protected final String     mszReturnGenericType;\n\n        public IfaceMethodSignature(\n                Class<?>[] parameters, String[] parameterGenericTypes, Class<?> returnType, String returnGenericType\n        ) {\n            this.mParameters = parameters;\n            this.mParameterGenericTypes = parameterGenericTypes;\n            this.mReturnType = returnType;\n            this.mszReturnGenericType = returnGenericType;\n        }\n\n        public Class<?>[] getParameters() {\n            return this.mParameters;\n        }\n\n        public String[] getParameterGenericTypes() {\n            return this.mParameterGenericTypes;\n        }\n\n        public Class<?> getReturnType() {\n            return this.mReturnType;\n        }\n\n        public String getReturnGenericType() {\n            return this.mszReturnGenericType;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ArchIfaceInspector.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.lang.reflect.Modifier;\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.hydra.umct.mapping.ArchMappingInspector;\nimport com.pinecone.hydra.umct.mapping.ParamsDigest;\nimport com.pinecone.hydra.umct.stereotype.Iface;\n\nimport com.pinecone.hydra.umct.stereotype.IfaceUtils;\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\n\npublic abstract class ArchIfaceInspector extends ArchMappingInspector implements IfaceInspector {\n    public ArchIfaceInspector( ClassPool classPool, ClassLoader classLoader ) {\n        super( classPool, classLoader );\n    }\n\n    @Override\n    public List<CtMethod> inspect( Class<?> clazz, boolean bAsIface ) throws NotFoundException {\n        return this.inspect( clazz.getName(), bAsIface );\n    }\n\n    @Override\n    public List<CtMethod> inspect( String className, boolean bAsIface ) throws NotFoundException {\n        List<CtMethod> ifaceMethods = new ArrayList<>();\n        CtClass ctClass = this.mClassPool.get( className );\n\n        boolean classHasIfaceAnnotation = this.hasOwnAnnotation( ctClass, Iface.class );\n\n        for ( CtMethod method : ctClass.getDeclaredMethods() ) {\n            if ( Modifier.isPublic( method.getModifiers() ) ) {\n                if ( bAsIface || classHasIfaceAnnotation || this.methodHasAnnotation( method, Iface.class ) ) {\n                    ifaceMethods.add( method );\n                }\n            }\n        }\n\n        return ifaceMethods;\n    }\n\n    @Override\n    public String getIfaceMethodName( CtMethod method ) throws ClassNotFoundException {\n        String ifaceName = method.getName();\n\n        Object annotation = method.getAnnotation( Iface.class );\n        if ( annotation != null ) {\n            Iface iface = (Iface) annotation;\n            String name = 
IfaceUtils.getIfaceNameFieldVal( iface );\n            if ( StringUtils.isNoneEmpty( name ) ) {\n                ifaceName = name;\n            }\n        }\n\n        return ifaceName;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected List<IfaceParamsDigest> inspectArgIfaceParams( Object methodDigest, CtMethod method ) {\n        return (List<IfaceParamsDigest> ) (List) this.inspectArgParams( methodDigest, method );\n    }\n\n    @Override\n    protected ParamsDigest newParamsDigest( Object methodDigest, int parameterIndex, String name, String value, String defaultValue, boolean required ) {\n        return new GenericIfaceParamsDigest(\n                (MethodDigest) methodDigest, parameterIndex, this.annotationKeyNormalize(name), this.annotationKeyNormalize(value), this.annotationKeyNormalize(defaultValue), required\n        );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ArchProtoIfaceCompiler.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.List;\n\nimport javassist.ClassPool;\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\n\npublic abstract class ArchProtoIfaceCompiler extends ArchIfaceCompiler implements ProtoIfaceCompiler {\n    protected CompilerEncoder mCompilerEncoder;\n\n    public ArchProtoIfaceCompiler(ClassPool classPool, ClassLoader classLoader, CompilerEncoder encoder ) {\n        super( classPool, classLoader );\n\n        this.mCompilerEncoder = encoder;\n    }\n\n    public ArchProtoIfaceCompiler(ClassPool classPool, ClassLoader classLoader ) {\n        this( classPool, classLoader, CompilerEncoder.DefaultMethodArgumentsCompilerEncoder );\n    }\n\n    protected MethodDigest compile ( ClassDigest classDigest, CtMethod method, CompilerEncoder encoder ) {\n        try {\n\n            IfaceMethodSignature signature = this.resolveMethodSignature( method );\n\n            Class<? >[] parameters  = signature.getParameters();\n            Class<? 
>   returnType  = signature.getReturnType();\n            String[] parameterTypes = signature.getParameterGenericTypes();\n            String   returnGType    = signature.getReturnGenericType();\n\n            MethodDigest ret;\n            if( encoder != null ) {\n                ret = new DynamicMethodPrototype(\n                        classDigest, this.getIfaceMethodName( method ), method.getName(), parameters, parameterTypes, returnType, returnGType, encoder, null\n                );\n            }\n            else {\n                ret = new GenericMethodDigest(\n                        classDigest, this.getIfaceMethodName( method ), method.getName(), parameters, parameterTypes, returnType, returnGType, null\n                );\n            }\n\n            List<IfaceParamsDigest> ifaceParamsDigests = this.inspectArgIfaceParams( ret, method );\n            ret.apply(ifaceParamsDigests);\n            return ret;\n        }\n        catch ( ClassNotFoundException e ) {\n            throw new CompileException( e );\n        }\n    }\n\n\n    @Override\n    public ClassDigest compile ( String className, boolean bAsIface ) {\n        return this.compile( className, bAsIface, this.mCompilerEncoder );\n    }\n\n    @Override\n    public ClassDigest compile ( Class<? 
> clazz, boolean bAsIface ) {\n        return this.compile( clazz.getName(), bAsIface );\n    }\n\n    @Override\n    public ClassDigest compile( Class<?> clazz, boolean bAsIface, CompilerEncoder encoder ) {\n        return this.compile( clazz.getName(), bAsIface, encoder );\n    }\n\n    @Override\n    public ClassDigest reinterpret( Class<?> clazz, boolean bAsIface ) {\n        return this.compile( clazz, bAsIface, null );\n    }\n\n    @Override\n    public ClassDigest reinterpret( String className, boolean bAsIface ) {\n        return this.compile( className, bAsIface, null );\n    }\n\n    @Override\n    public ClassDigest compile( String className, boolean bAsIface, CompilerEncoder encoder ) {\n        try {\n            List<CtMethod > ifaceMethods = this.inspect( className, bAsIface );\n            if ( ifaceMethods.isEmpty() ) {\n                return null;\n            }\n\n            String szLogicClassName = this.evalLogicClassName( className );\n            ClassDigest classDigest = new GenericClassDigest( szLogicClassName, className );\n            for ( CtMethod ctMethod : ifaceMethods ) {\n                MethodDigest methodDigest = this.compile( classDigest, ctMethod, encoder );\n                classDigest.addMethod( methodDigest );\n            }\n\n            return classDigest;\n        }\n        catch ( NotFoundException e ) {\n            throw new CompileException( e );\n        }\n    }\n\n    @Override\n    public CompilerEncoder getCompilerEncoder() {\n        return this.mCompilerEncoder;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/BytecodeIfaceCompiler.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\n\nimport javassist.ClassPool;\n\npublic class BytecodeIfaceCompiler extends ArchProtoIfaceCompiler implements ProtoInterfacialCompiler {\n    public BytecodeIfaceCompiler(ClassPool classPool, ClassLoader classLoader, CompilerEncoder encoder ) {\n        super( classPool, classLoader, encoder );\n    }\n\n    public BytecodeIfaceCompiler(ClassPool classPool, ClassLoader classLoader ) {\n        super( classPool, classLoader );\n    }\n\n    public BytecodeIfaceCompiler(ClassPool classPool ) {\n        super( classPool, Thread.currentThread().getContextClassLoader() );\n    }\n\n    @Override\n    public IfaceMappingDigest compile( MappingDigest digest ) {\n        return new GenericIfaceMappingDigest( digest, this.mCompilerEncoder );\n    }\n\n    @Override\n    public List<IfaceMappingDigest> compile( List<MappingDigest> digests ) {\n        List<IfaceMappingDigest> result = new ArrayList<>( digests.size() );\n        for ( MappingDigest digest : digests ) {\n            result.add( this.compile( digest ) );\n        }\n        return result;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ClassDigest.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ClassDigest extends Pinenut {\n    String getClassName();\n\n    String getPhyClassName();\n\n    void addMethod( MethodDigest methodDigest );\n\n    List<MethodDigest> getMethodDigests();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/CompileException.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport com.pinecone.hydra.umct.mapping.InspectException;\n\npublic class CompileException extends InspectException {\n    public CompileException    () {\n        super();\n    }\n\n    public CompileException    ( String message ) {\n        super(message);\n    }\n\n    public CompileException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public CompileException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected CompileException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/CompilerEncoder.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.Set;\n\nimport com.google.protobuf.Descriptors;\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\nimport com.pinecone.ulf.util.protobuf.Options;\n\npublic interface CompilerEncoder extends Pinenut {\n    CompilerEncoder DefaultMethodArgumentsCompilerEncoder = new GenericCompilerEncoder( \"_Arguments\" );\n\n    FieldProtobufEncoder getEncoder();\n\n    Options getOptions();\n\n    String getEntityExtend();\n\n    Set<String > getExceptedKeys();\n\n\n    Descriptors.Descriptor transform( DataStructureEntity entity );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/DigestIfaceCompiler.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\n\nimport javassist.ClassPool;\n\npublic class DigestIfaceCompiler extends ArchIfaceCompiler implements InterfacialCompiler {\n\n    public DigestIfaceCompiler( ClassPool classPool, ClassLoader classLoader ) {\n        super( classPool, classLoader );\n    }\n\n    public DigestIfaceCompiler( ClassPool classPool ) {\n        super( classPool, Thread.currentThread().getContextClassLoader() );\n    }\n\n    @Override\n    public IfaceMappingDigest compile( MappingDigest digest ) {\n        return new GenericIfaceMappingDigest( digest, CompilerEncoder.DefaultMethodArgumentsCompilerEncoder );\n    }\n\n    @Override\n    public List<IfaceMappingDigest> compile(List<MappingDigest> digests ) {\n        List<IfaceMappingDigest> result = new ArrayList<>( digests.size() );\n        for ( MappingDigest digest : digests ) {\n            result.add( this.compile( digest ) );\n        }\n        return result;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/DynamicMethodPrototype.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.List;\n\nimport com.google.protobuf.Descriptors;\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.hydra.umct.husky.function.ArgumentRequest;\nimport com.pinecone.hydra.umct.husky.function.GenericArgumentRequest;\n\npublic class DynamicMethodPrototype extends GenericMethodDigest implements MethodPrototype {\n    protected Descriptors.Descriptor        mArgumentsDescriptor;\n\n    protected Descriptors.Descriptor        mReturnDescriptor;\n\n    public DynamicMethodPrototype(\n            ClassDigest classDigest, String szName, String szRawName,\n            Class<?>[] parameters, String[] parametersGenericLabels,\n            Class<?> returnType, String genericRLabel,\n            CompilerEncoder encoder, List<IfaceParamsDigest> ifaceParamsDigests\n    ) {\n        super( classDigest, szName, szRawName, parameters, parametersGenericLabels, returnType, genericRLabel, ifaceParamsDigests );\n\n        if( this.mArgumentTemplate != null ) {\n            this.mArgumentsDescriptor = encoder.transform( this.mArgumentTemplate );\n        }\n\n        if( this.mReturnType != null && !this.mReturnType.equals( void.class ) ) {\n            this.mReturnDescriptor    = encoder.getEncoder().transform( this.mReturnType, this.getGenericReturnTypeLabel(), null, encoder.getExceptedKeys() );\n        }\n    }\n\n    @Override\n    public Descriptors.Descriptor getArgumentsDescriptor() {\n        return this.mArgumentsDescriptor;\n    }\n\n    @Override\n    public Descriptors.Descriptor getReturnDescriptor() {\n        return this.mReturnDescriptor;\n    }\n\n    @Override\n    public ArgumentRequest conformRequest() {\n        DataStructureEntity protoEntity = this.getArgumentTemplate();\n        return new GenericArgumentRequest( protoEntity.getName(), protoEntity 
);\n    }\n\n    @Override\n    public ArgumentRequest conformRequest( Object[] args ) {\n        DataStructureEntity protoEntity = this.getArgumentTemplate();\n        ArgumentRequest request = new GenericArgumentRequest( protoEntity.getName(), protoEntity );\n        if( args != null ) {\n            for ( int i = 0; i < args.length; ++i ) {\n                request.setField( i, args[ i ] );\n            }\n        }\n\n        return request;\n    }\n\n\n    @Override\n    public String toJSONString() {\n        List<Descriptors.FieldDescriptor > argFields = null;\n        if( this.getArgumentsDescriptor() != null ) {\n            argFields = this.getArgumentsDescriptor().getFields();\n        }\n\n        List<Descriptors.FieldDescriptor > retFields = null;\n        if( this.getReturnDescriptor() != null ) {\n            retFields = this.getReturnDescriptor().getFields();\n        }\n\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"name\"             , this.getName()                                 ),\n                new KeyValue<>( \"rawName\"          , this.getRawName()                              ),\n                new KeyValue<>( \"protoArguments\"   , argFields                                      ),\n                new KeyValue<>( \"protoReturn\"      , retFields                                      ),\n                new KeyValue<>( \"__ClassName__\"    , this.className()                               ),\n        } );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericClassDigest.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class GenericClassDigest implements ClassDigest {\n    protected String                 mszClassName;\n\n    protected String                 mszPhyClassName;\n\n    protected List<MethodDigest >    mMethodDigests;\n\n    public GenericClassDigest( String szClassName, String szPhyClassName ) {\n        this.mszClassName    = szClassName;\n        this.mszPhyClassName = szPhyClassName;\n        this.mMethodDigests  = new ArrayList<>();\n    }\n\n    public GenericClassDigest( String szClassName ) {\n        this( szClassName, szClassName );\n    }\n\n\n    @Override\n    public String getClassName() {\n        return this.mszClassName;\n    }\n\n    @Override\n    public String getPhyClassName() {\n        return this.mszPhyClassName;\n    }\n\n    @Override\n    public void addMethod( MethodDigest methodDigest ) {\n        this.mMethodDigests.add( methodDigest );\n    }\n\n    @Override\n    public List<MethodDigest> getMethodDigests() {\n        return this.mMethodDigests;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericCompilerEncoder.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.Set;\n\nimport com.google.protobuf.Descriptors;\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufEncoder;\nimport com.pinecone.ulf.util.protobuf.Options;\n\npublic class GenericCompilerEncoder implements CompilerEncoder {\n    protected FieldProtobufEncoder mEncoder;\n    protected Options              mOptions;\n    protected Set<String>          mExceptedKeys;\n    protected String               mszEntityExtend;\n\n    public GenericCompilerEncoder( FieldProtobufEncoder encoder, Options options, Set<String> exceptedKeys, String szEntityExtend ) {\n        this.mEncoder        = encoder;\n        this.mOptions        = options;\n        this.mExceptedKeys   = exceptedKeys;\n        this.mszEntityExtend = szEntityExtend;\n    }\n\n    public GenericCompilerEncoder( String szEntityExtend ) {\n        this( new GenericFieldProtobufEncoder(), Options.DefaultOptions, Units.emptySet(), szEntityExtend );\n    }\n\n    @Override\n    public FieldProtobufEncoder getEncoder() {\n        return this.mEncoder;\n    }\n\n    @Override\n    public Options getOptions() {\n        return this.mOptions;\n    }\n\n    @Override\n    public Set<String> getExceptedKeys() {\n        return this.mExceptedKeys;\n    }\n\n    @Override\n    public String getEntityExtend() {\n        return this.mszEntityExtend;\n    }\n\n    @Override\n    public Descriptors.Descriptor transform( DataStructureEntity entity ) {\n        String szEntityName = this.getOptions().normalizeDescriptorName( entity.getName() + this.getEntityExtend() );\n        return this.getEncoder().transform(\n                entity.getSegments(), szEntityName, this.getExceptedKeys(), this.getOptions()\n        );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericIfaceInspector.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport javassist.ClassPool;\n\npublic class GenericIfaceInspector extends ArchIfaceInspector implements IfaceInspector {\n    public GenericIfaceInspector( ClassPool classPool, ClassLoader classLoader ) {\n        super( classPool, classLoader );\n    }\n\n    public GenericIfaceInspector( ClassPool classPool ) {\n        super( classPool, Thread.currentThread().getContextClassLoader() );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericIfaceMappingDigest.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.lang.reflect.Method;\nimport java.util.List;\n\nimport com.google.protobuf.Descriptors;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\nimport com.pinecone.hydra.umct.mapping.GenericMappingDigest;\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\nimport com.pinecone.hydra.umct.mapping.ParamsDigest;\n\npublic class GenericIfaceMappingDigest extends GenericMappingDigest implements IfaceMappingDigest {\n    protected Descriptors.Descriptor        mArgumentsDescriptor;\n\n    protected Descriptors.Descriptor        mReturnDescriptor;\n\n    public GenericIfaceMappingDigest(\n            String[] szAddresses,\n            Class<?>[] parameters, String[] parametersGenericLabels,\n            Class<?> returnType, String szReturnGenericTypeLabel,\n            Class<?> classType, Method method, List<ParamsDigest> paramsDigests, UMCMethod[] interceptMethods, CompilerEncoder encoder\n    ) {\n        super(\n                szAddresses,\n                parameters, parametersGenericLabels,\n                returnType, szReturnGenericTypeLabel,\n                classType, method, paramsDigests, interceptMethods\n        );\n\n        this.encode( encoder );\n    }\n\n    public GenericIfaceMappingDigest( MappingDigest mappingDigest, CompilerEncoder encoder ) {\n        this.mszAddresses               = mappingDigest.getAddresses();\n        this.mInterceptMethods          = mappingDigest.getInterceptMethods();\n        this.mArgumentTemplate          = mappingDigest.getArgumentTemplate();\n        this.mReturnType                = mappingDigest.getReturnType();\n        this.mClassType                 = mappingDigest.getClassType();\n        this.mMappedMethod              = mappingDigest.getMappedMethod();\n        this.mParamsDigests             = mappingDigest.getParamsDigests();\n        this.mszReturnGenericTypeLabel  = mappingDigest.getReturnGenericTypeLabel();\n\n        this.encode( 
encoder );\n    }\n\n    protected void encode( CompilerEncoder encoder ) {\n        if( this.mArgumentTemplate != null ) {\n            this.mArgumentsDescriptor = encoder.transform( this.mArgumentTemplate );\n        }\n\n        if( this.mReturnType != null && !this.mReturnType.equals( void.class ) ) {\n            this.mReturnDescriptor    = encoder.getEncoder().transform( this.mReturnType, this.getReturnGenericTypeLabel(),null, encoder.getExceptedKeys() );\n        }\n    }\n\n    @Override\n    public Descriptors.Descriptor getArgumentsDescriptor() {\n        return this.mArgumentsDescriptor;\n    }\n\n    @Override\n    public Descriptors.Descriptor getReturnDescriptor() {\n        return this.mReturnDescriptor;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericIfaceParamsDigest.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\nimport com.pinecone.hydra.umct.mapping.GenericParamsDigest;\n\npublic class GenericIfaceParamsDigest extends GenericParamsDigest implements IfaceParamsDigest {\n    protected MethodDigest mMethodDigest;\n\n    public GenericIfaceParamsDigest( MethodDigest methodDigest, int parameterIndex, String name, String value, String defaultValue, boolean required ) {\n        super( parameterIndex, name, value, defaultValue, required );\n        this.mMethodDigest   = methodDigest;\n    }\n\n\n    @Override\n    public MethodDigest getMethodDigest() {\n        return this.mMethodDigest;\n    }\n\n    @Override\n    public String toJSONString() {\n        return super.toJSONString();\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/GenericMethodDigest.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.List;\n\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.lang.field.GenericStructure;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.umct.mapping.MethodDigestUtils;\nimport com.pinecone.hydra.umct.husky.function.MethodTemplates;\n\npublic class GenericMethodDigest implements MethodDigest {\n    protected ClassDigest             mClassDigest;\n\n    protected String                  mszName;\n\n    protected String                  mszRawName;\n\n    protected DataStructureEntity     mArgumentTemplate;\n\n    protected Class<?>                mReturnType;\n\n    protected String                  mszGenericReturnTypeLabel;\n\n    protected List<IfaceParamsDigest> mIfaceParamsDigests;\n\n    public GenericMethodDigest(\n            ClassDigest classDigest, String szName, String szRawName,\n            Class<?>[] parameters, String[] parametersGenericLabels,\n            Class<?> returnType, String genericRLabel,\n            List<IfaceParamsDigest> ifaceParamsDigests\n    ) {\n        this.mClassDigest               = classDigest;\n        this.mszName                    = szName;\n        this.mszRawName                 = szRawName;\n        this.mReturnType                = returnType;\n        this.mIfaceParamsDigests        = ifaceParamsDigests;\n        this.mszGenericReturnTypeLabel  = genericRLabel;\n\n        String szInterceptedPath = classDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + szName;\n        if( parameters == null || parameters.length == 0 ) {\n            this.mArgumentTemplate   = new GenericStructure( szInterceptedPath, 0 );\n        }\n        else {\n            this.mArgumentTemplate   = MethodTemplates.from( null, szInterceptedPath, parameters, parametersGenericLabels );\n       
 }\n    }\n\n    public GenericMethodDigest(\n            ClassDigest classDigest, String szName,\n            Class<?>[] parameters, String[] parametersGenericLabels,\n            Class<?> returnType, String genericRLabel,\n            List<IfaceParamsDigest> ifaceParamsDigests\n    ) {\n        this( classDigest, szName, szName, parameters, parametersGenericLabels, returnType, genericRLabel, ifaceParamsDigests );\n    }\n\n\n    @Override\n    public String getGenericReturnTypeLabel() {\n        return this.mszGenericReturnTypeLabel;\n    }\n\n    @Override\n    public void applyGenericReturnTypeLabel( String genericTypeLabel ) {\n        this.mszGenericReturnTypeLabel = genericTypeLabel;\n    }\n\n    @Override\n    public void apply( List<IfaceParamsDigest> ifaceParamsDigests) {\n        this.mIfaceParamsDigests = ifaceParamsDigests;\n    }\n\n    @Override\n    public List<String> getArgumentsKey() {\n        return MethodDigestUtils.getArgumentsKey( this.getParamsDigests(), this.getArgumentTemplate() );\n    }\n\n    @Override\n    public ClassDigest getClassDigest() {\n        return this.mClassDigest;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public String getFullName() {\n        return this.mClassDigest.getClassName() + Namespace.DEFAULT_SEPARATOR + this.getName();\n    }\n\n    @Override\n    public String getRawName() {\n        return this.mszRawName;\n    }\n\n    @Override\n    public DataStructureEntity getArgumentTemplate() {\n        return this.mArgumentTemplate;\n    }\n\n    @Override\n    public Class<?> getReturnType() {\n        return this.mReturnType;\n    }\n\n    @Override\n    public List<IfaceParamsDigest> getParamsDigests() {\n        return this.mIfaceParamsDigests;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"name\"        , this.getName()            
                     ),\n                new KeyValue<>( \"rawName\"     , this.getRawName()                              ),\n                new KeyValue<>( \"arguments\"   , this.getArgumentTemplate().getSegments()       ),\n                new KeyValue<>( \"return\"      , this.getReturnType()                           ),\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/IfaceCompiler.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\npublic interface IfaceCompiler extends IfaceInspector {\n\n    ClassDigest compile ( String className, boolean bAsIface );\n\n    ClassDigest compile ( Class<? > clazz, boolean bAsIface );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/IfaceInspector.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\n\npublic interface IfaceInspector extends Pinenut {\n    List<CtMethod> inspect( Class<?> clazz, boolean bAsIface ) throws NotFoundException ;\n\n    List<CtMethod> inspect( String className, boolean bAsIface ) throws NotFoundException;\n\n    String getIfaceMethodName( CtMethod method ) throws ClassNotFoundException;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/IfaceMappingDigest.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport com.google.protobuf.Descriptors;\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\n\npublic interface IfaceMappingDigest extends MappingDigest {\n    Descriptors.Descriptor getArgumentsDescriptor();\n\n    Descriptors.Descriptor getReturnDescriptor();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/IfaceParamsDigest.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport com.pinecone.hydra.umct.mapping.ParamsDigest;\n\npublic interface IfaceParamsDigest extends ParamsDigest {\n    MethodDigest getMethodDigest();\n\n    default ClassDigest getClassDigest() {\n        return this.getMethodDigest().getClassDigest();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/InterfacialCompiler.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.List;\n\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\n\npublic interface InterfacialCompiler extends IfaceCompiler {\n    IfaceMappingDigest compile( MappingDigest digest );\n\n    List<IfaceMappingDigest > compile( List<MappingDigest> digests );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/MethodDigest.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport java.util.List;\n\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.ReflectionUtils;\n\npublic interface MethodDigest extends Pinenut {\n\n    ClassDigest getClassDigest();\n\n    String getName();\n\n    String getFullName();\n\n    String getRawName();\n\n    DataStructureEntity getArgumentTemplate();\n\n    Class<?> getReturnType();\n\n    String getGenericReturnTypeLabel();\n\n    default String[] getGenericReturnTypeNames() {\n        return ReflectionUtils.extractGenericClassNames( this.getGenericReturnTypeLabel() );\n    }\n\n    void applyGenericReturnTypeLabel( String genericTypeLabel );\n\n    default boolean hasDeclaredGenericReturnType() {\n        return this.getGenericReturnTypeLabel() != null && this.getGenericReturnTypeLabel().contains( \"<\" ) && this.getGenericReturnTypeLabel().contains( \">\" );\n    }\n\n    List<IfaceParamsDigest> getParamsDigests();\n\n    void apply( List<IfaceParamsDigest> ifaceParamsDigests);\n\n    List<String> getArgumentsKey();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/MethodPrototype.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\nimport com.google.protobuf.Descriptors;\nimport com.pinecone.hydra.umct.husky.function.ArgumentRequest;\n\npublic interface MethodPrototype extends MethodDigest {\n    Descriptors.Descriptor getArgumentsDescriptor();\n\n    Descriptors.Descriptor getReturnDescriptor();\n\n    ArgumentRequest conformRequest();\n\n    ArgumentRequest conformRequest( Object[] args );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ProtoIfaceCompiler.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\npublic interface ProtoIfaceCompiler extends IfaceCompiler {\n\n    CompilerEncoder getCompilerEncoder();\n\n    ClassDigest compile ( String className, boolean bAsIface, CompilerEncoder encoder );\n\n    ClassDigest compile ( Class<? > clazz, boolean bAsIface, CompilerEncoder encoder );\n\n    ClassDigest reinterpret ( String className, boolean bAsIface );\n\n    ClassDigest reinterpret ( Class<? > clazz, boolean bAsIface );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/compiler/ProtoInterfacialCompiler.java",
    "content": "package com.pinecone.hydra.umct.husky.compiler;\n\npublic interface ProtoInterfacialCompiler extends InterfacialCompiler, ProtoIfaceCompiler {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/ArgumentRequest.java",
    "content": "package com.pinecone.hydra.umct.husky.function;\n\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.hydra.umct.husky.RequestPackage;\n\npublic interface ArgumentRequest extends RequestPackage {\n    void from( Class<? >[] parameters );\n\n    void from( Object[] args );\n\n    void conform( DataStructureEntity tpl );\n\n    DataStructureEntity getDataStructureEntity() ;\n\n    FieldEntity[] getSegments() ;\n\n    void setField( int index, String key, Object val ) ;\n\n    void setField( int index, String key, Object val, String genericLabel );\n\n    void setField( int index, String key, Class<?> type ) ;\n\n    void setField( int index, String key, Class<?> type, String genericLabel );\n\n    void setField( int index, Object val ) ;\n\n    FieldEntity getField( int index );\n\n    FieldEntity findField( String key );\n\n    ArgumentRequest instancing();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/FunctionMold.java",
    "content": "package com.pinecone.hydra.umct.husky.function;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface FunctionMold<TR > extends Pinenut {\n    ArgumentRequest getArgumentForm();\n\n    ReturnResponse<TR> getReturnForm();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/GenericArgumentRequest.java",
    "content": "package com.pinecone.hydra.umct.husky.function;\n\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.framework.lang.field.GenericFieldEntity;\nimport com.pinecone.framework.lang.field.GenericStructure;\nimport com.pinecone.hydra.umct.husky.ArchRequestPackage;\n\npublic class GenericArgumentRequest extends ArchRequestPackage implements ArgumentRequest {\n    protected DataStructureEntity mDataStructureEntity;\n\n    protected GenericArgumentRequest( String szInterceptedPath ) {\n        super( szInterceptedPath );\n    }\n\n    public GenericArgumentRequest( DataStructureEntity dataStructureEntity, String szInterceptedPath ) {\n        super( szInterceptedPath );\n        this.mDataStructureEntity = dataStructureEntity;\n    }\n\n    public GenericArgumentRequest( String szInterceptedPath, Class<? >[] parameters ) {\n        super( szInterceptedPath );\n        this.from( parameters );\n    }\n\n    public GenericArgumentRequest( String szInterceptedPath, Object[] args ) {\n        super( szInterceptedPath );\n        this.from( args );\n    }\n\n    public GenericArgumentRequest( String szInterceptedPath, DataStructureEntity tpl ) {\n        super( szInterceptedPath );\n        this.conform( tpl );\n    }\n\n    public GenericArgumentRequest( FieldEntity[] segments ) {\n        this( new GenericStructure( segments, 0, 1 ) );\n    }\n\n    public GenericArgumentRequest( DataStructureEntity entity ) {\n        super( (String) entity.getSegments()[ 0 ].getValue() );\n        this.mDataStructureEntity = entity;\n    }\n\n    @Override\n    public void from( Class<? 
>[] parameters ) {\n        this.mDataStructureEntity = MethodTemplates.from( this.mDataStructureEntity, this.mszInterceptedPath, parameters );\n    }\n\n    @Override\n    public void from( Object[] args ) {\n        if( this.mDataStructureEntity == null || args.length != this.mDataStructureEntity.size() ) {\n            this.mDataStructureEntity = new GenericStructure( this.mszInterceptedPath, args.length );\n        }\n\n        for ( int i = 0; i < args.length; ++i ) {\n            this.mDataStructureEntity.setDataField( i,\n                    args[ i ].getClass().getName().replace( \".\", \"_\" ) + \"_\" + i,\n                    args[ i ]\n            );\n        }\n    }\n\n    @Override\n    public void conform( DataStructureEntity tpl ) {\n        this.mDataStructureEntity = MethodTemplates.conform( tpl, this.mszInterceptedPath );\n    }\n\n    @Override\n    public DataStructureEntity getDataStructureEntity() {\n        return this.mDataStructureEntity;\n    }\n\n    @Override\n    public FieldEntity[] getSegments() {\n        return this.mDataStructureEntity.getSegments();\n    }\n\n\n    @Override\n    public void setField( int index, String key, Object val ) {\n        this.mDataStructureEntity.setDataField( index, key, val );\n    }\n\n    @Override\n    public void setField( int index, String key, Object val, String genericLabel ) {\n        this.mDataStructureEntity.setDataField( index, key, val, genericLabel );\n    }\n\n    @Override\n    public void setField( int index, String key, Class<?> type ) {\n        this.mDataStructureEntity.setDataField( index, key, type );\n    }\n\n    @Override\n    public void setField( int index, String key, Class<?> type, String genericLabel ) {\n        this.mDataStructureEntity.setDataField( index, key, type, genericLabel );\n    }\n\n    @Override\n    public void setField( int index, Object val ) {\n        FieldEntity field = this.getField( index );\n        if( field != null ) {\n            field.setValue( 
val );\n        }\n        else {\n            this.setField( index, Integer.toString( index ), val );\n        }\n    }\n\n    @Override\n    public FieldEntity getField( int index ) {\n        return this.mDataStructureEntity.getDataField( index );\n    }\n\n    @Override\n    public FieldEntity findField( String key ) {\n        return this.mDataStructureEntity.findDataField( key );\n    }\n\n    @Override\n    public String toJSONString() {\n        return this.mDataStructureEntity.toJSONString();\n    }\n\n    @Override\n    public ArgumentRequest instancing() {\n        FieldEntity[] proto = this.getSegments();\n\n        FieldEntity[] ins = new FieldEntity[ proto.length ];\n        for ( int i = 1; i < proto.length; ++i ) {\n            FieldEntity entity = proto[ i ];\n            ins[ i ] = new GenericFieldEntity( entity.getName(), null, entity.getType(), entity.getGenericTypeLabel() );\n        }\n        ins[ 0 ] = proto[ 0 ];\n\n        return new GenericArgumentRequest( ins );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/GenericFunctionMold.java",
    "content": "package com.pinecone.hydra.umct.husky.function;\n\npublic class GenericFunctionMold<TR > implements FunctionMold<TR > {\n    protected ArgumentRequest    mArgumentRequest;\n    protected ReturnResponse<TR> mReturnResponse;\n\n    public GenericFunctionMold( ArgumentRequest request, ReturnResponse<TR> returnResponse ) {\n        this.mArgumentRequest = request;\n        this.mReturnResponse  = returnResponse;\n    }\n\n    @Override\n    public ArgumentRequest getArgumentForm() {\n        return this.mArgumentRequest;\n    }\n\n    @Override\n    public ReturnResponse<TR> getReturnForm() {\n        return this.mReturnResponse;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/GenericReturnResponse.java",
    "content": "package com.pinecone.hydra.umct.husky.function;\n\nimport com.pinecone.hydra.umct.husky.ArchResponsePackage;\n\npublic class GenericReturnResponse<T> extends ArchResponsePackage implements ReturnResponse<T> {\n    protected T mReturnTarget;\n\n    public GenericReturnResponse( String szInterceptedPath, T returnVal ) {\n        super( szInterceptedPath );\n\n        this.mReturnTarget = returnVal;\n    }\n\n    @Override\n    public T getReturn() {\n        return this.mReturnTarget;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/MethodTemplates.java",
    "content": "package com.pinecone.hydra.umct.husky.function;\n\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.framework.lang.field.GenericStructure;\nimport com.pinecone.ulf.util.protobuf.WolfProtobufConstants;\n\npublic final class MethodTemplates {\n    public static DataStructureEntity from( DataStructureEntity tpl, String szInterceptedPath, Class<? >[] parameters, String[] parametersGenericLabels ) {\n        if( tpl == null || parameters.length != tpl.size() ) {\n            tpl = new GenericStructure( szInterceptedPath, parameters.length );\n        }\n\n        int i = 0;\n        if ( parametersGenericLabels == null || parametersGenericLabels.length == 0 ) {\n            for( Class<? > parameter : parameters ) {\n                MethodTemplates.setDataField( i, parameter, tpl );\n                ++i;\n            }\n        }\n        else {\n            for( Class<? > parameter : parameters ) {\n                MethodTemplates.setDataField( i, parameter, tpl, parametersGenericLabels[ i ] );\n                ++i;\n            }\n        }\n\n        return tpl;\n    }\n\n    public static DataStructureEntity from( DataStructureEntity tpl, String szInterceptedPath, Class<? >[] parameters ) {\n        if( tpl == null || parameters.length != tpl.size() ) {\n            tpl = new GenericStructure( szInterceptedPath, parameters.length );\n        }\n\n        int i = 0;\n        for( Class<? 
> parameter : parameters ) {\n            MethodTemplates.setDataField( i, parameter, tpl );\n            ++i;\n        }\n\n        return tpl;\n    }\n\n    public static DataStructureEntity conform( DataStructureEntity tpl, String szInterceptedPath ) {\n        if( tpl == null ) {\n            return null;\n        }\n        DataStructureEntity neo = new GenericStructure( szInterceptedPath, tpl.size() );\n        FieldEntity[] segs = tpl.getSegments();\n\n        int j = 0;\n        for ( int i = neo.getDataOffset(); i < segs.length; ++i ) {\n            FieldEntity seg = segs[ i ];\n            Class<? > parameter = seg.getType();\n\n            MethodTemplates.setDataField( j, parameter, neo );\n            ++j;\n        }\n\n        return neo;\n    }\n\n    public static DataStructureEntity conform( DataStructureEntity tpl, String szInterceptedPath, String genericLabel ) {\n        if( tpl == null ) {\n            return null;\n        }\n        DataStructureEntity neo = new GenericStructure( szInterceptedPath, tpl.size() );\n        FieldEntity[] segs = tpl.getSegments();\n\n        int j = 0;\n        for ( int i = neo.getDataOffset(); i < segs.length; ++i ) {\n            FieldEntity seg = segs[ i ];\n            Class<? > parameter = seg.getType();\n\n            MethodTemplates.setDataField( j, parameter, neo, genericLabel );\n            ++j;\n        }\n\n        return neo;\n    }\n\n    protected static void setDataField( int i, Class<? 
> parameter, DataStructureEntity tpl, String genericLabel ) {\n        String szNormalName = parameter.getName();\n        if( szNormalName.startsWith( \"[\" ) ) {\n            szNormalName = szNormalName.replace( \"[\", \"\" );\n            szNormalName += WolfProtobufConstants.ArrayTransformedName;\n        }\n\n        szNormalName = szNormalName.replaceAll( \"[^a-zA-Z0-9_]\", \"_\" ) + \"_\" + i;\n        tpl.setDataField( i, szNormalName, parameter, genericLabel );\n    }\n\n    protected static void setDataField( int i, Class<? > parameter, DataStructureEntity tpl ) {\n        String szNormalName = parameter.getName();\n        if( szNormalName.startsWith( \"[\" ) ) {\n            szNormalName = szNormalName.replace( \"[\", \"\" );\n            szNormalName += WolfProtobufConstants.ArrayTransformedName;\n        }\n        tpl.setDataField( i,\n                szNormalName.replaceAll( \"[^a-zA-Z0-9_]\", \"_\" ) + \"_\" + i,\n                parameter\n        );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/function/ReturnResponse.java",
    "content": "package com.pinecone.hydra.umct.husky.function;\n\nimport com.pinecone.hydra.umct.husky.ResponsePackage;\n\npublic interface ReturnResponse<T>  extends ResponsePackage {\n    T getReturn();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/heartbeat/HeartbeatConstants.java",
    "content": "package com.pinecone.hydra.umct.husky.heartbeat;\n\nimport com.pinecone.hydra.umc.msg.InformMessage;\nimport com.pinecone.hydra.umc.wolf.UlfInstructMessage;\n\npublic final class HeartbeatConstants {\n    public static final int HCTP_HEART_CONTROL_MASK              = 0xFFBEB000;   // 0x000EB000 | 0xFFB00000\n\n    public static final int HCTP_HEART_REQUEST_ALIVE             = HCTP_HEART_CONTROL_MASK | 0x00000010;\n\n    public static final int HCTP_HEART_RESPONSE_ACK              = HCTP_HEART_CONTROL_MASK | 0x00000011;\n\n    public static final InformMessage HCTP_HEART_ALIVE           = new UlfInstructMessage( HCTP_HEART_REQUEST_ALIVE );\n\n    public static final InformMessage HCTP_HEART_ACK             = new UlfInstructMessage( HCTP_HEART_RESPONSE_ACK );\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/heartbeat/HuskyHeartbeatControl.java",
    "content": "package com.pinecone.hydra.umct.husky.heartbeat;\n\nimport java.io.IOException;\nimport java.util.Collection;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ConcurrentMap;\nimport java.util.concurrent.TimeUnit;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.vita.HeartbeatControl;\n\nimport io.netty.util.HashedWheelTimer;\nimport io.netty.util.Timeout;\nimport io.netty.util.TimerTask;\n\npublic class HuskyHeartbeatControl implements HeartbeatControl {\n    protected final HashedWheelTimer                            mTimer;\n    protected final ConcurrentMap<ChannelControlBlock, Timeout> mHeartbeatTasks;\n\n    protected static int nextPowerOfTwo( int num ) {\n        int n = 1;\n        while (n < num) {\n            n <<= 1;\n        }\n        return n;\n    }\n\n    public static HashedWheelTimer createTimer( long heartIntervalMillis ) {\n        // tickDuration ∈ [ 100ms, 1s ]\n        // Tick too short (<100ms) results in high CPU polling.\n        // Tick too long (>1s) causes significant heartbeat delay.\n        // Optimal range is between 250ms and 500ms.\n        // 40 is an empirical value for balancing, ensuring that heartbeats are evenly distributed on the time wheel (helps avoid instantaneous load).\n\n        // Tick 过短（<100ms）CPU 轮询过高\n        // Tick 过长（>1s）心跳延迟较大\n        // 取 250ms ~ 800ms 较优\n        // 40 是均分调优经验值，让心跳能在时间轮上较好地分布均匀（较好地避免瞬时负载）\n        long tickDuration = Math.min( Math.max( heartIntervalMillis / 40, 250 ), 800 );\n\n        // Ensure time-wheel covered all HeartbeatIntervals.\n        int ticksPerWheel = (int) Math.ceil( (double) heartIntervalMillis / tickDuration );\n\n        // Adjust ticksPerWheel => 2^N\n        ticksPerWheel = HuskyHeartbeatControl.nextPowerOfTwo( ticksPerWheel );\n\n        return new HashedWheelTimer( tickDuration, TimeUnit.MILLISECONDS, ticksPerWheel );\n    }\n\n    
public HuskyHeartbeatControl( long heartIntervalMillis ) {\n        this.mTimer          = HuskyHeartbeatControl.createTimer( heartIntervalMillis );\n        //this.mTimer          = new HashedWheelTimer( 100, TimeUnit.MILLISECONDS, 512 );\n        this.mHeartbeatTasks = new ConcurrentHashMap<>();\n    }\n\n    @Override\n    public void registerChannels( Collection<ChannelControlBlock> channels, long intervalMillis ) {\n        for ( ChannelControlBlock ccb : channels ) {\n            this.registerChannel( ccb, intervalMillis );\n        }\n    }\n\n    @Override\n    public void registerChannel( ChannelControlBlock ccb, long intervalMillis ) {\n        if ( this.mHeartbeatTasks.containsKey( ccb ) ) {\n            return;\n        }\n        Timeout timeout = this.scheduleHeartbeat( ccb, intervalMillis );\n        this.mHeartbeatTasks.put( ccb, timeout );\n    }\n\n    @Override\n    public void deregisterChannel( ChannelControlBlock ccb ) {\n        Timeout timeout = this.mHeartbeatTasks.remove( ccb );\n        if ( timeout != null ) {\n            timeout.cancel();\n        }\n    }\n\n    protected Timeout scheduleHeartbeat( ChannelControlBlock ccb, long intervalMillis ) {\n        return this.mTimer.newTimeout( new HeartbeatTask( ccb, intervalMillis ), intervalMillis, TimeUnit.MILLISECONDS );\n    }\n\n    protected class HeartbeatTask implements TimerTask {\n        private final ChannelControlBlock  mChannelControlBlock;\n        private final long                 mIntervalMillis;\n\n        HeartbeatTask( ChannelControlBlock ccb, long intervalMillis ) {\n            this.mChannelControlBlock = ccb;\n            this.mIntervalMillis = intervalMillis;\n        }\n\n        @Override\n        public void run( Timeout timeout ) throws IOException {\n            if ( !HuskyHeartbeatControl.this.mHeartbeatTasks.containsKey( this.mChannelControlBlock ) ) {\n                return;\n            }\n\n            if ( !this.mChannelControlBlock.isShutdown() ) {\n     
           HuskyHeartbeatControl.this.sendHeartbeat( this.mChannelControlBlock );\n                Timeout newTimeout = HuskyHeartbeatControl.this.scheduleHeartbeat( this.mChannelControlBlock, this.mIntervalMillis );\n                HuskyHeartbeatControl.this.mHeartbeatTasks.put( mChannelControlBlock, newTimeout );\n            }\n        }\n    }\n\n    protected void sendHeartbeat( ChannelControlBlock ccb ) throws IOException {\n        if ( ccb.getChannelStatus().isAsynAvailable() && !ccb.isShutdown() ) {\n            ccb.sendMsg( HeartbeatConstants.HCTP_HEART_ALIVE, true );\n        }\n    }\n\n    @Override\n    public boolean interceptFeedback( ChannelControlBlock block, UMCMessage msg ) throws IOException {\n        int nControlBits = msg.getHead().getControlBits();\n        if ( nControlBits == HeartbeatConstants.HCTP_HEART_RESPONSE_ACK ) {\n            //Debug.traceSyn( msg );\n            // Do nothing. [Keep the format]\n            return true;\n        }\n        return false;\n    }\n\n    @Override\n    public void shutdown() {\n        this.mTimer.stop();\n        this.mHeartbeatTasks.clear();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/heartbeat/HuskyHeartbeatFeedbackor.java",
    "content": "package com.pinecone.hydra.umct.husky.heartbeat;\n\nimport java.io.IOException;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.vita.HeartbeatFeedbackor;\n\npublic class HuskyHeartbeatFeedbackor implements HeartbeatFeedbackor {\n\n    public HuskyHeartbeatFeedbackor() {\n\n    }\n\n    @Override\n    public boolean interceptHeartbeat( ChannelControlBlock block, UMCMessage msg ) throws IOException {\n        int nControlBits = msg.getHead().getControlBits();\n        if ( nControlBits == HeartbeatConstants.HCTP_HEART_REQUEST_ALIVE ) {\n            this.feedback( block, msg );\n            return true;\n        }\n        return false;\n    }\n\n    @Override\n    public void feedback( ChannelControlBlock block, UMCMessage msg ) throws IOException {\n        if ( block.getChannelStatus().isAsynAvailable() && !block.isShutdown() ) {\n            block.sendMsg( HeartbeatConstants.HCTP_HEART_ACK, true );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/ArchRouteDispatcher.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport java.lang.reflect.Method;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.stream.Collectors;\n\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.umct.MessageDeliver;\nimport com.pinecone.hydra.umct.MessageExpress;\nimport com.pinecone.hydra.umct.MessageHandler;\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;\nimport com.pinecone.hydra.umct.husky.compiler.IfaceMappingDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.mapping.InspectException;\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\nimport com.pinecone.hydra.umct.stereotype.IfaceUtils;\n\nimport javassist.NotFoundException;\n\npublic abstract class ArchRouteDispatcher implements RouteDispatcher {\n\n    protected MCTContextMachinery     mMCTContextMachinery;\n    protected UMCTExpress             mUMCTExpress;\n    protected MessageDeliver          mDefaultDeliver;\n\n    protected ArchRouteDispatcher() {\n\n    }\n\n\n\n    protected void registerInstance( MessageDeliver deliver, Object instance, Class<?> iface ) {\n        if ( !iface.isInterface() ) {\n            throw new IllegalArgumentException( \"The provided class is not an interface: \" + iface.getName() );\n        }\n\n        List<MethodDigest> digests = this.compile( iface, false ).getMethodDigests();\n        Map<String, MethodDigest > digestMap = digests.stream()\n                .collect( Collectors.toMap(MethodDigest::getName, digest -> digest) );\n\n        Method[] methods = iface.getMethods();\n        for ( Method method : methods ) {\n            String methodName = IfaceUtils.getIfaceMethodName( method );\n\n            
DynamicMethodPrototype digest = (DynamicMethodPrototype)digestMap.get( methodName );\n\n            String fullPath = digest.getFullName();\n\n            MessageHandler handler = new MessageHandler() {\n                @Override\n                public String getAddressMapping() {\n                    return digest.getFullName();\n                }\n\n                @Override\n                public Object invoke( Object... args ) throws Exception {\n                    return method.invoke( instance, args );\n                }\n\n                @Override\n                public List<String> getArgumentsKey() {\n                    return digest.getArgumentsKey();\n                }\n\n                @Override\n                public Object getReturnDescriptor() {\n                    return digest.getReturnDescriptor();\n                }\n\n                @Override\n                public String getReturnGenericLabel() {\n                    return digest.getGenericReturnTypeLabel();\n                }\n\n                @Override\n                public Object getArgumentsDescriptor() {\n                    return digest.getArgumentsDescriptor();\n                }\n\n                @Override\n                public FieldEntity[] getArgumentTemplate() {\n                    return digest.getArgumentTemplate().getSegments();\n                }\n\n            };\n\n            deliver.registerHandler( fullPath, handler );\n            this.mMCTContextMachinery.getMessageHandlerMap().put( fullPath, handler );\n        }\n    }\n\n    @Override\n    public void setUMCTExpress( UMCTExpress handler ) {\n        this.mUMCTExpress = handler;\n    }\n\n    @Override\n    public MCTContextMachinery getContextMachinery() {\n        return this.mMCTContextMachinery;\n    }\n\n    @Override\n    public UMCTExpress getUMCTExpress() {\n        return this.mUMCTExpress;\n    }\n\n    @Override\n    public MessageExpress register( Deliver deliver ) {\n        return 
this.mUMCTExpress.register( deliver );\n    }\n\n    @Override\n    public MessageExpress  fired   ( Deliver deliver ) {\n        return this.mUMCTExpress.fired( deliver );\n    }\n\n    @Override\n    public MessageDeliver getDeliver( String name ) {\n        return this.mUMCTExpress.getDeliver( name );\n    }\n\n    @Override\n    public MessageDeliver getDefaultDeliver() {\n        return this.mDefaultDeliver;\n    }\n\n    @Override\n    public InterfacialCompiler getInterfacialCompiler() {\n        return this.mMCTContextMachinery.getInterfacialCompiler();\n    }\n\n\n    @Override\n    public void registerInstance( String deliverName, Object instance, Class<?> iface ) {\n        MessageDeliver deliver = this.getDeliver( deliverName );\n        if ( deliver == null ) {\n            throw new IllegalArgumentException( \"No such deliver: \" + deliverName );\n        }\n\n        this.registerInstance( deliver, instance, iface );\n    }\n\n    @Override\n    public void registerInstance( Object instance, Class<?> iface ) {\n        this.registerInstance( this.mDefaultDeliver, instance, iface );\n    }\n\n    protected void registerController( MessageDeliver deliver, Object instance, Class<?> controllerType ) {\n        try {\n            List<MappingDigest> digests   = this.mMCTContextMachinery.getControllerInspector().characterize( controllerType );\n            List<IfaceMappingDigest>  ifs = this.getInterfacialCompiler().compile( digests );\n\n            for ( IfaceMappingDigest imd : ifs ) {\n                String[] addresses = imd.getAddresses();\n                for ( int i = 0; i < addresses.length; ++i ) {\n                    String address = addresses[ i ];\n\n                    MessageHandler handler = new MessageHandler() {\n                        @Override\n                        public String getAddressMapping() {\n                            return address;\n                        }\n\n                        @Override\n                       
 public Object invoke( Object... args ) throws Exception {\n                            return imd.getMappedMethod().invoke( instance, args );\n                        }\n\n                        @Override\n                        public List<String> getArgumentsKey() {\n                            return imd.getArgumentsKey();\n                        }\n\n                        @Override\n                        public Object getReturnDescriptor() {\n                            return imd.getReturnDescriptor();\n                        }\n\n                        @Override\n                        public String getReturnGenericLabel() {\n                            return imd.getReturnGenericTypeLabel();\n                        }\n\n                        @Override\n                        public Object getArgumentsDescriptor() {\n                            return imd.getArgumentsDescriptor();\n                        }\n\n                        @Override\n                        public FieldEntity[] getArgumentTemplate() {\n                            return imd.getArgumentTemplate().getSegments();\n                        }\n                    };\n\n                    deliver.registerHandler( address, handler );\n                    this.mMCTContextMachinery.getMessageHandlerMap().put( address, handler );\n                }\n            }\n        }\n        catch ( NotFoundException e ) {\n            throw new InspectException( e );\n        }\n    }\n\n    @Override\n    public void registerController( String deliverName, Object instance, Class<?> controllerType ) {\n        MessageDeliver deliver = this.getDeliver( deliverName );\n        if ( deliver == null ) {\n            throw new IllegalArgumentException( \"No such deliver: \" + deliverName );\n        }\n\n        this.registerController( deliver, instance, controllerType );\n    }\n\n    @Override\n    public void registerController( Object instance, Class<?> controllerType ) {\n        
this.registerController( this.mDefaultDeliver, instance, controllerType );\n    }\n\n    @Override\n    public ClassDigest queryClassDigest(String name ) {\n        return this.mMCTContextMachinery.queryClassDigest( name );\n    }\n\n    @Override\n    public MethodDigest queryMethodDigest( String name ) {\n        return this.mMCTContextMachinery.queryMethodDigest( name );\n    }\n\n    @Override\n    public void addClassDigest( ClassDigest that ) {\n        this.mMCTContextMachinery.addClassDigest( that );\n    }\n\n    @Override\n    public void addMethodDigest( MethodDigest that ) {\n        this.mMCTContextMachinery.addMethodDigest( that );\n    }\n\n    @Override\n    public ClassDigest compile( Class<? > clazz, boolean bAsIface ) {\n        return this.mMCTContextMachinery.compile( clazz, bAsIface );\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/DigestContextMachinery.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.framework.util.lang.ScopedPackage;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\n\n\n/**\n *  Pinecone Ursus For Java Hydra Ulfar, DigestContextMachinery\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n */\npublic class DigestContextMachinery extends DigestTransformer implements MCTContextMachinery {\n    protected DynamicFactory       mIfaceFactory;\n    protected MultiMappingLoader   mMultiMappingLoader;\n\n    public DigestContextMachinery( InterfacialCompiler compiler, ControllerInspector controllerInspector ) {\n        super( compiler, controllerInspector );\n\n        this.mIfaceFactory       = new GenericDynamicFactory( controllerInspector.getClassLoader() );\n        this.mMultiMappingLoader = new DigestMappingLoader( this.mIfaceFactory, this );\n    }\n\n    @Override\n    public MultiMappingLoader getMultiMappingLoader() {\n        return this.mMultiMappingLoader;\n    }\n\n    @Override\n    public MCTContextMachinery addScope ( String szPackageName ) {\n        this.mIfaceFactory.getClassScope().addScope( szPackageName );\n        return this;\n    }\n\n    @Override\n    public MCTContextMachinery addScope ( ScopedPackage scope ) {\n        this.mIfaceFactory.getClassScope().addScope( scope );\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/DigestMappingLoader.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.ulf.util.lang.ArchMultiScopeLoader;\nimport com.pinecone.ulf.util.lang.GenericPreloadClassInspector;\nimport com.pinecone.ulf.util.lang.PooledClassCandidateScanner;\n\nimport javassist.ClassPool;\nimport javassist.bytecode.annotation.Annotation;\n\npublic class DigestMappingLoader extends ArchMultiScopeLoader implements MultiMappingLoader {\n    protected MCTContextMachinery mMCTContextMachinery;\n\n    protected DigestMappingLoader( ClassScope classScope, ClassLoader classLoader, ClassPool classPool, MCTContextMachinery machinery ) {\n        super( classScope, classLoader, classPool, null, null );\n\n        this.mMCTContextMachinery = machinery;\n        this.mClassScanner         = new PooledClassCandidateScanner( new HuskyMappingScopeSet( this.mClassLoader ), this.mClassLoader, this.mClassPool );\n        this.mClassInspector       = new GenericPreloadClassInspector( this.mClassPool );\n        this.mClassScanner.addExcludeFilter( new ExcludeDigestMappingFilters( this.mClassInspector, this.mMCTContextMachinery) );\n    }\n\n    protected DigestMappingLoader( ClassScope classScope, ClassLoader classLoader, MCTContextMachinery marshal ) {\n        this( classScope, classLoader, ClassPool.getDefault(), marshal );\n    }\n\n    public DigestMappingLoader( DynamicFactory factory, MCTContextMachinery marshal ) {\n        this( factory.getClassScope(), factory.getClassLoader(), marshal );\n    }\n\n    @Override\n    protected boolean isAnnotationQualified( Annotation that, String szName ) {\n        return false;\n    }\n\n    @Override\n    public Class<? > load( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? 
> )super.load( simpleName );\n    }\n\n    // Directly by it`s name.\n    @Override\n    public Class<? > loadByName( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? > )super.loadByName( simpleName );\n    }\n\n    // Scanning class`s annotations, methods or others.\n    @Override\n    public Class<? > loadInClassTrait( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? > )super.loadInClassTrait( simpleName );\n    }\n\n    @Override\n    protected Class<? > loadSingleByFullClassName( String szFullClassName ) {\n        try {\n            Class<?> clazz = this.mClassLoader.loadClass( szFullClassName );\n            if( this.filter( clazz ) ) {\n                return null;\n            }\n\n            return clazz;\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public MultiMappingLoader updateScope() {\n        return (MultiMappingLoader)super.updateScope();\n    }\n\n    @Override\n    public void clearCache() {\n        this.mLoadedClassesPool.clear();\n        this.mVisitedClasses.clear();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/DigestTransformer.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.hydra.umct.MessageHandler;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\n\npublic class DigestTransformer implements MCTTransformer {\n    protected InterfacialCompiler          mInterfacialCompiler;\n\n    protected Map<String, ClassDigest >    mClassDigests;\n\n    protected Map<String, MethodDigest >   mMethodDigests;\n\n    protected ControllerInspector          mControllerInspector;\n\n    protected List<MappingDigest >         mMappingDigests;\n\n    protected Map<String, MessageHandler>  mMessageHandlerMap;\n\n    public DigestTransformer(InterfacialCompiler compiler, ControllerInspector controllerInspector ) {\n        this.mInterfacialCompiler   = compiler;\n        this.mControllerInspector   = controllerInspector;\n        this.mClassDigests          = new LinkedHashMap<>();\n        this.mMethodDigests         = new LinkedHashMap<>();\n\n        this.mMessageHandlerMap     = new HashMap<>();\n        this.mMappingDigests        = new ArrayList<>();\n    }\n\n    @Override\n    public InterfacialCompiler getInterfacialCompiler() {\n        return this.mInterfacialCompiler;\n    }\n\n    @Override\n    public ControllerInspector getControllerInspector() {\n        return this.mControllerInspector;\n    }\n\n    @Override\n    public List<MappingDigest > getMappingDigests() {\n        return this.mMappingDigests;\n    }\n\n    @Override\n    public Map<String, MessageHandler> getMessageHandlerMap() {\n        return this.mMessageHandlerMap;\n    }\n\n    @Override\n    public 
ClassDigest queryClassDigest( String name ) {\n        return this.mClassDigests.get( name );\n    }\n\n    @Override\n    public MethodDigest queryMethodDigest( String name ) {\n        return this.mMethodDigests.get( name );\n    }\n\n    @Override\n    public void addClassDigest( ClassDigest that ) {\n        this.mClassDigests.put( that.getClassName(), that );\n        List<MethodDigest> digests = that.getMethodDigests();\n        for ( MethodDigest digest : digests ) {\n            this.addMethodDigest( digest );\n        }\n    }\n\n    @Override\n    public void addMethodDigest( MethodDigest that ) {\n        this.mMethodDigests.put( that.getFullName(), that );\n    }\n\n    @Override\n    public ClassDigest compile( Class<? > clazz, boolean bAsIface ) {\n        ClassDigest neo = this.mInterfacialCompiler.compile( clazz, bAsIface );\n        this.addClassDigest( neo );\n        return neo;\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/ExcludeDigestMappingFilters.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport java.io.IOException;\nimport java.util.List;\n\nimport com.pinecone.framework.util.lang.TypeFilter;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\nimport com.pinecone.ulf.util.lang.HierarchyClassInspector;\n\nimport javassist.NotFoundException;\n\npublic class ExcludeDigestMappingFilters implements TypeFilter {\n    protected HierarchyClassInspector mClassInspector;\n    protected MCTContextMachinery     mMCTContextMachinery;\n\n    public ExcludeDigestMappingFilters(HierarchyClassInspector inspector, MCTContextMachinery marshal ) {\n        this.mClassInspector = inspector;\n        this.mMCTContextMachinery = marshal;\n    }\n\n    @Override\n    public boolean match( String szClassName, Object pool ) throws IOException {\n        boolean isIface = this.scanIface( szClassName, pool );\n        if ( isIface ) {\n            return false;\n        }\n\n        boolean isController = this.scanController( szClassName, pool );\n        return !isController;\n    }\n\n    protected boolean scanIface( String szClassName, Object pool ) throws IOException {\n        ClassDigest classDigest = this.mMCTContextMachinery.getInterfacialCompiler().compile( szClassName, false );\n        if ( classDigest != null ) {\n            this.mMCTContextMachinery.addClassDigest( classDigest );\n            return true;\n        }\n        return false;\n    }\n\n    protected boolean scanController( String szClassName, Object pool ) throws IOException {\n        try{\n            List<MappingDigest > mappingDigests = this.mMCTContextMachinery.getControllerInspector().characterize( szClassName );\n            if ( mappingDigests != null && !mappingDigests.isEmpty() ) {\n                this.mMCTContextMachinery.addAll( mappingDigests );\n                return true;\n            }\n        }\n        catch ( NotFoundException e ) {\n            
return false;\n        }\n\n        return false;\n    }\n\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyContextMachinery.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.framework.util.lang.ScopedPackage;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\n\n\n/**\n *  Pinecone Ursus For Java Hydra Ulfar, Husky Machinery\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Husky Transformer | Husky Machinery\n *  *****************************************************************************************\n */\npublic class HuskyContextMachinery extends HuskyTransformer implements PMCTContextMachinery {\n    protected DynamicFactory       mIfaceFactory;\n    protected MultiMappingLoader   mMultiMappingLoader;\n\n    public HuskyContextMachinery( ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, FieldProtobufDecoder decoder ) {\n        super( compiler, controllerInspector, decoder );\n\n        this.mIfaceFactory       = new GenericDynamicFactory( controllerInspector.getClassLoader() );\n        this.mMultiMappingLoader = new HuskyMappingLoader( this.mIfaceFactory, this );\n    }\n\n    @Override\n    public MultiMappingLoader getMultiMappingLoader() {\n        return this.mMultiMappingLoader;\n    }\n\n    @Override\n    public PMCTContextMachinery addScope ( String szPackageName ) {\n        this.mIfaceFactory.getClassScope().addScope( szPackageName );\n        return this;\n    }\n\n    @Override\n    public PMCTContextMachinery addScope ( ScopedPackage scope ) {\n        this.mIfaceFactory.getClassScope().addScope( scope );\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyMappingLoader.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.ulf.util.lang.ArchMultiScopeLoader;\nimport com.pinecone.ulf.util.lang.GenericPreloadClassInspector;\nimport com.pinecone.ulf.util.lang.PooledClassCandidateScanner;\n\nimport javassist.ClassPool;\nimport javassist.bytecode.annotation.Annotation;\n\npublic class HuskyMappingLoader extends ArchMultiScopeLoader implements MultiMappingLoader {\n    protected PMCTContextMachinery mPMCTContextMachinery;\n\n    protected HuskyMappingLoader( ClassScope classScope, ClassLoader classLoader, ClassPool classPool, PMCTContextMachinery machinery ) {\n        super( classScope, classLoader, classPool, null, null );\n\n        this.mPMCTContextMachinery = machinery;\n        this.mClassScanner         = new PooledClassCandidateScanner( new HuskyMappingScopeSet( this.mClassLoader ), this.mClassLoader, this.mClassPool );\n        this.mClassInspector       = new GenericPreloadClassInspector( this.mClassPool );\n        this.mClassScanner.addExcludeFilter( new ExcludeDigestMappingFilters( this.mClassInspector, this.mPMCTContextMachinery) );\n    }\n\n    protected HuskyMappingLoader( ClassScope classScope, ClassLoader classLoader, PMCTContextMachinery marshal ) {\n        this( classScope, classLoader, ClassPool.getDefault(), marshal );\n    }\n\n    public HuskyMappingLoader( DynamicFactory factory, PMCTContextMachinery marshal ) {\n        this( factory.getClassScope(), factory.getClassLoader(), marshal );\n    }\n\n    @Override\n    protected boolean isAnnotationQualified( Annotation that, String szName ) {\n        return false;\n    }\n\n    @Override\n    public Class<? > load( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? 
> )super.load( simpleName );\n    }\n\n    // Directly by it`s name.\n    @Override\n    public Class<? > loadByName( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? > )super.loadByName( simpleName );\n    }\n\n    // Scanning class`s annotations, methods or others.\n    @Override\n    public Class<? > loadInClassTrait( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? > )super.loadInClassTrait( simpleName );\n    }\n\n    @Override\n    protected Class<? > loadSingleByFullClassName( String szFullClassName ) {\n        try {\n            Class<?> clazz = this.mClassLoader.loadClass( szFullClassName );\n            if( this.filter( clazz ) ) {\n                return null;\n            }\n\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n\n        return null;\n    }\n\n    @Override\n    public MultiMappingLoader updateScope() {\n        return (MultiMappingLoader)super.updateScope();\n    }\n\n    @Override\n    public void clearCache() {\n        this.mLoadedClassesPool.clear();\n        this.mVisitedClasses.clear();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyMappingScopeSet.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport java.util.Set;\n\nimport com.pinecone.framework.unit.LinkedTreeSet;\nimport com.pinecone.framework.util.lang.ArchClassScopeSet;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.ScopedPackage;\n\npublic class HuskyMappingScopeSet extends ArchClassScopeSet {\n    public HuskyMappingScopeSet( Set<ScopedPackage> scope, ClassLoader classLoader ) {\n        super( scope, classLoader );\n    }\n\n    public HuskyMappingScopeSet( ClassLoader classLoader ) {\n        super( new LinkedTreeSet<>(), classLoader );\n    }\n\n    public HuskyMappingScopeSet( DynamicFactory factory ) {\n        super( new LinkedTreeSet<>(), factory.getClassLoader() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyRouteDispatcher.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.stream.Collectors;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.umct.MessageDeliver;\nimport com.pinecone.hydra.umct.MessageExpress;\nimport com.pinecone.hydra.umct.MessageHandler;\nimport com.pinecone.hydra.umct.MessageJunction;\nimport com.pinecone.hydra.umct.ProtoletMsgDeliver;\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.hydra.uma.AppointServer;\nimport com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.CompilerEncoder;\nimport com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;\nimport com.pinecone.hydra.umct.husky.compiler.IfaceMappingDigest;\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.mapping.BytecodeControllerInspector;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\nimport com.pinecone.hydra.umct.mapping.InspectException;\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\nimport com.pinecone.hydra.umct.stereotype.IfaceUtils;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder;\n\nimport javassist.ClassPool;\nimport javassist.NotFoundException;\n\npublic class HuskyRouteDispatcher extends ArchRouteDispatcher implements ProtoRouteDispatcher {\n\n    protected void applyExpress( ProtoInterfacialCompiler compiler, UMCTExpress express ) {\n        
this.mUMCTExpress = express;\n\n        this.mDefaultDeliver      = new ProtoletMsgDeliver( AppointServer.DefaultEntityName, this.mUMCTExpress, this.getContextMachinery(), compiler.getCompilerEncoder() );\n        this.mUMCTExpress.register( this.mDefaultDeliver  );\n    }\n\n    protected HuskyRouteDispatcher( ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector ) {\n        super();\n        this.mMCTContextMachinery = new HuskyContextMachinery( compiler, controllerInspector, new GenericFieldProtobufDecoder() );\n    }\n\n    public HuskyRouteDispatcher( PMCTContextMachinery machinery, UMCTExpress express ) {\n        super();\n        this.mMCTContextMachinery = machinery;\n        this.applyExpress( machinery.getInterfacialCompiler(), express );\n    }\n\n    public HuskyRouteDispatcher( ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, UMCTExpress express ) {\n        this( compiler, controllerInspector );\n        this.applyExpress( compiler, express );\n    }\n\n    public HuskyRouteDispatcher( CompilerEncoder encoder, UMCTExpress express, ClassLoader classLoader ) {\n        this( new BytecodeIfaceCompiler(\n                ClassPool.getDefault(), classLoader, encoder\n        ), new BytecodeControllerInspector(\n                ClassPool.getDefault(), classLoader\n        ), express );\n    }\n\n    public HuskyRouteDispatcher( UMCTExpress express, ClassLoader classLoader ) {\n        this( new BytecodeIfaceCompiler(\n                ClassPool.getDefault(), classLoader\n        ), new BytecodeControllerInspector(\n                ClassPool.getDefault(), classLoader\n        ), express );\n    }\n\n    public HuskyRouteDispatcher( Class<?> expressType, MessageJunction junction, ClassLoader classLoader ) {\n        this(\n                new BytecodeIfaceCompiler( ClassPool.getDefault(), classLoader ),\n                new BytecodeControllerInspector( ClassPool.getDefault(), classLoader )\n        );\n\n        
try {\n            Constructor<?> constructor = expressType.getConstructor( String.class, MessageJunction.class, Logger.class );\n            Logger logger ;\n            if ( junction instanceof Slf4jTraceable ) {\n                logger = ((Slf4jTraceable) junction).getLogger();\n            }\n            else {\n                logger = LoggerFactory.getLogger( this.getClass().getName() );\n            }\n\n            UMCTExpress express = (UMCTExpress) constructor.newInstance( AppointServer.DefaultEntityName, junction, logger );\n\n            this.applyExpress(\n                    this.getInterfacialCompiler(), express\n            );\n        }\n        catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            throw new IllegalArgumentException( \"`\" + expressType.getSimpleName() + \"` is not UMCTExpress calibre qualified.\" );\n        }\n    }\n\n    public HuskyRouteDispatcher( ClassLoader classLoader, boolean delayExpress ) {\n        this(\n                new BytecodeIfaceCompiler( ClassPool.getDefault(), classLoader ),\n                new BytecodeControllerInspector( ClassPool.getDefault(), classLoader )\n        );\n    }\n\n    public HuskyRouteDispatcher( MessageJunction junction, ClassLoader classLoader ) {\n        this( WolfMCExpress.class, junction, classLoader );\n    }\n\n\n\n\n    @Override\n    public PMCTContextMachinery getContextMachinery() {\n        return (PMCTContextMachinery) super.getContextMachinery();\n    }\n\n    @Override\n    public ProtoInterfacialCompiler getInterfacialCompiler() {\n        return (ProtoInterfacialCompiler) super.getInterfacialCompiler();\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyRouteDispatcherFabricator.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.UMCTExpress;\n\npublic class HuskyRouteDispatcherFabricator implements Pinenut {\n    public static void afterConstructed( HuskyRouteDispatcher dispatcher, UMCTExpress express ) {\n        dispatcher.applyExpress( dispatcher.getInterfacialCompiler(), express );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/HuskyTransformer.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\n\npublic class HuskyTransformer extends DigestTransformer implements PMCTTransformer {\n\n    protected FieldProtobufEncoder         mFieldProtobufEncoder;\n\n    protected FieldProtobufDecoder         mFieldProtobufDecoder;\n\n    public HuskyTransformer( ProtoInterfacialCompiler compiler, ControllerInspector controllerInspector, FieldProtobufDecoder decoder ) {\n        super( compiler, controllerInspector );\n\n        this.mFieldProtobufEncoder  = compiler.getCompilerEncoder().getEncoder();\n        this.mFieldProtobufDecoder  = decoder;\n    }\n\n    @Override\n    public ProtoInterfacialCompiler getInterfacialCompiler() {\n        return (ProtoInterfacialCompiler) super.getInterfacialCompiler();\n    }\n\n    @Override\n    public FieldProtobufEncoder getFieldProtobufEncoder() {\n        return this.mFieldProtobufEncoder;\n    }\n\n    @Override\n    public FieldProtobufDecoder getFieldProtobufDecoder() {\n        return this.mFieldProtobufDecoder;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/MCTContextMachinery.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.framework.util.lang.ScopedPackage;\n\npublic interface MCTContextMachinery extends MCTTransformer {\n\n    MCTContextMachinery addScope           ( String szPackageName );\n\n    MCTContextMachinery addScope           ( ScopedPackage scope );\n\n    MultiMappingLoader    getMultiMappingLoader();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/MCTTransformer.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.MessageHandler;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.mapping.ControllerInspector;\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\n\npublic interface MCTTransformer extends Pinenut {\n\n    InterfacialCompiler getInterfacialCompiler();\n\n    ControllerInspector getControllerInspector();\n\n    Map<String, MessageHandler> getMessageHandlerMap();\n\n    List<MappingDigest > getMappingDigests();\n\n    default void addAll( List<MappingDigest > digests ){\n        this.getMappingDigests().addAll( digests );\n    }\n\n    ClassDigest queryClassDigest( String name );\n\n    MethodDigest queryMethodDigest( String name );\n\n    void addClassDigest( ClassDigest that );\n\n    void addMethodDigest( MethodDigest that );\n\n    ClassDigest compile( Class<? > clazz, boolean bAsIface );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/MultiMappingLoader.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.lang.MultiClassScopeLoader;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.ulf.util.lang.MultiTraitClassLoader;\n\npublic interface MultiMappingLoader extends MultiClassScopeLoader, MultiTraitClassLoader {\n    @Override\n    Class<?> load( Name simpleName ) throws ClassNotFoundException ;\n\n    // Directly by it`s name.\n    @Override\n    Class<?> loadByName( Name simpleName ) throws ClassNotFoundException ;\n\n    // Scanning class`s annotations, methods or others.\n    @Override\n    Class<?> loadInClassTrait( Name simpleName ) throws ClassNotFoundException ;\n\n    @Override\n    MultiMappingLoader updateScope();\n\n    @Override\n    List<Class<? > > loads( Name name ) ;\n\n    @Override\n    List<Class<? > > loadsByName( Name simpleName );\n\n    @Override\n    List<Class<? > > loadsInClassTrait( Name simpleName ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/PMCTContextMachinery.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.framework.util.lang.ScopedPackage;\n\npublic interface PMCTContextMachinery extends PMCTTransformer, MCTContextMachinery {\n\n    @Override\n    PMCTContextMachinery addScope           ( String szPackageName );\n\n    @Override\n    PMCTContextMachinery addScope           ( ScopedPackage scope );\n\n    @Override\n    MultiMappingLoader    getMultiMappingLoader();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/PMCTTransformer.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.FieldProtobufEncoder;\n\npublic interface PMCTTransformer extends MCTTransformer {\n\n    @Override\n    ProtoInterfacialCompiler getInterfacialCompiler();\n\n    default FieldProtobufEncoder getFieldProtobufEncoder() {\n        return this.getInterfacialCompiler().getCompilerEncoder().getEncoder();\n    }\n\n    FieldProtobufDecoder getFieldProtobufDecoder();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/ProtoRouteDispatcher.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.hydra.umct.husky.compiler.ProtoInterfacialCompiler;\n\npublic interface ProtoRouteDispatcher extends RouteDispatcher {\n\n    @Override\n    PMCTContextMachinery getContextMachinery();\n\n    @Override\n    ProtoInterfacialCompiler getInterfacialCompiler();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/husky/machinery/RouteDispatcher.java",
    "content": "package com.pinecone.hydra.umct.husky.machinery;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.express.Deliver;\nimport com.pinecone.hydra.umct.MessageDeliver;\nimport com.pinecone.hydra.umct.MessageExpress;\nimport com.pinecone.hydra.umct.UMCTExpress;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.compiler.InterfacialCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\n\npublic interface RouteDispatcher extends Pinenut {\n    void setUMCTExpress( UMCTExpress handler );\n\n    MCTContextMachinery getContextMachinery();\n\n    UMCTExpress getUMCTExpress();\n\n    MessageExpress register( Deliver deliver );\n\n    MessageExpress  fired   ( Deliver deliver );\n\n    MessageDeliver getDeliver( String name );\n\n    MessageDeliver getDefaultDeliver();\n\n    InterfacialCompiler getInterfacialCompiler();\n\n\n    void registerInstance( String deliverName, Object instance, Class<?> iface ) ;\n\n    void registerInstance( Object instance, Class<?> iface );\n\n    void registerController( String deliverName, Object instance, Class<?> controllerType ) ;\n\n    void registerController( Object instance, Class<?> controllerType ) ;\n\n    default void registerController( Object instance ) {\n        this.registerController( instance, instance.getClass() );\n    }\n\n    ClassDigest queryClassDigest( String name );\n\n    MethodDigest queryMethodDigest( String name ) ;\n\n    void addClassDigest( ClassDigest that );\n\n    void addMethodDigest( MethodDigest that );\n\n    ClassDigest compile( Class<? > clazz, boolean bAsIface ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/lets/MessageServiceScanner.java",
    "content": "package com.pinecone.hydra.umct.lets;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.ClassScopeNSProtocolIteratorsFactory;\nimport com.pinecone.framework.util.lang.NSProtocolIteratorsFactoryAdapter;\nimport com.pinecone.ulf.util.lang.GenericPreloadClassInspector;\nimport com.pinecone.ulf.util.lang.HierarchyClassInspector;\nimport com.pinecone.ulf.util.lang.PooledClassCandidateScanner;\nimport com.pinecone.ulf.util.lang.SimpleAnnotationExcludeFilter;\nimport javassist.ClassPool;\n\npublic class MessageServiceScanner extends PooledClassCandidateScanner implements MessageletScanner {\n    protected HierarchyClassInspector mClassInspector     ;\n\n    public MessageServiceScanner     ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory, ClassPool classPool ) {\n        super( searchScope, classLoader, iteratorsFactory, classPool );\n\n        this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool );\n        this.addExcludeFilter( new SimpleAnnotationExcludeFilter( this.mClassInspector, MsgService.class ) );\n    }\n\n    public MessageServiceScanner     ( ClassScope searchScope, ClassLoader classLoader, ClassPool classPool ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), classPool );\n    }\n\n    public MessageServiceScanner     ( ClassScope searchScope, ClassLoader classLoader ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), ClassPool.getDefault() );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/lets/MessageletScanner.java",
    "content": "package com.pinecone.hydra.umct.lets;\n\nimport com.pinecone.framework.util.lang.ClassScanner;\n\npublic interface MessageletScanner extends ClassScanner {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/lets/MsgService.java",
    "content": "package com.pinecone.hydra.umct.lets;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface MsgService {\n    String value() default \"\";\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/ArchMappingInspector.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport java.lang.reflect.Array;\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.hydra.umct.bind.ArgParam;\nimport com.pinecone.ulf.util.lang.GenericPreloadClassInspector;\n\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\nimport javassist.bytecode.ParameterAnnotationsAttribute;\nimport javassist.bytecode.annotation.Annotation;\n\npublic abstract class ArchMappingInspector extends GenericPreloadClassInspector implements MappingInspector {\n    protected ClassLoader     mClassLoader;\n\n    public ArchMappingInspector( ClassPool classPool, ClassLoader classLoader ) {\n        super( classPool );\n\n        this.mClassLoader = classLoader;\n    }\n\n    @Override\n    public List<ParamsDigest> inspectArgParams( Object methodDigest, CtMethod method ) {\n        List<ParamsDigest> argParams = null;\n\n        ParameterAnnotationsAttribute paramAnnotationsAttr = ( ParameterAnnotationsAttribute) method.getMethodInfo().\n                getAttribute(ParameterAnnotationsAttribute.visibleTag );\n\n        if ( paramAnnotationsAttr != null ) {\n            Annotation[][] parameterAnnotations = paramAnnotationsAttr.getAnnotations();\n            if ( parameterAnnotations.length > 0 ) {\n                argParams = new ArrayList<>();\n            }\n\n            for ( int i = 0; i < parameterAnnotations.length; ++i ) {\n                for ( Annotation annotation : parameterAnnotations[ i ] ) {\n                    if ( ArgParam.class.getName().equals(annotation.getTypeName()) ) {\n                        String name   = annotation.getMemberValue(\"name\") != null ? annotation.getMemberValue(\"name\").toString() : null;\n                        String value  = annotation.getMemberValue(\"value\") != null ? 
annotation.getMemberValue(\"value\").toString() : null;\n                        String defVal = annotation.getMemberValue(\"defaultValue\") != null ? annotation.getMemberValue(\"defaultValue\").toString() : null;\n\n                        boolean required =\n                                annotation.getMemberValue(\"required\") == null ||\n                                        Boolean.parseBoolean(annotation.getMemberValue(\"required\").toString());\n\n                        argParams.add( this.newParamsDigest(\n                                methodDigest, i, this.annotationKeyNormalize(name), this.annotationKeyNormalize(value), this.annotationKeyNormalize(defVal), required )\n                        );\n                    }\n                }\n            }\n        }\n\n        return argParams;\n    }\n\n    protected ParamsDigest newParamsDigest( Object methodDigest, int parameterIndex, String name, String value, String defaultValue, boolean required ) {\n        return new GenericParamsDigest(\n                parameterIndex, this.annotationKeyNormalize(name), this.annotationKeyNormalize(value), this.annotationKeyNormalize(defaultValue), required\n        );\n    }\n\n    protected String annotationKeyNormalize( String bad ) {\n        if ( bad != null ) {\n            bad = bad.trim();\n            if ( bad.startsWith( \"\\\"\" ) ) {\n                return bad.replace( \"\\\"\", \"\" );\n            }\n        }\n        return bad;\n    }\n\n    protected Class<?> reinterpretClass( String className ) throws ClassNotFoundException {\n        switch (className) {\n            case \"boolean\": {\n                return boolean.class;\n            }\n            case \"byte\": {\n                return byte.class;\n            }\n            case \"char\": {\n                return char.class;\n            }\n            case \"short\": {\n                return short.class;\n            }\n            case \"int\": {\n                return 
int.class;\n            }\n            case \"long\": {\n                return long.class;\n            }\n            case \"float\": {\n                return float.class;\n            }\n            case \"double\": {\n                return double.class;\n            }\n            case \"void\": {\n                return void.class;\n            }\n            default:\n                if ( className.endsWith( \"[]\" ) ) {\n                    String elementTypeName = className.substring( 0, className.length() - 2 );\n                    Class<?> elementType = this.reinterpretClass( elementTypeName );\n                    return Array.newInstance( elementType, 0 ).getClass();\n                }\n                return this.mClassLoader.loadClass(className);\n        }\n    }\n\n    protected <T > T getAnnotation( CtClass ctClass, Class<T > annotationClass ) {\n        try {\n            Object rawAnnotation = ctClass.getAnnotation( annotationClass );\n            return annotationClass.cast( rawAnnotation );\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n    }\n\n    protected <T> T getAnnotation( CtMethod ctMethod, Class<T> annotationClass ) {\n        try {\n            Object rawAnnotation = ctMethod.getAnnotation( annotationClass );\n            return annotationClass.cast( rawAnnotation );\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n    }\n\n    protected Class<? >[] getParameters ( CtMethod method ) throws ClassNotFoundException {\n        CtClass[] pars;\n        try{\n            pars = method.getParameterTypes();\n        }\n        catch ( NotFoundException e ) {\n            pars = null;\n        }\n\n        Class<? 
>[] parameters;\n        if( pars != null ) {\n            parameters = new Class<?>[ pars.length ];\n\n            for ( int i = 0; i < pars.length; ++i ) {\n                CtClass par = pars[ i ];\n\n                String parName = par.getName();\n                Class<? > pc = this.reinterpretClass( parName );\n                parameters[ i ] = pc;\n            }\n        }\n        else {\n            parameters = null;\n        }\n\n        return parameters;\n    }\n\n    @Override\n    public ClassLoader getClassLoader() {\n        return this.mClassLoader;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/BytecodeControllerInspector.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.hydra.umc.msg.UMCMethod;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\n\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\n\npublic class BytecodeControllerInspector extends ArchMappingInspector implements ControllerInspector {\n    public BytecodeControllerInspector( ClassPool classPool, ClassLoader classLoader ) {\n        super( classPool, classLoader );\n    }\n\n    public BytecodeControllerInspector( ClassPool classPool ) {\n        this( classPool, Thread.currentThread().getContextClassLoader() );\n    }\n\n    @Override\n    public List<CtMethod > inspect( String className ) throws NotFoundException {\n        List<CtMethod> mappingMethods = new ArrayList<>();\n        CtClass ctClass = this.mClassPool.get( className );\n\n        boolean classHasControllerAnnotation = this.hasOwnAnnotation( ctClass, Controller.class );\n\n        if ( classHasControllerAnnotation ) {\n            for ( CtMethod method : ctClass.getDeclaredMethods() ) {\n                if ( Modifier.isPublic( method.getModifiers() ) ) {\n                    if ( this.methodHasAnnotation( method, AddressMapping.class ) ) {\n                        mappingMethods.add( method );\n                    }\n                }\n            }\n        }\n\n        return mappingMethods;\n    }\n\n    @Override\n    public List<CtMethod> inspect( Class<?> clazz ) throws NotFoundException {\n        return this.inspect( clazz.getName() );\n    }\n\n    @Override\n    public List<MappingDigest > characterize( String className ) throws NotFoundException {\n        try{\n            List<MappingDigest > mappingDigests = new ArrayList<>();\n\n            CtClass ctClass = 
this.mClassPool.get(className);\n\n            if ( !this.hasOwnAnnotation( ctClass, Controller.class ) ) {\n                return mappingDigests;\n            }\n\n            AddressMapping classMapping = this.getAnnotation( ctClass, AddressMapping.class );\n            String[] classLevelMappings;\n            if ( classMapping != null )  {\n                classLevelMappings = classMapping.value();\n            }\n            else  {\n                classLevelMappings = new String[]{};\n            }\n\n            for ( CtMethod method : ctClass.getDeclaredMethods() ) {\n                if ( !Modifier.isPublic( method.getModifiers() ) || !this.methodHasAnnotation( method, AddressMapping.class ) ) {\n                    continue;\n                }\n\n                AddressMapping methodMapping = this.getAnnotation( method, AddressMapping.class );\n                if ( methodMapping == null ) {\n                    continue; // Method must have an explicit `AddressMapping`.\n                }\n                String[] methodLevelMappings = methodMapping.value();\n                boolean isRelative = methodMapping.relative();\n\n                if ( methodLevelMappings.length == 0 && methodMapping.selfMappable() ) {\n                    methodLevelMappings = new String[]{ method.getName() };\n                }\n\n                List<String > fullAddresses = new ArrayList<>();\n                for ( String classMappingValue : classLevelMappings ) {\n                    for ( String methodMappingValue : methodLevelMappings ) {\n                        if ( isRelative ) {\n                            fullAddresses.add( classMappingValue + methodMappingValue );\n                        }\n                        else {\n                            fullAddresses.add( methodMappingValue );\n                        }\n                    }\n                }\n\n                List<ParamsDigest> paramsDigests = this.inspectArgParams( null, method );\n               
 Class<? >[] parameters = this.getParameters( method );\n\n                String[] parameterTypes = BytecodeControllerInspector.evalGenericParameterTypes( method );\n                String   returnGType    = BytecodeControllerInspector.evalGenericReturnType( method );\n\n                Class<? > auth         = this.reinterpretClass( className );\n                Method mappedMethod    = auth.getMethod( method.getName(), parameters );\n                UMCMethod[] intMethods = methodMapping.method();\n                MappingDigest digest   = new GenericMappingDigest(\n                        fullAddresses.isEmpty() ? methodLevelMappings : fullAddresses.toArray( new String[ 0 ] ),\n                        parameters,\n                        parameterTypes,\n                        this.reinterpretClass( method.getReturnType().getName() ),\n                        returnGType,\n                        auth,\n                        mappedMethod,\n                        paramsDigests,\n                        intMethods\n                );\n\n                digest.apply( paramsDigests );\n                mappingDigests.add( digest );\n            }\n\n            return mappingDigests;\n        }\n        catch ( ClassNotFoundException | NoSuchMethodException e ) {\n            throw new InspectException( e );\n        }\n    }\n\n    @Override\n    public List<MappingDigest > characterize( Class<?> clazz ) throws NotFoundException {\n        return this.characterize( clazz.getName() );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/ControllerInspector.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport java.util.List;\n\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\n\npublic interface ControllerInspector extends MappingInspector {\n    List<CtMethod> inspect( String className ) throws NotFoundException;\n\n    List<CtMethod > inspect( Class<?> clazz ) throws NotFoundException;\n\n    List<MappingDigest > characterize( String className ) throws NotFoundException;\n\n    List<MappingDigest > characterize( Class<?> clazz ) throws NotFoundException;\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/GenericMappingDigest.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport java.lang.reflect.Method;\nimport java.util.List;\n\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.lang.field.GenericStructure;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\nimport com.pinecone.hydra.umct.husky.function.MethodTemplates;\n\npublic class GenericMappingDigest implements MappingDigest {\n    protected String[]                mszAddresses;\n\n    protected UMCMethod[]             mInterceptMethods;\n\n    protected DataStructureEntity     mArgumentTemplate;\n\n    protected Class<?>                mReturnType;\n\n    protected Class<?>                mClassType;\n\n    protected String                  mszReturnGenericTypeLabel;\n\n    protected Method                  mMappedMethod;\n\n    protected List<ParamsDigest>      mParamsDigests;\n\n    protected GenericMappingDigest() {\n\n    }\n\n    public GenericMappingDigest(\n            String[] szAddresses,\n            Class<?>[] parameters, String[] parametersGenericLabels,\n            Class<?> returnType, String szReturnGenericTypeLabel,\n            Class<?> classType, Method method, List<ParamsDigest> paramsDigests, UMCMethod[] interceptMethods\n    ) {\n        this.mszAddresses               = szAddresses;\n        this.mReturnType                = returnType;\n        this.mszReturnGenericTypeLabel  = szReturnGenericTypeLabel;\n        this.mParamsDigests             = paramsDigests;\n        this.mClassType                 = classType;\n        this.mMappedMethod              = method;\n        this.mInterceptMethods          = interceptMethods;\n\n\n        String szDominatedAddress;\n        if ( szAddresses.length > 0 ) {\n            szDominatedAddress = szAddresses[0];\n        }\n        else {\n            
szDominatedAddress = \"\";\n            // Using anonymous address. In fact, this argument template is pointless here, since the address is used by Iface only.\n        }\n\n        if( parameters == null || parameters.length == 0 ) {\n            this.mArgumentTemplate   = new GenericStructure( szDominatedAddress, 0 );\n        }\n        else {\n            this.mArgumentTemplate   = MethodTemplates.from( null, szDominatedAddress, parameters, parametersGenericLabels );\n        }\n    }\n\n    @Override\n    public void apply( List<ParamsDigest> ifaceParamsDigests ) {\n        this.mParamsDigests = ifaceParamsDigests;\n    }\n\n    @Override\n    public List<String> getArgumentsKey() {\n        return MethodDigestUtils.getArgumentsKey( this.getParamsDigests(), this.getArgumentTemplate() );\n    }\n\n    @Override\n    public String[] getAddresses() {\n        return this.mszAddresses;\n    }\n\n    @Override\n    public UMCMethod[] getInterceptMethods() {\n        return this.mInterceptMethods;\n    }\n\n    @Override\n    public Method getMappedMethod() {\n        return this.mMappedMethod;\n    }\n\n    @Override\n    public Class<?> getClassType() {\n        return this.mClassType;\n    }\n\n    @Override\n    public DataStructureEntity getArgumentTemplate() {\n        return this.mArgumentTemplate;\n    }\n\n    @Override\n    public Class<?> getReturnType() {\n        return this.mReturnType;\n    }\n\n    @Override\n    public String getReturnGenericTypeLabel() {\n        return this.mszReturnGenericTypeLabel;\n    }\n\n    @Override\n    public void applyReturnGenericTypeLabel( String genericTypeLabel ) {\n        this.mszReturnGenericTypeLabel = genericTypeLabel;\n    }\n\n    @Override\n    public List<ParamsDigest> getParamsDigests() {\n        return this.mParamsDigests;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"addresses\"    , 
this.getAddresses()                            ),\n                new KeyValue<>( \"return\"       , this.getReturnType().getName()                 ),\n                new KeyValue<>( \"mappedClass\"  , this.getClassType().getName()                  ),\n                new KeyValue<>( \"mappedMethod\" , this.getMappedMethod().getName()               ),\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/GenericParamsDigest.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic class GenericParamsDigest implements ParamsDigest {\n    protected int          mParameterIndex;\n\n    protected String       mszName;\n\n    protected String       mszValue;\n\n    protected String       mszDefaultValue;\n\n    protected boolean      mRequired;\n\n    public GenericParamsDigest( int parameterIndex, String name, String value, String defaultValue, boolean required ) {\n        this.mParameterIndex = parameterIndex;\n        this.mszName         = name;\n        this.mszValue        = value;\n        this.mRequired       = required;\n        this.mszDefaultValue = defaultValue;\n    }\n\n    @Override\n    public int getParameterIndex() {\n        return this.mParameterIndex;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public String getValue() {\n        return this.mszValue;\n    }\n\n    @Override\n    public boolean isRequired() {\n        return this.mRequired;\n    }\n\n    @Override\n    public String getDefaultValue() {\n        return this.mszDefaultValue;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"index\"         , this.getParameterIndex()                       ),\n                new KeyValue<>( \"name\"          , this.getName()                                 ),\n                new KeyValue<>( \"value\"         , this.getValue()                                ),\n                new KeyValue<>( \"defaultValue\"  , this.getDefaultValue()                         ),\n                new KeyValue<>( \"required\"      , this.isRequired()                              ),\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/InspectException.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class InspectException extends PineRuntimeException {\n    public InspectException    () {\n        super();\n    }\n\n    public InspectException    ( String message ) {\n        super(message);\n    }\n\n    public InspectException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public InspectException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected InspectException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/MappingDigest.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport java.lang.reflect.Method;\nimport java.util.List;\n\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\n\npublic interface MappingDigest extends Pinenut {\n    String[] getAddresses();\n\n    default boolean isAnonymousAddress() {\n        return this.getAddresses().length == 0;\n    }\n\n    UMCMethod[] getInterceptMethods();\n\n    DataStructureEntity getArgumentTemplate();\n\n    Class<?> getClassType();\n\n    Method getMappedMethod();\n\n\n    Class<?> getReturnType();\n\n    String getReturnGenericTypeLabel();\n\n    default String[] getReturnGenericTypeNames() {\n        return ReflectionUtils.extractGenericClassNames( this.getReturnGenericTypeLabel() );\n    }\n\n    void applyReturnGenericTypeLabel( String genericTypeLabel );\n\n\n\n    List<ParamsDigest> getParamsDigests();\n\n    void apply( List<ParamsDigest> ifaceParamsDigests);\n\n    List<String> getArgumentsKey();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/MappingInspector.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport java.util.List;\n\nimport com.pinecone.ulf.util.lang.HierarchyClassInspector;\n\nimport javassist.CtMethod;\n\npublic interface MappingInspector extends HierarchyClassInspector {\n    List<ParamsDigest> inspectArgParams( Object methodDigest, CtMethod method );\n\n    ClassLoader getClassLoader();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/MethodDigestUtils.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.framework.lang.field.DataStructureEntity;\nimport com.pinecone.framework.util.StringUtils;\n\npublic final class MethodDigestUtils {\n    public static List<String> getArgumentsKey( List<? extends ParamsDigest> paramsDigests, DataStructureEntity argumentTemplate ) {\n        if ( paramsDigests == null || paramsDigests.isEmpty() || paramsDigests.size() != argumentTemplate.size() ) {\n            return null;\n        }\n\n        List<String> keys = new ArrayList<>( paramsDigests.size() );\n        for ( ParamsDigest digest : paramsDigests ) {\n            String n = digest.getName();\n            if ( StringUtils.isEmpty( n ) ) {\n                return null;\n            }\n            keys.add( n );\n        }\n        return keys;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/mapping/ParamsDigest.java",
    "content": "package com.pinecone.hydra.umct.mapping;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ParamsDigest extends Pinenut {\n    int getParameterIndex() ;\n\n    String getName();\n\n    String getValue();\n\n    boolean isRequired();\n\n    String getDefaultValue();\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/proxy/UMCTHub.java",
    "content": "package com.pinecone.hydra.umct.proxy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UMCTHub implements Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/stereotype/Controller.java",
    "content": "package com.pinecone.hydra.umct.stereotype;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Controller {\n    String value() default \"\";\n}"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/stereotype/Iface.java",
    "content": "package com.pinecone.hydra.umct.stereotype;\n\nimport org.springframework.core.annotation.AliasFor;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.TYPE, ElementType.METHOD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Iface {\n    @AliasFor(\"name\")\n    String value() default \"\";\n\n    @AliasFor(\"value\")\n    String name() default \"\";\n\n    String objectAddress() default \"\"; // Class only.\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/main/java/com/pinecone/hydra/umct/stereotype/IfaceUtils.java",
    "content": "package com.pinecone.hydra.umct.stereotype;\n\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.lang.reflect.Method;\n\npublic final class IfaceUtils {\n\n    public static String getIfaceNameFieldVal( Iface annotation ) {\n        String name = annotation.name();\n        if ( StringUtils.isEmpty( name ) ) {\n            name = annotation.value();\n        }\n\n        return name;\n    }\n\n    public static String queryIfaceLogicClassName ( Iface cIface ) {\n        String szLogicClassName = null;\n        if ( cIface != null ) {\n            String objectAddress = cIface.objectAddress();\n            if ( StringUtils.isNoneEmpty( objectAddress ) ) {\n                szLogicClassName = objectAddress;\n            }\n            else {\n                objectAddress = IfaceUtils.getIfaceNameFieldVal( cIface );\n            }\n\n            if ( StringUtils.isNoneEmpty( objectAddress ) ) {\n                szLogicClassName = objectAddress;\n            }\n        }\n\n        return szLogicClassName;\n    }\n\n    public static String queryIfaceLogicClassName ( Class<?> clazz ) {\n        Iface cIface = clazz.getAnnotation( Iface.class );\n        return IfaceUtils.queryIfaceLogicClassName( cIface );\n    }\n\n    public static String queryIfaceClassNameAddress ( Class<?> clazz ) {\n        String szLogicCN = IfaceUtils.queryIfaceLogicClassName( clazz );\n\n        if ( szLogicCN != null ) {\n            return szLogicCN;\n        }\n        return clazz.getName();\n    }\n\n    public static String getIfaceMethodName( Method method ){\n        String ifaceName = method.getName();\n\n        Iface annotation = method.getAnnotation(Iface.class);\n        if ( annotation != null ) {\n            String name = IfaceUtils.getIfaceNameFieldVal( annotation );\n            if ( StringUtils.isNoneEmpty( name ) ) {\n                ifaceName = name;\n            }\n        }\n\n        return ifaceName;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-message-control/src/test/java/com/umc/TestUMCC.java",
    "content": "package com.umc;\n\nimport java.nio.ByteBuffer;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.umc.msg.UMCCHead;\nimport com.pinecone.hydra.umc.msg.UMCCHeadV1;\nimport com.pinecone.hydra.umc.msg.UMCMethod;\nimport com.pinecone.hydra.umc.msg.extra.ExtraHeadCoder;\nimport com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder;\n\npublic class TestUMCC {\n    public static void testUMCC() throws Exception {\n        UMCCHeadV1 head = new UMCCHeadV1();\n\n        ExtraHeadCoder coder = new GenericExtraHeadCoder();\n\n\n        head.setBodyLength( 136 );\n        head.setControlBits( 512 );\n        head.applyExtraHeadCoder( coder );\n        head.setExtraHead( new JSONMaptron( \"{k:123, k1: abcdefg}\" ) );\n\n        UMCCHeadV1.EncodePair pair = UMCCHeadV1.encode( head, coder );\n        ByteBuffer buffer = pair.byteBuffer;\n\n        //new UMCCHeadV1(\"\", UMCMethod.INFORM );\n\n        Debug.redf( head, pair.bufLength );\n\n\n        UMCCHead dec = UMCCHeadV1.decode( buffer.array(), head.getSignature(), coder );\n\n        byte[] headBuf = new byte[ head.getExtraHeadLength() ];\n        int headSize = head.sizeof();\n        System.arraycopy( buffer.array(), headSize, headBuf, 0, head.getExtraHeadLength() );\n        Object object = coder.getDecoder().decode( dec, headBuf );\n\n        Debug.bluef( dec, object );\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            TestUMCC.testUMCC();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-service-control</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            
<groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-service</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ClientServiceRegisterException.java",
    "content": "package com.pinecone.hydra.service.registry;\n\npublic class ClientServiceRegisterException extends ServiceControlException {\n\n    public ClientServiceRegisterException() {\n        super();\n    }\n\n    public ClientServiceRegisterException( String message ) {\n        super(message);\n    }\n\n    public ClientServiceRegisterException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ClientServiceRegisterException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ServiceControlException.java",
    "content": "package com.pinecone.hydra.service.registry;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class ServiceControlException extends Exception implements Pinenut {\n\n    public ServiceControlException() {\n        super();\n    }\n\n    public ServiceControlException( String message ) {\n        super(message);\n    }\n\n    public ServiceControlException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ServiceControlException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ServiceControlRPCException.java",
    "content": "package com.pinecone.hydra.service.registry;\n\npublic class ServiceControlRPCException extends ServiceControlException {\n\n    public ServiceControlRPCException() {\n        super();\n    }\n\n    public ServiceControlRPCException( String message ) {\n        super(message);\n    }\n\n    public ServiceControlRPCException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ServiceControlRPCException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ServiceInstanceCreationException.java",
    "content": "package com.pinecone.hydra.service.registry;\n\npublic class ServiceInstanceCreationException extends ServiceControlException {\n\n    public ServiceInstanceCreationException() {\n        super();\n    }\n\n    public ServiceInstanceCreationException( String message ) {\n        super(message);\n    }\n\n    public ServiceInstanceCreationException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ServiceInstanceCreationException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ServiceValidationException.java",
    "content": "package com.pinecone.hydra.service.registry;\n\npublic class ServiceValidationException extends ServiceControlException {\n\n    public ServiceValidationException() {\n        super();\n    }\n\n    public ServiceValidationException( String message ) {\n        super(message);\n    }\n\n    public ServiceValidationException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ServiceValidationException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/UniformService.java",
    "content": "package com.pinecone.hydra.service.registry;\n\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.service.ArchService;\nimport com.pinecone.hydra.service.Serviciom;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\n\npublic class UniformService extends ArchService implements Serviciom {\n\n    public UniformService( Identification serviceId, ServiceElement serviceElement ) {\n        super( serviceId, serviceElement );\n    }\n\n    @Override\n    public Namespace getGroupNamespace() {\n        return null;\n    }\n\n    @Override\n    public String getGroupName() {\n        return null;\n    }\n\n    @Override\n    public Object getProcessImageObject() {\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/WolfServiceInstance.java",
    "content": "package com.pinecone.hydra.service.registry;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.service.Service;\nimport com.pinecone.hydra.service.Servicium;\nimport com.pinecone.hydra.service.entity.BindUSII;\nimport com.pinecone.hydra.service.entity.USII;\n\npublic class WolfServiceInstance implements Servicium {\n\n    protected USII                      mUSII;\n\n    protected Service                   mService;\n\n    public WolfServiceInstance( long clientId, Service service, GUID guid ) {\n        this.mUSII            = BindUSII.wrap( clientId, service.getId(), guid );\n        this.mService         = service;\n    }\n\n    @Override\n    public Identification getId() {\n        return this.mUSII.getInstanceId();\n    }\n\n    @Override\n    public Identification getServiceId() {\n        return this.mUSII.getServiceId();\n    }\n\n    @Override\n    public USII getUSII() {\n        return this.mUSII;\n    }\n\n    @Override\n    public Processum getProcessObject() {\n        return null;\n    }\n\n    @Override\n    public Service getService() {\n        return this.mService;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/appoint/ServiceAppointServer.java",
    "content": "package com.pinecone.hydra.service.registry.appoint;\n\nimport com.pinecone.hydra.appoints.AppointNodus;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\n\npublic interface ServiceAppointServer extends AppointNodus {\n\n    ServiceAppointServer hookServiceManager( ServiceManager serviceManager );\n\n    ServiceManager serviceManager();\n\n    boolean isTerminated();\n\n    boolean isStarted();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/appoint/ServiceClientile.java",
    "content": "package com.pinecone.hydra.service.registry.appoint;\n\nimport java.net.SocketAddress;\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ServiceClientile extends Pinenut {\n\n    /**\n     * One Client ID corresponds to one instance and can only have one address.\n     * For scenarios where a single client multiple connections is opened, there can only be one main address.\n     * 一个 ClientId，对应一个实例，只能有一个地址\n     * 对于开了单客户端多复用连接的场景，只能有一个主地址\n     */\n    SocketAddress getRemoteAddress();\n\n    void afterNewConnectionInbound( Long clientId, Object connectId, Object connection, Object context );\n\n    void afterConnectionDetach( Long clientId, Object channelId, Object connection );\n\n    ServiceAppointServer serviceAppointServer();\n\n    long getClientId();\n\n    int connectionCount();\n\n    boolean isDefunct();\n\n\n    /**\n     * Some servers may not be able to obtain connection-id.\n     */\n    Object queryNativeConnection( Object connectionIdentity );\n\n    Collection<?> connections();\n\n    void shutdown();\n\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/client/ArchServiceClient.java",
    "content": "package com.pinecone.hydra.service.registry.client;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.service.registry.ServiceControlRPCException;\n\npublic abstract class ArchServiceClient implements ServiceClient {\n    protected Logger          mLogger;\n\n    protected GuidAllocator   mGuidAllocator;\n\n    protected GUID            mServiceId;\n\n    protected GUID            mInstanceId;\n\n    public ArchServiceClient( @Nullable GUID serviceId, GuidAllocator guidAllocator ) {\n        this.mLogger                = LoggerFactory.getLogger( this.getClass() );\n        this.mGuidAllocator         = guidAllocator;\n        this.mServiceId             = serviceId;\n    }\n\n    public ArchServiceClient( GuidAllocator guidAllocator ) {\n        this( null, guidAllocator );\n    }\n\n    @Override\n    public void startService() throws ServiceControlRPCException {\n        this.initRPCSubsystem();\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.mGuidAllocator;\n    }\n\n    protected abstract void initRPCSubsystem() throws ServiceControlRPCException ;\n\n}"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/client/HuskyServiceClient.java",
    "content": "package com.pinecone.hydra.service.registry.client;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\nimport com.pinecone.hydra.service.registry.ClientServiceRegisterException;\nimport com.pinecone.hydra.service.registry.ServiceControlRPCException;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointClient;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class HuskyServiceClient extends ArchServiceClient implements ServiceClient {\n    protected DuplexAppointClient           mDuplexAppointClient;\n\n    protected UlfClient                     mRPCClient;\n\n    protected ServiceLifecycleIface         mServiceLifecycleIface;\n\n    protected ServiceMetaManipulationIface  mServiceMetaManipulationIface;\n\n    public HuskyServiceClient( @Nullable GUID serviceId, UlfClient ulfClient, GuidAllocator guidAllocator ) {\n        super( serviceId, guidAllocator );\n        this.mRPCClient             = ulfClient;\n    }\n\n    public HuskyServiceClient( UlfClient ulfClient, GuidAllocator guidAllocator ) {\n        this( null, ulfClient, guidAllocator );\n    }\n\n    @Override\n    public void startService() throws ServiceControlRPCException {\n        this.initRPCSubsystem();\n    }\n\n    @Override\n    public void terminateService() {\n        if( this.mDuplexAppointClient == null ) {\n            throw new IllegalStateException( \"RPCClient dose not started yet.\" );\n        }\n\n        this.deregister();\n        this.mDuplexAppointClient.terminate();\n        this.mDuplexAppointClient = null;\n    }\n\n    
@Override\n    public DuplexAppointClient getAppointNodus() {\n        return this.mDuplexAppointClient;\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.mGuidAllocator;\n    }\n\n    protected void initRPCSubsystem() throws ServiceControlRPCException {\n        if ( this.mDuplexAppointClient != null && !this.mDuplexAppointClient.getMessageNode().isTerminated() ) {\n            throw new IllegalStateException( \"DuplexAppointClient has started.\" );\n        }\n\n        this.mDuplexAppointClient = new WolvesAppointClient( this.mRPCClient );\n\n        try {\n            this.mDuplexAppointClient.execute();\n            this.mDuplexAppointClient.compile( ServiceLifecycleIface.class, false );\n            this.mDuplexAppointClient.compile( ServiceMetaManipulationIface.class, false );\n            this.mServiceLifecycleIface = this.mDuplexAppointClient.getIface( ServiceLifecycleIface.class );\n            this.mServiceMetaManipulationIface = this.mDuplexAppointClient.getIface( ServiceMetaManipulationIface.class );\n            this.mLogger.info( \"RPC initialization successful\" );\n        }\n        catch ( Exception e ) {\n            this.mServiceLifecycleIface = null;\n            throw new ServiceControlRPCException( e );\n        }\n    }\n\n    @Override\n    public GUID registerService( GUID serviceId, GUID deployGuid ) throws ClientServiceRegisterException {\n        RegisterServiceDTO serviceDTO = new RegisterServiceDTO();\n        serviceDTO.setServiceId( serviceId.toString() );\n        serviceDTO.setClientId( this.mRPCClient.getMessageNodeId() );\n        if ( deployGuid != null ) {\n            serviceDTO.setDeployId( deployGuid.toString() );\n        }\n        this.mServiceId = serviceId;\n\n        try {\n            String insId = this.mServiceLifecycleIface.registerService( serviceDTO );\n            if ( insId != null ) {\n                this.mInstanceId = this.mGuidAllocator.parse( insId );\n           
     this.mLogger.info( \"Successfully register service : {}, instanceId: {}\", serviceDTO.getServiceId(), insId );\n            }\n        }\n        catch ( Exception e ) {\n            this.mLogger.error( \"Register Service {} failed\", serviceDTO.getServiceId() );\n            throw new ClientServiceRegisterException( e );\n        }\n        return this.mInstanceId;\n    }\n\n    @Override\n    public void deregister() {\n        if ( this.mInstanceId != null ) {\n            this.mServiceLifecycleIface.deregisterServiceByInstanceId( this.mInstanceId.toString() );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/client/ServiceClient.java",
    "content": "package com.pinecone.hydra.service.registry.client;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.appoints.AppointNodus;\nimport com.pinecone.hydra.service.registry.ClientServiceRegisterException;\nimport com.pinecone.hydra.service.registry.ServiceControlRPCException;\n\npublic interface ServiceClient extends Pinenut {\n    void startService () throws ServiceControlRPCException;\n\n    void terminateService ();\n\n    AppointNodus getAppointNodus ();\n\n    GuidAllocator getGuidAllocator ();\n\n    GUID registerService( GUID serviceId, GUID deployGuid ) throws ClientServiceRegisterException;\n\n    void deregister();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/constant/ServiceStatus.java",
    "content": "package com.pinecone.hydra.service.registry.constant;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic enum ServiceStatus implements Pinenut {\n    SERVICE_NEW( 0x00, \"New\" ),\n    SERVICE_RUNNING( 0x01, \"Running\" ), // 服务运行中\n    SERVICE_SUSPENDED( 0x02, \"Suspended\" ), // 服务暂停\n    SERVICE_EXISTED( 0x03, \"Existed\" ), // 服务存活\n    SERVICE_TERMINATED( 0x04, \"Terminated\" ), // 服务终止（正常结束）\n    SERVICE_ERROR( 0x05, \"Error\" ); // 服务终止（因错误结束）\n\n    private final int code;\n\n    private final String name;\n\n    ServiceStatus( int code, String name ) {\n        this.code = code;\n        this.name = name;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public static ServiceStatus getByCode(int code ) {\n        for ( ServiceStatus type : ServiceStatus.values() ) {\n            if ( type.code == code ) {\n                return type;\n            }\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/constant/ServiceVitalizationStatus.java",
    "content": "package com.pinecone.hydra.service.registry.constant;\n\npublic enum ServiceVitalizationStatus {\n    New              ( 0x00 ),\n    Vitalized        ( 0x01 ),\n    Error            ( 0x02 ),\n    Success          ( 0x03 );\n    private final int code;\n\n    ServiceVitalizationStatus( int code ) {\n        this.code = code;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public static ServiceVitalizationStatus getByCode( int code ) {\n        for ( ServiceVitalizationStatus type : ServiceVitalizationStatus.values() ) {\n            if ( type.code == code ) {\n                return type;\n            }\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/dto/ApplicationMetaDTO.java",
    "content": "package com.pinecone.hydra.service.registry.dto;\n\npublic class ApplicationMetaDTO {\n\n    private String guid;\n\n    private String name;\n\n    private String type;\n\n    private String displayName;\n\n    private String alias;\n\n    private String fullName;\n\n    private String deploymentMethod;\n\n    private String resourceType;\n\n    private String level;\n\n\n\n\n    public String getGuid() {\n        return this.guid;\n    }\n\n    public void setGuid(String guid) {\n        this.guid = guid;\n    }\n\n    public String getLevel() {\n        return this.level;\n    }\n\n    public void setLevel(String level) {\n        this.level = level;\n    }\n\n    public String getResourceType() {\n        return this.resourceType;\n    }\n\n    public void setResourceType(String resourceType) {\n        this.resourceType = resourceType;\n    }\n\n    public String getDeploymentMethod() {\n        return this.deploymentMethod;\n    }\n\n    public void setDeploymentMethod(String deploymentMethod) {\n        this.deploymentMethod = deploymentMethod;\n    }\n\n    public String getFullName() {\n        return this.fullName;\n    }\n\n    public void setFullName(String fullName) {\n        this.fullName = fullName;\n    }\n\n    public String getAlias() {\n        return this.alias;\n    }\n\n    public void setAlias(String alias) {\n        this.alias = alias;\n    }\n\n    public String getDisplayName() {\n        return this.displayName;\n    }\n\n    public void setDisplayName(String displayName) {\n        this.displayName = displayName;\n    }\n\n    public String getType() {\n        return this.type;\n    }\n\n    public void setType(String type) {\n        this.type = type;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/dto/RegisterServiceDTO.java",
    "content": "package com.pinecone.hydra.service.registry.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class RegisterServiceDTO implements Pinenut {\n\n    protected Long clientId;\n\n    protected String serviceId;\n\n    protected String deployId;\n\n    public RegisterServiceDTO() {\n\n    }\n\n    public RegisterServiceDTO( Long clientId, String serviceId, String deployId ) {\n        this.clientId = clientId;\n        this.serviceId = serviceId;\n        this.deployId = deployId;\n    }\n\n    public Long getClientId() {\n        return this.clientId;\n    }\n\n    public void setClientId( Long clientId ) {\n        this.clientId = clientId;\n    }\n\n    public String getServiceId() {\n        return this.serviceId;\n    }\n\n    public void setServiceId( String serviceId ) {\n        this.serviceId = serviceId;\n    }\n\n    public String getDeployId() {\n        return this.deployId;\n    }\n\n    public void setDeployId(String deployId) {\n        this.deployId = deployId;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/dto/ServiceMetaDTO.java",
    "content": "package com.pinecone.hydra.service.registry.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.service.Service;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceElement;\nimport com.pinecone.hydra.service.kom.entity.Namespace;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.time.LocalDateTime;\n\npublic class ServiceMetaDTO implements Pinenut {\n    private String guid;\n\n    private String name;\n\n    private String type;\n\n    private String displayName;\n\n    private String description;\n\n    private String fullName;\n\n    private String groupNamespace;\n\n    private String groupName;\n\n    private String scenario;\n\n    private String primaryImplLang;\n\n    private String extraInformation;\n\n    private String level;\n\n    public String getType() {\n        return this.type;\n    }\n\n    public void setType( String type ) {\n        this.type = type;\n    }\n\n    public String getGuid() {\n        return this.guid;\n    }\n\n    public void setGuid(String guid) {\n        this.guid = guid;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    public String getDisplayName() {\n        return this.displayName;\n    }\n\n    public void setDisplayName(String displayName) {\n        this.displayName = displayName;\n    }\n\n    public String getDescription() {\n        return this.description;\n    }\n\n    public void setDescription(String description) {\n        this.description = description;\n    }\n\n    public String getFullName() {\n        return this.fullName;\n    }\n\n    public void setFullName(String fullName) {\n        this.fullName = 
fullName;\n    }\n\n    public String getGroupNamespace() {\n        return this.groupNamespace;\n    }\n\n    public void setGroupNamespace(String groupNamespace) {\n        this.groupNamespace = groupNamespace;\n    }\n\n    public String getGroupName() {\n        return this.groupName;\n    }\n\n    public void setGroupName(String groupName) {\n        this.groupName = groupName;\n    }\n\n    public String getScenario() {\n        return this.scenario;\n    }\n\n    public void setScenario(String scenario) {\n        this.scenario = scenario;\n    }\n\n    public String getPrimaryImplLang() {\n        return this.primaryImplLang;\n    }\n\n    public void setPrimaryImplLang(String primaryImplLang) {\n        this.primaryImplLang = primaryImplLang;\n    }\n\n    public String getExtraInformation() {\n        return this.extraInformation;\n    }\n\n    public void setExtraInformation(String extraInformation) {\n        this.extraInformation = extraInformation;\n    }\n\n    public String getLevel() {\n        return this.level;\n    }\n\n    public void setLevel(String level) {\n        this.level = level;\n    }\n\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n\n    public static ServiceMetaDTO from( Service service ){\n        ServiceMetaDTO serviceMetaDTO = new ServiceMetaDTO();\n        serviceMetaDTO.setGuid( service.getId().toString() );\n        serviceMetaDTO.setName(service.getName());\n        serviceMetaDTO.setDescription( service.getDescription() );\n        serviceMetaDTO.setDisplayName( service.getDisplayName() );\n        serviceMetaDTO.setFullName( service.getFullName() );\n        serviceMetaDTO.setExtraInformation( service.getExtraInformation() );\n        serviceMetaDTO.setLevel( service.getLevel() );\n        serviceMetaDTO.setScenario( service.getScenario() );\n        
serviceMetaDTO.setPrimaryImplLang( service.getPrimaryImplLang() );\n        serviceMetaDTO.setGroupName( service.getGroupName() );\n        serviceMetaDTO.setType( service.getType() );\n        return serviceMetaDTO;\n    }\n\n    public static ServiceMetaDTO from( ServiceElement service ){\n        ServiceMetaDTO serviceMetaDTO = new ServiceMetaDTO();\n        serviceMetaDTO.setGuid( service.getId().toString() );\n        serviceMetaDTO.setName(service.getName());\n        serviceMetaDTO.setDescription( service.getDescription() );\n        serviceMetaDTO.setDisplayName( service.getName() );\n        serviceMetaDTO.setFullName( service.getPath() );\n        serviceMetaDTO.setExtraInformation( service.getExtraInformation() );\n        serviceMetaDTO.setLevel( service.getLevel() );\n        serviceMetaDTO.setScenario( service.getScenario() );\n        serviceMetaDTO.setPrimaryImplLang( service.getPrimaryImplLang() );\n        serviceMetaDTO.setGroupName( null );\n        serviceMetaDTO.setType( service.getType() );\n        return serviceMetaDTO;\n    }\n\n    public static ServiceElement toServiceElement( ServiceMetaDTO meta, GuidAllocator guidAllocator ) {\n        ServiceElement element = new GenericServiceElement();\n        if ( meta.getGuid() != null ) {\n            element.setGuid( guidAllocator.parse(meta.getGuid()) );\n        }\n        element.setName( meta.getName());\n        element.setDescription( meta.getDescription() );\n        element.setExtraInformation( meta.getExtraInformation() );\n        element.setLevel( meta.getLevel() );\n        element.setScenario( meta.getScenario() );\n        element.setPrimaryImplLang( meta.getPrimaryImplLang() );\n        element.setType( meta.getType() );\n        return element;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/event/ServiceRegisterEvent.java",
    "content": "package com.pinecone.hydra.service.registry.event;\n\npublic enum ServiceRegisterEvent {\n    Created      ( 0x00, \"Created\" ),\n\n    Registered   ( 0x01, \"Registered\" ),\n\n    Deregistered ( 0x02, \"Deregistered\" ),\n\n    Detached     ( 0x03, \"Detached\" ),\n\n    ;\n\n    private final int code;\n\n    private final String name;\n\n    ServiceRegisterEvent(int code, String name ) {\n        this.code = code;\n        this.name = name;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/event/ServiceRegisterEventHandler.java",
    "content": "package com.pinecone.hydra.service.registry.event;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface ServiceRegisterEventHandler extends Pinenut {\n\n    void fired( long clientId, GUID insId, GUID serviceId, ServiceRegisterEvent event, Object caused );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceEventHooker.java",
    "content": "package com.pinecone.hydra.service.registry.server;\n\nimport java.util.function.Supplier;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.service.registry.appoint.ServiceClientile;\n\npublic interface ServiceEventHooker extends Pinenut {\n\n    void afterNewConnectionInbound(\n            Long clientId, Object connectId, Object connection, Object context,\n            Supplier<ServiceClientile> constructor\n    );\n\n    void afterConnectionDetach( Long clientId, Object channelId, Object connection );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceLifecycleIface.java",
    "content": "package com.pinecone.hydra.service.registry.server;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\nimport com.pinecone.hydra.umct.stereotype.Iface;\n\n@Iface\npublic interface ServiceLifecycleIface extends Pinenut {\n\n    /**\n     * @return with service-instance-guid\n     */\n    String registerService( RegisterServiceDTO serviceDTO );\n\n    boolean createInstanceMeta( ServiceInstanceEntry serviceInstanceEntry );\n\n    void deregisterServiceByClientId( Long clientId );\n\n    void deregisterServiceByInstanceId( String instanceId );\n\n    boolean hasOwnedServiceByServiceId( String serviceId );\n\n    boolean hasOwnedServiceInstance( Long clientId );\n\n    boolean hasOwnedServiceInstance( String instanceId );\n\n    boolean hasOwnedServiceClient( Long clientId );\n\n    Integer countRegisteredService();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceLifecycleService.java",
    "content": "package com.pinecone.hydra.service.registry.server;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceInstanceEntity;\nimport com.pinecone.hydra.service.registry.ClientServiceRegisterException;\nimport com.pinecone.hydra.service.registry.ServiceInstanceCreationException;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\n\npublic class ServiceLifecycleService implements Pinenut {\n\n    protected ServiceManager      mServiceManager;\n\n    protected ServiceInstrument   mServiceInstrument;\n\n    protected GuidAllocator       mGuidAllocator;\n\n    protected Logger              mLogger;\n\n    public ServiceLifecycleService( ServiceManager mServiceManager ) {\n        this.mServiceManager        = mServiceManager;\n        this.mServiceInstrument     = mServiceManager.getServicesInstrument();\n        this.mGuidAllocator         = this.mServiceInstrument.getGuidAllocator();\n        this.mLogger                = LoggerFactory.getLogger( this.getClass() );\n    }\n\n\n    public String registerService( RegisterServiceDTO serviceDTO ) throws ClientServiceRegisterException {\n        Long clientId   = serviceDTO.getClientId();\n        String szServId = serviceDTO.getServiceId();\n        GUID serviceId  = this.mGuidAllocator.parse( szServId );\n        GUID deployId   = null;\n        if ( StringUtils.isNotBlank(serviceDTO.getDeployId()) ) {\n            deployId = this.mGuidAllocator.parse( serviceDTO.getDeployId() );\n        }\n\n        GUID insId = this.mServiceManager.registerService( clientId, serviceId, deployId );\n\n        if ( insId != null ) {\n            return insId.toString();\n        
}\n        return null;\n    }\n\n    public boolean createInstanceMeta( GenericServiceInstanceEntity instanceEntity ) throws ServiceInstanceCreationException {\n        try {\n            this.mServiceInstrument.createServiceInstance( instanceEntity );\n        }\n        catch (Exception e) {\n            throw new ServiceInstanceCreationException( e );\n        }\n        return true;\n    }\n\n    public void deregisterServiceByClientId( Long clientId ) {\n        this.mServiceManager.deregisterServiceInstance( clientId );\n    }\n\n    public void deregisterServiceByInstanceId( String instanceId ) {\n        this.mServiceManager.deregisterServiceInstance( this.mGuidAllocator.parse( instanceId ) );\n    }\n\n    public boolean hasOwnedServiceByServiceId( String serviceId ) {\n        return this.mServiceManager.hasOwnedService( this.mGuidAllocator.parse( serviceId ) );\n    }\n\n    public boolean hasOwnedServiceInstance( Long clientId ) {\n        return this.mServiceManager.hasOwnedServiceInstance( clientId );\n    }\n\n    public boolean hasOwnedServiceClient( Long clientId ) {\n        return this.mServiceManager.hasOwnedServiceClient( clientId );\n    }\n\n    public boolean hasOwnedServiceInstance( String instanceId ) {\n        return this.mServiceManager.hasOwnedInstance( this.mGuidAllocator.parse( instanceId ) );\n    }\n\n    public Integer countRegisteredService() {\n        return this.mServiceManager.countRegisteredService();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceManager.java",
    "content": "package com.pinecone.hydra.service.registry.server;\n\nimport java.util.Collection;\nimport java.util.function.Supplier;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.service.ServiceInstance;\nimport com.pinecone.hydra.service.entity.USII;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.registry.ClientServiceRegisterException;\nimport com.pinecone.hydra.service.registry.ServiceControlRPCException;\nimport com.pinecone.hydra.service.registry.appoint.ServiceAppointServer;\nimport com.pinecone.hydra.service.registry.event.ServiceRegisterEventHandler;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\n\npublic interface ServiceManager extends Manager, Slf4jTraceable {\n\n    Collection<ServiceAppointServer> getServers();\n\n    /**\n     * Add server only.\n     */\n    ServiceManager addAppointServer( ServiceAppointServer appointServer );\n\n    /**\n     * Add, and hook.\n     */\n    ServiceManager hookAppointServer( ServiceAppointServer appointServer );\n\n    /**\n     * Add, hook, and start.\n     */\n    ServiceManager vitalizeAppointServer( ServiceAppointServer appointServer ) throws ServiceControlRPCException;\n\n    ServiceAppointServer getAppointServerById( Long appointNodeId );\n\n    ServiceAppointServer evictAppointServerById( Long appointNodeId );\n\n    int serverSize();\n\n    ServiceEventHooker serviceEventHooker();\n\n\n\n    void startService () throws ServiceControlRPCException;\n\n    void registerServiceInstance( ServiceInstance instance );\n\n    GUID registerService( Long clientId, GUID serviceId, GUID deployGuid ) throws ClientServiceRegisterException;\n\n    void destroyServiceInstance( GUID serviceId, GUID instanceGuid );\n\n    Collection<ServiceInstance >  fetchServiceInstance( Long clientId );\n\n    Collection<ServiceInstance >  
fetchServiceInstance( Identification serviceId );\n\n    Collection<ServiceInstance >  fetchServiceInstanceByIId( Identification instanceId );\n\n    Collection<ServiceInstance >  fetchServiceInstance( USII usii );\n\n\n\n    ServiceInstance queryServiceInstance( Long clientId );\n\n    ServiceInstance queryServiceInstance( USII usii );\n\n\n\n    boolean hasOwnedService( Identification serviceId );\n\n    boolean hasOwnedInstance( Identification instanceId );\n\n    boolean hasOwnedService( USII usii );\n\n    boolean hasOwnedServiceInstance( Long clientId );\n\n    boolean hasOwnedServiceClient( Long clientId );\n\n\n\n    default ServiceInstance queryFirstInstance( Long clientId ) {\n        Collection<ServiceInstance > instances = this.fetchServiceInstance( clientId );\n        if ( !instances.isEmpty() ) {\n            return instances.iterator().next();\n        }\n        return null;\n    }\n\n    default ServiceInstance queryFirstInstance( Identification serviceId ) {\n        Collection<ServiceInstance > instances = this.fetchServiceInstance( serviceId );\n        if ( !instances.isEmpty() ) {\n            return instances.iterator().next();\n        }\n        return null;\n    }\n\n    default ServiceInstance queryFirstInstance( USII usii ) {\n        Collection<ServiceInstance > instances = this.fetchServiceInstance( usii );\n        if ( !instances.isEmpty() ) {\n            return instances.iterator().next();\n        }\n        return null;\n    }\n\n\n\n    ServiceInstance getInstance( Identification instanceId ) ;\n\n    Collection<ServiceInstance >  deregisterServiceInstance ( Long clientId );\n\n    Collection<ServiceInstance > deregisterServiceInstance( Identification instanceId );\n\n    Collection<ServiceInstance >  deregisterService( Identification serviceId );\n\n\n\n    ServiceInstrument getServicesInstrument();\n\n    int countRegisteredService();\n\n\n\n    void addRegisterEventHandler( ServiceRegisterEventHandler handler ) ;\n\n    void 
removeRegisterEventHandler( ServiceRegisterEventHandler handler ) ;\n\n    int registerEventHandlerSize(  ) ;\n\n\n\n\n    ServiceLifecycleService serviceLifecycleService();\n\n    ServiceMetaService getServiceMetaService();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceMetaManipulationIface.java",
    "content": "package com.pinecone.hydra.service.registry.server;\n\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;\nimport com.pinecone.hydra.umct.stereotype.Iface;\n\nimport java.util.List;\n\n@Iface\npublic interface ServiceMetaManipulationIface extends Pinenut {\n    List<ServiceMetaDTO> fetchServiceInsMetaByClientId( long clientId );\n\n    List<ServiceMetaDTO> fetchServiceInsMetaByServiceId( String serviceId );\n\n    ServiceMetaDTO queryServiceMetaByPath( String path );\n\n    ServiceMetaDTO queryServiceMetaByGuid( String guid );\n\n    String evalCreationStatement( String jonsStatement );\n\n    String createNewService( String parentAppPath, ServiceMetaDTO meta );\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/ServiceMetaService.java",
    "content": "package com.pinecone.hydra.service.registry.server;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.service.Service;\nimport com.pinecone.hydra.service.ServiceInstance;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\nimport com.pinecone.hydra.service.kom.entity.ElementNode;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.kom.marshaling.ServiceJSONDecoder;\nimport com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\npublic class ServiceMetaService implements Pinenut {\n\n    protected ServiceManager mServiceManager;\n\n    protected ServiceInstrument mServiceInstrument;\n\n    protected ServiceJSONDecoder mServiceJSONDecoder;\n\n    public ServiceMetaService( ServiceManager serviceManager ){\n        this.mServiceManager = serviceManager;\n        this.mServiceInstrument = serviceManager.getServicesInstrument();\n        this.mServiceJSONDecoder = new ServiceJSONDecoder( this.mServiceInstrument );\n    }\n\n    public List<ServiceMetaDTO> fetchServiceInsMetaByClientId(long clientId ){\n        List<ServiceMetaDTO> serviceMetaDTOS = new ArrayList<>();\n        Collection<ServiceInstance> serviceInstances = this.mServiceManager.fetchServiceInstance( clientId );\n        for( ServiceInstance serviceInstance : serviceInstances ){\n            Service service = serviceInstance.getService();\n            serviceMetaDTOS.add( ServiceMetaDTO.from( service ) );\n        }\n        return serviceMetaDTOS;\n    }\n\n    public List<ServiceMetaDTO> fetchServiceInsMetaByServiceId( String serviceId ) {\n        List<ServiceMetaDTO> serviceMetaDTOS = new ArrayList<>();\n        Collection<ServiceInstance> 
serviceInstances = this.mServiceManager.fetchServiceInstance(\n                this.mServiceInstrument.getGuidAllocator().parse( serviceId )\n        );\n        for( ServiceInstance serviceInstance : serviceInstances ){\n            Service service = serviceInstance.getService();\n            serviceMetaDTOS.add( ServiceMetaDTO.from( service ) );\n        }\n        return serviceMetaDTOS;\n    }\n\n    public ServiceMetaDTO queryServiceMetaByPath( String path ) {\n        ElementNode node = this.mServiceManager.getServicesInstrument().queryElement( path );\n        ServiceElement serviceElement = node.evinceServiceElement();\n        if ( serviceElement == null ) {\n            return null;\n        }\n\n        return ServiceMetaDTO.from( serviceElement );\n    }\n\n    public ServiceMetaDTO queryServiceMetaByGuid( String guid ) {\n        TreeNode node = this.mServiceManager.getServicesInstrument().get( this.mServiceInstrument.getGuidAllocator().parse( guid ) );\n        if ( node instanceof ServiceElement ) {\n            ServiceElement serviceElement = (ServiceElement) node;\n            return ServiceMetaDTO.from( serviceElement );\n        }\n        return null;\n    }\n\n    public String evalCreationStatement( String jonsStatement ) {\n        ElementNode node = this.mServiceJSONDecoder.decode( new JSONMaptron( jonsStatement ) );\n        if ( node == null ) {\n            return null;\n        }\n\n        return node.getGuid().toString();\n    }\n\n    public String createNewService( String parentAppPath, ServiceMetaDTO meta ) {\n        ElementNode node = this.mServiceInstrument.queryElement( parentAppPath );\n        if ( node instanceof ApplicationElement) {\n            ApplicationElement applicationElement = (ApplicationElement) node;\n            ServiceElement serviceElement = ServiceMetaDTO.toServiceElement( meta, this.mServiceInstrument.getGuidAllocator() );\n            if ( serviceElement.getGuid() == null ) {\n                
serviceElement.setGuid( this.mServiceInstrument.getGuidAllocator().nextGUID() );\n            }\n            this.mServiceInstrument.put( serviceElement );\n            this.mServiceInstrument.affirmOwnedNode( applicationElement.getGuid(), serviceElement.getGuid() );\n            return serviceElement.getGuid().toString();\n        }\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/UniformServiceEventHooker.java",
    "content": "package com.pinecone.hydra.service.registry.server;\n\nimport java.util.function.Supplier;\n\nimport com.pinecone.hydra.service.registry.appoint.ServiceClientile;\n\npublic class UniformServiceEventHooker implements ServiceEventHooker {\n\n    protected UniformServiceManager mUniformServiceManager;\n\n    public UniformServiceEventHooker( UniformServiceManager manager ) {\n        this.mUniformServiceManager = manager;\n    }\n\n\n    @Override\n    public void afterNewConnectionInbound(\n            Long clientId, Object connectId, Object connection, Object context,\n            Supplier<ServiceClientile> constructor\n    ) {\n        this.mUniformServiceManager.mClientRegistry.compute( clientId, (key, ins ) -> {\n            if ( ins == null ) {\n                ins = constructor.get();\n            }\n            ins.afterNewConnectionInbound( clientId, connectId, connection, context );\n            return ins;\n        } );\n    }\n\n    @Override\n    public void afterConnectionDetach( Long clientId, Object channelId, Object connection ) {\n        synchronized ( this.mUniformServiceManager.mClientRegistry ) {\n            ServiceClientile client = this.mUniformServiceManager.mClientRegistry.get( clientId );\n            // It’s not thread-safe beyond this critical zone, as the size may be mutated by other threads after this point.\n            // 该临界区后面线程并不安全, size 可能在该临界区后被其他线程破坏.\n            if ( client != null ) {\n                client.afterConnectionDetach( clientId, channelId, connection );\n\n                if ( client.connectionCount() < 1 ) {\n                    this.mUniformServiceManager.mClientRegistry.remove( clientId );\n                    this.mUniformServiceManager.deregisterServiceInstance( clientId );\n                }\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/server/UniformServiceManager.java",
    "content": "package com.pinecone.hydra.service.registry.server;\n\nimport com.mysql.cj.exceptions.AssertionFailedException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.service.ServiceInstance;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.entity.USII;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceInstanceEntity;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.kom.entity.ServiceInstanceEntry;\nimport com.pinecone.hydra.service.registry.ClientServiceRegisterException;\nimport com.pinecone.hydra.service.registry.ServiceControlRPCException;\nimport com.pinecone.hydra.service.registry.UniformService;\nimport com.pinecone.hydra.service.registry.WolfServiceInstance;\nimport com.pinecone.hydra.service.registry.appoint.ServiceClientile;\nimport com.pinecone.hydra.service.registry.appoint.ServiceAppointServer;\nimport com.pinecone.hydra.service.registry.constant.ServiceStatus;\nimport com.pinecone.hydra.service.registry.event.ServiceRegisterEvent;\nimport com.pinecone.hydra.service.registry.event.ServiceRegisterEventHandler;\nimport com.pinecone.hydra.system.component.LogStatuses;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.net.InetSocketAddress;\nimport java.net.SocketAddress;\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ConcurrentMap;\nimport java.util.concurrent.locks.ReadWriteLock;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\npublic class UniformServiceManager implements ServiceManager {\n    protected final ServiceInstrument                            
                           mServiceInstrument;\n\n    protected final ConcurrentMap<Long, ServiceAppointServer >                              mServerPoolMap;  // ServerId => Node\n    protected final ConcurrentMap<Long, ServiceInstance >                                   mCIdInstanceRegistry; // ClientId => Instance\n    protected final ConcurrentMap<Identification, ConcurrentMap<Long, ServiceInstance> >    mServiceRegistry;  // ServiceId => <CId, Instance>\n    protected final ConcurrentMap<Identification, ClientInstance >                          mInstanceRegistry; // InstanceId => Instance\n    protected final ConcurrentMap<Long, ServiceClientile>                                   mClientRegistry; // ClientId => Client\n\n    protected final List<ServiceRegisterEventHandler>                                       mRegisterEventHandlers;\n    protected final GuidAllocator                                                           mGuidAllocator;\n    protected final ServiceEventHooker                                                      mServiceEventHooker;\n\n    protected final ServiceLifecycleService                                                 mServiceLifecycleService;\n    protected final ServiceMetaService                                                      mServiceMetaService;\n\n    private final Logger mLogger;\n\n    private final ReadWriteLock mEventHandlerLock = new ReentrantReadWriteLock();\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n\n    protected void vitalizeRPCSubsystem() throws ServiceControlRPCException {\n        try {\n            for ( Map.Entry<Long, ServiceAppointServer> entry : this.mServerPoolMap.entrySet() ) {\n                if ( !entry.getValue().isStarted() ) {\n                    entry.getValue().execute();\n                }\n            }\n            this.infoLifecycle( \"RPC Subsystem Service Vitalization\", LogStatuses.StatusDone );\n        }\n        catch ( Exception e ) {\n 
           throw new ServiceControlRPCException( e );\n        }\n    }\n\n    public UniformServiceManager( ServiceInstrument serviceInstrument ) {\n        this.mServiceInstrument         = serviceInstrument;\n        this.mServerPoolMap             = new ConcurrentHashMap<>();\n        this.mServiceRegistry           = new ConcurrentHashMap<>();\n        this.mCIdInstanceRegistry       = new ConcurrentHashMap<>();\n        this.mInstanceRegistry          = new ConcurrentHashMap<>();\n        this.mClientRegistry            = new ConcurrentHashMap<>();\n        this.mLogger                    = LoggerFactory.getLogger( this.getClass() );\n        this.mRegisterEventHandlers     = new ArrayList<>();\n        this.mGuidAllocator             = serviceInstrument.getGuidAllocator();\n        this.mServiceEventHooker        = new UniformServiceEventHooker( this );\n\n\n        this.mServiceLifecycleService   = new ServiceLifecycleService( this );\n        this.mServiceMetaService        = new ServiceMetaService( this );\n\n    }\n\n    @Override\n    public Collection<ServiceAppointServer> getServers() {\n        return this.mServerPoolMap.values();\n    }\n\n    @Override\n    public ServiceManager addAppointServer( ServiceAppointServer appointServer ) {\n        this.mServerPoolMap.put( appointServer.getMessageNodeId(), appointServer );\n        return this;\n    }\n\n    @Override\n    public ServiceManager hookAppointServer( ServiceAppointServer appointServer ) {\n        this.addAppointServer( appointServer );\n        appointServer.hookServiceManager( this );\n        return this;\n    }\n\n    @Override\n    public ServiceManager vitalizeAppointServer( ServiceAppointServer appointServer ) throws ServiceControlRPCException {\n        try {\n            this.hookAppointServer( appointServer );\n            appointServer.execute();\n            return this;\n        }\n        catch ( Exception e ) {\n            throw new ServiceControlRPCException( e );\n        
}\n    }\n\n    @Override\n    public ServiceAppointServer getAppointServerById( Long appointNodeId ) {\n        return this.mServerPoolMap.get( appointNodeId );\n    }\n\n    @Override\n    public ServiceAppointServer evictAppointServerById( Long appointNodeId ) {\n        ServiceAppointServer legacy = this.mServerPoolMap.remove( appointNodeId );\n        if ( legacy != null ) {\n            legacy.close(); // In principle, all connections will be closed cascadingly.\n            return legacy;\n        }\n        return null;\n    }\n\n    @Override\n    public int serverSize() {\n        return this.mServerPoolMap.size();\n    }\n\n    @Override\n    public ServiceEventHooker serviceEventHooker() {\n        return this.mServiceEventHooker;\n    }\n\n\n\n\n    @Override\n    public ServiceLifecycleService serviceLifecycleService() {\n        return this.mServiceLifecycleService;\n    }\n\n    @Override\n    public ServiceMetaService getServiceMetaService() {\n        return this.mServiceMetaService;\n    }\n\n    @Override\n    public void startService() throws ServiceControlRPCException {\n        this.vitalizeRPCSubsystem();\n    }\n\n    @Override\n    public void addRegisterEventHandler( ServiceRegisterEventHandler handler ) {\n        try {\n            this.mEventHandlerLock.writeLock().lock();\n            this.mRegisterEventHandlers.add( handler );\n        }\n        finally {\n            this.mEventHandlerLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public void removeRegisterEventHandler( ServiceRegisterEventHandler handler ) {\n        try {\n            this.mEventHandlerLock.readLock().lock();\n            this.mRegisterEventHandlers.remove( handler );\n        }\n        finally {\n            this.mEventHandlerLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    public int registerEventHandlerSize() {\n        try {\n            this.mEventHandlerLock.readLock().lock();\n            return 
this.mRegisterEventHandlers.size();\n        }\n        finally {\n            this.mEventHandlerLock.readLock().unlock();\n        }\n    }\n\n    protected void triggerServiceEvent( long clientId, Identification insId, ServiceRegisterEvent event, Object caused ) {\n        ServiceInstance instance = this.mCIdInstanceRegistry.get( clientId );\n        if ( instance == null ) {\n            return;\n        }\n        GUID serviceId = (GUID) instance.getUSII().getServiceId();\n\n        for ( ServiceRegisterEventHandler handler : this.mRegisterEventHandlers ) {\n            handler.fired( clientId, (GUID) insId, serviceId, event, caused );\n        }\n    }\n\n\n\n    //    @Override\n//    public void registerService( ServiceInstance instance ) {\n//        USII primaryKey = instance.getUSII();\n//        Long clientId   = primaryKey.getClientId();\n//\n//        this.mServiceRegistry.compute( primaryKey, ( key, ins ) -> {\n//            if ( ins == null ) {\n//                ins = new ConcurrentHashMap<>();\n//            }\n//            ins.put( clientId, instance );\n//            return ins;\n//        } );\n//    }\n\n    @Override\n    public void registerServiceInstance( ServiceInstance instance ) {\n        Identification primaryKey = instance.getUSII().getServiceId();\n        Long clientId   = instance.getUSII().getClientId();\n\n        this.mCIdInstanceRegistry.put( clientId, instance );\n        this.mInstanceRegistry.put( instance.getId(), new ClientInstance( clientId, instance ) );\n\n        this.mServiceRegistry.compute( primaryKey, ( key, ins ) -> {\n            if ( ins == null ) {\n                ins = new ConcurrentHashMap<>();\n            }\n            ins.put( clientId, instance );\n            return ins;\n        } );\n\n        this.triggerServiceEvent( clientId, instance.getId(), ServiceRegisterEvent.Registered, instance );\n    }\n\n    @Override\n    public GUID registerService( Long clientId, GUID serviceId, GUID deployGuid ) 
throws ClientServiceRegisterException {\n        synchronized ( this.mServiceRegistry ) {\n            ServiceClientile client = this.mClientRegistry.get( clientId );\n            if ( client == null ) {\n                throw new ClientServiceRegisterException( \"Client \" + clientId + \" is not existed.\" );\n            }\n\n            SocketAddress remote = client.getRemoteAddress();\n            String ip = \"\";\n            if ( remote instanceof InetSocketAddress ) {\n                InetSocketAddress inet = (InetSocketAddress) remote;\n                ip = inet.getAddress().getHostAddress();\n            }\n\n            ServiceInstanceEntry neo = this.createServiceInstanceMeta( serviceId, deployGuid, ip ); // new\n            ServiceInstanceEntry element = this.updateServiceInstanceStatus( neo.getGuid(), ServiceStatus.SERVICE_RUNNING );\n\n            TreeNode node = this.mServiceInstrument.get( serviceId );\n            ServiceElement serviceElement = (ServiceElement) node;\n            ServiceInstance serviceInstance = new WolfServiceInstance( clientId, new UniformService( serviceId, serviceElement ), element.getGuid() );\n            this.registerServiceInstance( serviceInstance );\n            this.mLogger.info( \"Remote serviceInstance {} register success. 
<IP:{}>\", element.getGuid(), ip );\n\n            return element.getGuid();\n        }\n    }\n\n    protected ServiceInstanceEntry updateServiceInstanceStatus( GUID id, ServiceStatus status ) {\n        ServiceInstanceEntry element = this.mServiceInstrument.queryServiceInstance( id );\n        if ( element != null ) {\n            element.setStatus( status.getName() );\n            element.setRunCount( element.getRunCount() + 1 );\n            this.mServiceInstrument.updateServiceInstance( element );\n        }\n\n        return element;\n    }\n\n    @Override\n    public void destroyServiceInstance( GUID serviceId, GUID instanceGuid ) {\n\n    }\n\n    @Override\n    public Collection<ServiceInstance > fetchServiceInstance( Long clientId ) {\n        return List.of( this.mCIdInstanceRegistry.get( clientId ) );\n    }\n\n    @Override\n    public Collection<ServiceInstance >  fetchServiceInstance( Identification serviceId ) {\n        ConcurrentMap<Long, ServiceInstance> map = this.mServiceRegistry.get( serviceId );\n        if ( map != null ) {\n            return map.values();\n        }\n        return List.of();\n    }\n\n    @Override\n    public Collection<ServiceInstance> fetchServiceInstanceByIId( Identification instanceId ) {\n        ClientInstance i = this.mInstanceRegistry.get( instanceId );\n        if ( i == null ) {\n            return List.of();\n        }\n        return List.of( i.getInstance() );\n    }\n\n    @Override\n    public Collection<ServiceInstance >  fetchServiceInstance( USII usii ) {\n        return this.fetchServiceInstance( usii.getServiceId() );\n    }\n\n    @Override\n    public ServiceInstance queryServiceInstance( USII usii ) {\n        return this.queryServiceInstance( usii.getClientId() );\n    }\n\n    @Override\n    public ServiceInstance queryServiceInstance( Long clientId ) {\n        return this.mCIdInstanceRegistry.get( clientId );\n    }\n\n    @Override\n    public boolean hasOwnedService( USII usii ) {\n        
return this.hasOwnedService( usii.getServiceId() );\n    }\n\n    @Override\n    public boolean hasOwnedService( Identification serviceId ) {\n        return this.mServiceRegistry.containsKey( serviceId );\n    }\n\n    @Override\n    public boolean hasOwnedInstance( Identification instanceId ) {\n        return this.mInstanceRegistry.containsKey( instanceId );\n    }\n\n    @Override\n    public boolean hasOwnedServiceInstance( Long clientId ) {\n        return this.mClientRegistry.containsKey( clientId );\n    }\n\n    @Override\n    public boolean hasOwnedServiceClient( Long clientId ) {\n        return this.mClientRegistry.containsKey( clientId );\n    }\n\n    @Override\n    public ServiceInstance getInstance( Identification instanceId ) {\n        ClientInstance i = this.mInstanceRegistry.get( instanceId );\n        if ( i == null ) {\n            return null;\n        }\n        return i.getInstance();\n    }\n\n    /**\n     * Finally elimination inlet function.\n     * 终末清除入口点\n     */\n    @Override\n    public Collection<ServiceInstance >  deregisterServiceInstance( Long clientId ) {\n        synchronized ( this.mServiceRegistry ) {\n            ServiceInstance eliminated = this.mCIdInstanceRegistry.remove( clientId );\n            // It’s not thread-safe beyond this critical zone, as the size may be mutated by other threads after this point.\n            // 该临界区后面线程并不安全, size 可能在该临界区后被其他线程破坏.\n            if ( eliminated != null ) {\n                ConcurrentMap<Long, ServiceInstance > instances = this.mServiceRegistry.get( eliminated.getServiceId() );\n                if ( instances != null ) {\n                    this.mInstanceRegistry.remove( eliminated.getId() );\n                    this.updateServiceInstanceStatus( (GUID) eliminated.getId(), ServiceStatus.SERVICE_TERMINATED );\n                    this.getLogger().info(\n                            \"Detached service instance, { clientId: {}, instanceId: {}, serviceId: {} }. 
<Detached>\",\n                            clientId, eliminated.getId(), eliminated.getServiceId()\n                    );\n\n                    if ( instances.size() <= 1 ) {\n                        instances = this.mServiceRegistry.remove( eliminated.getServiceId() );\n                        return instances.values();\n                    }\n                    else {\n                        // 副本实例，不用额外变更状态\n                        ServiceInstance instance = instances.remove( clientId );\n                        if ( instance != null ) {\n                            return List.of( instance );\n                        }\n                    }\n                }\n                else {\n                    throw new AssertionFailedException( \"Illegal internal statue, mismatched elimination-service size.\" );\n                }\n\n                this.triggerServiceEvent( clientId, eliminated.getId(), ServiceRegisterEvent.Deregistered, eliminated );\n            }\n            return null;\n        }\n    }\n\n    @Override\n    public Collection<ServiceInstance > deregisterServiceInstance( Identification instanceId ) {\n        ClientInstance clientInstance = this.mInstanceRegistry.get( instanceId );\n        if ( clientInstance == null ) {\n            return null;\n        }\n\n        return this.deregisterServiceInstance( clientInstance.getClientId() );\n    }\n\n    @Override\n    public Collection<ServiceInstance > deregisterService( Identification serviceId ) {\n        ConcurrentMap<Long, ServiceInstance > instances = this.mServiceRegistry.remove( serviceId );\n        if ( instances != null ) {\n            for ( Map.Entry<Long, ServiceInstance > kv : instances.entrySet() ) {\n                this.deregisterServiceInstance( kv.getKey() );\n            }\n            return instances.values();\n        }\n        return null;\n    }\n\n    @Override\n    public ServiceInstrument getServicesInstrument() {\n        return this.mServiceInstrument;\n    
}\n\n    @Override\n    public int countRegisteredService() {\n        return this.mServiceRegistry.size();\n    }\n\n\n    protected ServiceInstanceEntry createServiceInstanceMeta( GUID serviceId, GUID deployGuid, String ip ) {\n        GUID guid = this.mGuidAllocator.nextGUID();\n        ServiceInstanceEntry instanceEntity = new GenericServiceInstanceEntity();\n\n        instanceEntity.setDeployGuid( deployGuid );\n        instanceEntity.setStatus( ServiceStatus.SERVICE_NEW.getName() );\n        instanceEntity.setLatestStartTime( LocalDateTime.now() );\n        instanceEntity.setIp( ip );\n        instanceEntity.setGuid( guid );\n        instanceEntity.setServiceGuid( serviceId );\n\n        this.mServiceInstrument.createServiceInstance( instanceEntity );\n\n        return instanceEntity;\n    }\n\n\n\n    protected static class ClientInstance {\n        protected Long clientId;\n        protected ServiceInstance instance;\n\n        public ClientInstance( Long clientId, ServiceInstance instance ) {\n            this.clientId = clientId;\n            this.instance = instance;\n        }\n\n        public Long getClientId() {\n            return this.clientId;\n        }\n\n        public ServiceInstance getInstance() {\n            return this.instance;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ulf/HuskyServiceAppointServer.java",
    "content": "package com.pinecone.hydra.service.registry.ulf;\n\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.hydra.service.registry.appoint.ServiceAppointServer;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.ChannelHandleException;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umc.msg.event.ChannelEventHandler;\nimport com.pinecone.hydra.umc.msg.event.ChannelInactiveHandler;\nimport com.pinecone.hydra.umc.wolf.server.UlfServer;\n\npublic class HuskyServiceAppointServer implements ServiceAppointServer {\n\n    protected DuplexAppointServer mAppointServer;\n\n    protected ServiceManager      mServiceManager;\n\n    public HuskyServiceAppointServer( DuplexAppointServer duplexAppointServer ) {\n        this.mAppointServer = duplexAppointServer;\n    }\n\n    public HuskyServiceAppointServer( DuplexAppointServer duplexAppointServer, ServiceManager serviceManager ) {\n        this( duplexAppointServer );\n        this.mServiceManager = serviceManager;\n    }\n\n    @Override\n    public ServiceManager serviceManager() {\n        return this.mServiceManager;\n    }\n\n    @Override\n    public ServiceAppointServer hookServiceManager( ServiceManager serviceManager ) {\n        if( this.mServiceManager != null ) {\n            throw new IllegalStateException( \"Manager has already hooked.\" );\n        }\n\n        this.mServiceManager = serviceManager;\n        this.mAppointServer.registerController( new ServiceLifecycleController( this.mServiceManager ) );\n        this.mAppointServer.registerController( new ServiceMetaController( this.mServiceManager ) );\n\n        MessageNode messageNode = this.mAppointServer.getMessageNode();\n        UlfServer ulfServer   = (UlfServer) messageNode;\n        ulfServer.registerDataArrivedEventHandlers(new 
ChannelEventHandler() {\n            @Override\n            public void afterEventTriggered( ChannelControlBlock block, Object context ) {\n                long clientId    = block.getChannel().getIdentityID();\n                Object channelId = block.getChannel().getChannelID();\n\n                mServiceManager.serviceEventHooker().afterNewConnectionInbound(\n                        clientId, channelId, block.getChannel(), context,\n                        () -> new HuskyServiceClientile( HuskyServiceAppointServer.this )\n                );\n            }\n        });\n\n        ulfServer.registerChannelInactiveHandler(new ChannelInactiveHandler() {\n            @Override\n            public boolean afterChannelInactive( ChannelControlBlock ccb, Object context ) throws ChannelHandleException {\n                Long clientId    = ccb.getChannel().getIdentityID();\n                Object channelId = ccb.getChannel().getChannelID();\n\n                mServiceManager.serviceEventHooker().afterConnectionDetach( clientId, channelId, ccb.getChannel() );\n                return false;\n            }\n        });\n\n        this.mServiceManager.getLogger().info( \"AppointServer[{}] has been hooked to service manager.\", this.mAppointServer.getName() );\n        return this;\n    }\n\n    @Override\n    public String getName() {\n        return this.mAppointServer.getName();\n    }\n\n    @Override\n    public PatriarchalConfig getConfig() {\n        return this.mAppointServer.getConfig();\n    }\n\n    @Override\n    public void close() {\n        this.mAppointServer.close();\n    }\n\n    @Override\n    public void execute() throws Exception {\n        this.mAppointServer.execute();\n    }\n\n    @Override\n    public long getMessageNodeId() {\n        return this.mAppointServer.getMessageNodeId();\n    }\n\n    @Override\n    public boolean isTerminated() {\n        return this.mAppointServer.getMessageNode().isTerminated();\n    }\n\n    @Override\n    public 
boolean isStarted() {\n        return !this.isTerminated();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ulf/HuskyServiceClientile.java",
    "content": "package com.pinecone.hydra.service.registry.ulf;\n\nimport java.net.SocketAddress;\nimport java.util.Collection;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ConcurrentMap;\n\nimport com.pinecone.hydra.service.registry.appoint.ServiceClientile;\nimport com.pinecone.hydra.service.registry.appoint.ServiceAppointServer;\nimport com.pinecone.hydra.umc.msg.UMCChannel;\n\npublic class HuskyServiceClientile implements ServiceClientile {\n    protected long                                    mClientId = -1;\n\n    // connectionId => channel\n    protected final ConcurrentMap<Object, UMCChannel> mServiceChannels;\n\n    protected final ServiceAppointServer              mServiceAppointServer;\n\n    protected SocketAddress                           mMainRemoteAddress;\n\n    public HuskyServiceClientile( ServiceAppointServer serviceAppointServer ) {\n        this.mServiceChannels      = new ConcurrentHashMap<>();\n        this.mServiceAppointServer = serviceAppointServer;\n    }\n\n    @Override\n    public long getClientId() {\n        return this.mClientId;\n    }\n\n    @Override\n    public int connectionCount() {\n        return this.mServiceChannels.size();\n    }\n\n    @Override\n    public boolean isDefunct() {\n        return this.mServiceChannels.isEmpty();\n    }\n\n    @Override\n    public Object queryNativeConnection( Object connectionIdentity ) {\n        return this.mServiceChannels.get( connectionIdentity );\n    }\n\n    @Override\n    public Collection<?> connections() {\n        return this.mServiceChannels.values();\n    }\n\n    @Override\n    public void shutdown() {\n        for ( UMCChannel umcChannel : this.mServiceChannels.values() ) {\n            umcChannel.close();\n        }\n        this.mServiceChannels.clear();\n    }\n\n    @Override\n    public ServiceAppointServer serviceAppointServer() {\n        return this.mServiceAppointServer;\n    }\n\n    @Override\n    public void 
afterNewConnectionInbound( Long clientId, Object connectId, Object connection, Object context ) {\n        UMCChannel channel      = (UMCChannel) connection;\n        this.mServiceChannels.put( connectId, channel );\n        this.mClientId          = clientId;\n        this.mMainRemoteAddress = channel.remoteAddress();\n    }\n\n    @Override\n    public void afterConnectionDetach( Long clientId, Object channelId, Object connection ) {\n        this.mServiceChannels.remove( channelId );\n    }\n\n    @Override\n    public SocketAddress getRemoteAddress() {\n        return this.mMainRemoteAddress;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ulf/ServiceLifecycleController.java",
    "content": "package com.pinecone.hydra.service.registry.ulf;\n\nimport com.pinecone.hydra.service.kom.entity.GenericServiceInstanceEntity;\nimport com.pinecone.hydra.service.registry.ClientServiceRegisterException;\nimport com.pinecone.hydra.service.registry.ServiceInstanceCreationException;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleService;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\n\n@Controller\n@AddressMapping(\"com.pinecone.hydra.service.registry.server.ServiceLifecycleIface.\")\npublic class ServiceLifecycleController {\n\n    protected ServiceLifecycleService serviceLifecycleService;\n\n    public ServiceLifecycleController( ServiceManager serviceManager ) {\n        this.serviceLifecycleService = serviceManager.serviceLifecycleService();\n    }\n\n    @AddressMapping( \"registerService\" )\n    public String registerService( RegisterServiceDTO serviceDTO ) throws ClientServiceRegisterException {\n        return this.serviceLifecycleService.registerService( serviceDTO );\n    }\n\n    @AddressMapping(\"createInstanceMeta\")\n    public boolean createInstanceMeta( GenericServiceInstanceEntity instanceEntity ) throws ServiceInstanceCreationException {\n        return this.serviceLifecycleService.createInstanceMeta( instanceEntity );\n    }\n\n    @AddressMapping(\"deregisterServiceByClientId\")\n    public void deregisterServiceByClientId( Long clientId ) {\n        this.serviceLifecycleService.deregisterServiceByClientId( clientId );\n    }\n\n    @AddressMapping(\"deregisterServiceByInstanceId\")\n    public void deregisterServiceByInstanceId( String instanceId ) {\n        this.serviceLifecycleService.deregisterServiceByInstanceId( instanceId );\n    }\n\n    @AddressMapping(\"hasOwnedServiceByServiceId\")\n    public boolean 
hasOwnedServiceByServiceId( String serviceId ) {\n        return this.serviceLifecycleService.hasOwnedServiceByServiceId( serviceId );\n    }\n\n    @AddressMapping(\"hasOwnedServiceInstance\")\n    public boolean hasOwnedServiceInstance( Long clientId ) {\n        return this.serviceLifecycleService.hasOwnedServiceInstance( clientId );\n    }\n\n    @AddressMapping(\"hasOwnedServiceClient\")\n    public boolean hasOwnedServiceClient( Long clientId ) {\n        return this.serviceLifecycleService.hasOwnedServiceClient( clientId );\n    }\n\n    @AddressMapping(\"hasOwnedServiceInstanceByInstanceId\")\n    public boolean hasOwnedServiceInstance( String instanceId ) {\n        return this.serviceLifecycleService.hasOwnedServiceInstance( instanceId );\n    }\n\n    @AddressMapping(\"countRegisteredService\")\n    public Integer countRegisteredService() {\n        return this.serviceLifecycleService.countRegisteredService();\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-service-control/src/main/java/com/pinecone/hydra/service/registry/ulf/ServiceMetaController.java",
    "content": "package com.pinecone.hydra.service.registry.ulf;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\nimport com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaService;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\n\n@Controller\n@AddressMapping(\"com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface.\")\npublic class ServiceMetaController implements Pinenut {\n    protected ServiceMetaService serviceMetaService;\n\n    public ServiceMetaController( ServiceManager serviceManager ) {\n        this.serviceMetaService = serviceManager.getServiceMetaService();\n    }\n\n    @AddressMapping( \"fetchServiceInsMetaByClientId\" )\n    public List<ServiceMetaDTO> fetchServiceInsMetaByClientId( long clientId ){\n        return this.serviceMetaService.fetchServiceInsMetaByClientId( clientId );\n    }\n\n    @AddressMapping( \"fetchServiceInsMetaByServiceId\" )\n    public List<ServiceMetaDTO> fetchServiceInsMetaByServiceId( String serviceId ) {\n        return this.serviceMetaService.fetchServiceInsMetaByServiceId( serviceId );\n    }\n\n    @AddressMapping( \"queryServiceMetaByPath\" )\n    public ServiceMetaDTO queryServiceMetaByPath( String path ) {\n        return this.serviceMetaService.queryServiceMetaByPath( path );\n    }\n\n    @AddressMapping( \"queryServiceMetaByGuid\" )\n    public ServiceMetaDTO queryServiceMetaByGuid( String guid ) {\n        return this.serviceMetaService.queryServiceMetaByGuid( guid );\n    }\n\n\n    @AddressMapping( \"evalCreationStatement\" )\n    public String evalCreationStatement( String jsonStatement ) {\n        return this.serviceMetaService.evalCreationStatement( jsonStatement );\n    }\n\n    @AddressMapping( \"createNewService\" )\n    public String createNewService( String 
parentAppPath, ServiceMetaDTO meta ) {\n        return this.serviceMetaService.createNewService( parentAppPath, meta );\n    }\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-system-reign/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra.kernel</groupId>\n    <artifactId>hydra-system-reign</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-config</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            
<artifactId>hydra-framework-device</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-control</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-broadcast</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.javassist</groupId>\n            <artifactId>javassist</artifactId>\n            <version>3.29.0-GA</version>\n        </dependency>\n        <dependency>\n            <groupId>io.netty</groupId>\n            <artifactId>netty-all</artifactId>\n            <version>4.1.80.Final</version>\n        </dependency>\n\n\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.jsoup</groupId>\n            <artifactId>jsoup</artifactId>\n            <version>1.15.4</version>\n        </dependency>\n\n\n\n\n        <!-- MyBatis dependencies 
-->\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis</artifactId>\n            <version>3.5.9</version>\n        </dependency>\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis-spring</artifactId>\n            <version>2.0.6</version>\n        </dependency>\n\n\n        <!-- MySQL Connector -->\n        <dependency>\n            <groupId>mysql</groupId>\n            <artifactId>mysql-connector-java</artifactId>\n            <version>8.0.26</version>\n        </dependency>\n\n        <!-- Logging dependencies -->\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n            <version>1.7.30</version>\n        </dependency>\n\n        <dependency>\n            <groupId>net.spy</groupId>\n            <artifactId>spymemcached</artifactId>\n            <version>2.12.3</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.httpcomponents.client5</groupId>\n            <artifactId>httpclient5</artifactId>\n            <version>5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-vfs2</artifactId>\n            <version>2.9.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-vfs2-jackrabbit1</artifactId>\n            <version>2.9.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-lang3</artifactId>\n            <version>3.12.0</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-system-reign/src/main/java/com/pinecone/hydra/reign/UnixInstitutionalizedMetaImperiumPrivy.java",
    "content": "package com.pinecone.hydra.reign;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.imperium.ImperiumPrivy;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.kom.ExpressInstrument;\nimport com.pinecone.hydra.system.ko.runtime.GenericRuntimeInstrumentConfig;\nimport com.pinecone.hydra.system.ko.runtime.KernelExpressInstrument;\n\npublic class UnixInstitutionalizedMetaImperiumPrivy extends ArchSystemCascadeComponent implements ImperiumPrivy {\n    protected ExpressInstrument expressInstrument;\n\n    public UnixInstitutionalizedMetaImperiumPrivy( Namespace name, Hydrogen system, HyComponent parent, KernelObjectConfig config ) {\n        super( name, system, system.getComponentManager(), parent );\n\n        this.expressInstrument = new KernelExpressInstrument( system, \"\", config );\n    }\n\n    public UnixInstitutionalizedMetaImperiumPrivy( Namespace name, Hydrogen system, HyComponent parent, @Nullable JSONConfig config ) {\n        this( name, system, parent, new GenericRuntimeInstrumentConfig( config ) );\n    }\n\n    public UnixInstitutionalizedMetaImperiumPrivy( Hydrogen system, HyComponent parent, @Nullable JSONConfig config ) {\n        this( (Namespace) null, system, parent, config );\n    }\n\n    public UnixInstitutionalizedMetaImperiumPrivy( Hydrogen system, @Nullable JSONConfig config ) {\n        this( system, null,config );\n    }\n\n    public UnixInstitutionalizedMetaImperiumPrivy( String name, Hydrogen system, HyComponent parent, @Nullable JSONConfig config ) {\n        this( system, parent, config );\n\n        this.setTargetingName( name );\n    }\n\n    public 
UnixInstitutionalizedMetaImperiumPrivy( String name, Hydrogen system, @Nullable JSONConfig config ) {\n        this( name, system, null, config );\n    }\n\n    @Override\n    public ExpressInstrument getExpressInstrument() {\n        return this.expressInstrument;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>hydra</artifactId>\n        <groupId>com.pinecone.hydra</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.tritium</groupId>\n    <artifactId>hydra-system-tritium</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-config</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            
<artifactId>hydra-framework-device</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-control</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-broadcast</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n<!--        <dependency>-->\n<!--            <groupId>com.pinecone.summer</groupId>-->\n<!--            <artifactId>summer</artifactId>-->\n<!--            <version>2.1.0</version>-->\n<!--        </dependency>-->\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <!--<dependency>-->\n        <!--<groupId>com.walnut.sparta</groupId>-->\n        <!--<artifactId>sparta</artifactId>-->\n        <!--<version>2.1.0</version>-->\n        <!--</dependency>-->\n\n        <dependency>\n            <groupId>org.javassist</groupId>\n            <artifactId>javassist</artifactId>\n            <version>3.29.0-GA</version>\n        </dependency>\n        <dependency>\n            <groupId>io.netty</groupId>\n            <artifactId>netty-all</artifactId>\n            
<version>4.1.80.Final</version>\n        </dependency>\n\n\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.jsoup</groupId>\n            <artifactId>jsoup</artifactId>\n            <version>1.15.4</version>\n        </dependency>\n\n        <!-- https://mvnrepository.com/artifact/us.codecraft/webmagic-core -->\n        <dependency>\n            <groupId>us.codecraft</groupId>\n            <artifactId>webmagic-core</artifactId>\n            <version>0.8.0</version>\n        </dependency>\n\n        <!-- https://mvnrepository.com/artifact/us.codecraft/webmagic-extension -->\n        <dependency>\n            <groupId>us.codecraft</groupId>\n            <artifactId>webmagic-extension</artifactId>\n            <version>0.8.0</version>\n        </dependency>\n\n\n\n\n\n\n\n\n        <!-- MyBatis dependencies -->\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis</artifactId>\n            <version>3.5.9</version>\n        </dependency>\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis-spring</artifactId>\n            <version>2.0.6</version>\n        </dependency>\n\n        <!-- MyBatis Plus dependencies -->\n        <dependency>\n            <groupId>com.baomidou</groupId>\n            <artifactId>mybatis-plus-core</artifactId>\n            <version>3.4.3.4</version>\n        </dependency>\n        <dependency>\n            <groupId>com.baomidou</groupId>\n            <artifactId>mybatis-plus-annotation</artifactId>\n            <version>3.4.3.4</version>\n        </dependency>\n\n        <!-- MySQL Connector -->\n        <dependency>\n            <groupId>mysql</groupId>\n            <artifactId>mysql-connector-java</artifactId>\n            <version>8.0.26</version>\n        </dependency>\n\n        <!-- 
Logging dependencies -->\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n            <version>1.7.30</version>\n        </dependency>\n\n        <dependency>\n            <groupId>net.spy</groupId>\n            <artifactId>spymemcached</artifactId>\n            <version>2.12.3</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.httpcomponents.client5</groupId>\n            <artifactId>httpclient5</artifactId>\n            <version>5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-vfs2</artifactId>\n            <version>2.9.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-vfs2-jackrabbit1</artifactId>\n            <version>2.9.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-lang3</artifactId>\n            <version>3.12.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.alibaba</groupId>\n            <artifactId>druid</artifactId>\n            <version>1.2.8</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ConfigConstants.java",
    "content": "package com.pinecone.tritium;\n\npublic final class ConfigConstants {\n    public static final String KeyMasterOrchestrator = \"MasterOrchestrator\";\n\n\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/MasterServgramOrchestrator.java",
    "content": "package com.pinecone.tritium;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.Pinecore;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.hydra.servgram.*;\n\npublic class MasterServgramOrchestrator extends LocalServgramOrchestrator {\n    public MasterServgramOrchestrator( Pinecore system, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) {\n        super( system, sectionConfig, factory, transaction );\n    }\n\n    public MasterServgramOrchestrator( Pinecore system, String szSectionName, @Nullable GramFactory factory, GramTransaction transaction ) {\n        super( system, system.getGlobalConfig().getChild( szSectionName ), factory, transaction );\n    }\n\n    public MasterServgramOrchestrator( Pinecore system, String szSectionName ) {\n        super( system, system.getGlobalConfig().getChild( szSectionName ) );\n    }\n\n    public MasterServgramOrchestrator( Pinecore system ) {\n        this( system, ConfigConstants.KeyMasterOrchestrator );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/Tritium.java",
    "content": "package com.pinecone.tritium;\n\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.hydra.Hydra;\nimport com.pinecone.hydra.Hydradom;\nimport com.pinecone.hydra.servgram.ServgramOrchestrator;\nimport com.pinecone.hydra.system.component.GenericResourceDispenserCenter;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.component.GenericTracerScope;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.system.component.Slf4jTracerScope;\nimport com.pinecone.hydra.system.component.LogStatuses;\nimport com.pinecone.framework.unit.MultiScopeMap;\nimport com.pinecone.framework.util.config.JSONSystemConfig;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.system.component.TracerConfigurator;\nimport com.pinecone.hydra.umb.rabbit.RabbitMQClient;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.io.Tracerson;\nimport com.pinecone.framework.util.json.homotype.DirectObjectInjector;\nimport com.pinecone.tritium.system.ConfigScope;\nimport com.pinecone.tritium.system.Hierarchy;\nimport com.pinecone.tritium.system.InterWareDirector;\nimport com.pinecone.tritium.system.KnittedMiddlewareDirector;\nimport com.pinecone.hydra.system.component.LoggingConfigurator;\nimport com.pinecone.tritium.system.TritiumConfigScope;\nimport com.pinecone.tritium.system.TritiumSystem;\nimport com.pinecone.tritium.system.ServersScope;\nimport com.pinecone.tritium.system.StorageSystem;\nimport com.pinecone.tritium.system.SystemDaemon;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.time.LocalDateTime;\nimport java.time.format.DateTimeFormatter;\nimport java.util.Map;\n\n/**\n *  Bean Nuts Pinecone Hydra Tritium\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 
- 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Hydra - Tritium Kingdom - Pyramid Hierarchy - Centralized Architecture\n *  Hydra - 九头龙第三帝国 - 金字塔阶级控制架构 - 中央集权架构典型实现\n *  *****************************************************************************************\n *  Hydrogen   | 氕 | Prime Frame  | 框架层原型\n *  Deuterium  | 氘 | Federal Arch | 九头龙第二共和国\n *  Tritium    | 氚 | Hierarchy    | 九头龙第三帝国\n *  *****************************************************************************************\n *  Tritium Hydra | 氚\n *  DragonKing.cn of Harald\n */\npublic class Tritium extends Hydradom implements TritiumSystem, Slf4jTraceable {\n    public static final String  NUTLET_NAME         = \"Bean Nuts Hazelnut Sauron Hydra\";\n    public static final long    VER_PINE            =  202506L;\n    public static final String  VERSION             = \"2.1.0\";\n    public static final String  RELEASE_DATE        = \"2026/06/06\";\n    public static final String  ROOT_SERVER         = \"https://www.dragonking.cn/\";\n    public static final String  CONTACT_INFO        = \"E-Mail:info#dragonking.cn\"; // Giving your contact information, if this program interrupt abnormally.\n\n    public static final String  SYSTEM_PATH         = \"./system/\";\n    public static final String  SETUP_PATH          = Tritium.SYSTEM_PATH + \"setup/\";\n    public static final String  MAIN_CONFIG_FILE    = Tritium.SETUP_PATH + \"config.json5\";\n\n    protected String                               mMinionName;\n\n    protected Hierarchy                            mServiceHierarchy = Hierarchy.H_SLAVE;\n    protected boolean                              mMasterQuery;\n    protected Logger                               mLogger;\n\n    protected Path                                 mPrimaryConfigPath        ;\n\n\n    protected DirectObjectInjector                 mObjectInjector           ;\n    protected 
ServersScope                         mServersScope             ;\n    protected StorageSystem                        mStorageSystem            ;\n    protected Slf4jTracerScope                     mTracerScope              ;\n    protected TracerConfigurator                   mTracerConfigurator       ;\n    protected SystemDaemon                         mSystemPrimaryDaemon      ;\n    protected ConfigScope                          mPrimaryConfigScope       ; // Program runtime global variable retrieving config-scope.\n    protected InterWareDirector                    mMiddlewareDirector       ;\n    protected ResourceDispenserCenter              mDispenserCenter          ;\n\n    protected DynamicFactory                       mShardDynamicFactory      ;\n\n    protected void prepare_system_log4j_logger() {\n        this.mLogger = LoggerFactory.getLogger( this.className() + \"<PrimarySystem>\" );\n        this.pout().print( \"[System] [TracerReassignment] <Transfer console -> Slf4j>\\n\" );\n    }\n\n    private void load_this_class_config() {\n        this.mjoGlobalConfig.addParentPath( this.getWorkingPath() );\n        this.mjoSystemConfig     = this.mjoGlobalConfig.getChild( \"System\" );\n\n        this.mObjectInjector     = DirectObjectInjector.instance(  true, Tritium.class );\n        this.mObjectInjector.inject( this.mjoSystemConfig, Tritium.class, this );\n        this.mObjectInjector.inject( this.mjoSystemConfig, Hydra.class, this );\n\n        this.mServiceHierarchy   = Hierarchy.queryHierarchy( this.mjoSystemConfig.optString( \"ServiceArch\" ) );\n\n        this.mTracerConfigurator = new LoggingConfigurator( this );\n        this.mTracerConfigurator.apply();\n    }\n\n    protected void prepare_system_skeleton_before() {\n\n    }\n\n    protected void prepare_system_skeleton() {\n        this.infoLifecycle( \"<Hydra Kingdom> Skeleton Initialization\", LogStatuses.StatusStart );\n\n        this.prepare_system_skeleton_before();\n\n        
this.mTracerScope            = new GenericTracerScope( this );\n        this.mPrimaryConfigScope     = new TritiumConfigScope( ConfigScope.KeyGlobal, this, this.getGlobalConfig() );\n        this.mMiddlewareDirector     = new KnittedMiddlewareDirector( this );\n        this.mServersScope           = new ServersScope( this );\n        this.mStorageSystem          = new StorageSystem( this );\n        this.mSystemPrimaryDaemon    = new SystemDaemon( this );\n        this.mDispenserCenter        = new GenericResourceDispenserCenter( this );\n\n\n        this.getComponentManager().addComponent( this.mMiddlewareDirector    );\n        this.getComponentManager().addComponent( this.mPrimaryConfigScope   );\n        this.getComponentManager().addComponent( this.mServersScope         );\n        this.getComponentManager().addComponent( this.mStorageSystem        );\n        this.getComponentManager().addComponent( this.mTracerScope          );\n        this.getComponentManager().addComponent( this.mSystemPrimaryDaemon  );\n        this.getComponentManager().addComponent( this.mDispenserCenter      );\n        //Debug.trace( this.getComponentManager().getComponents() );\n        //Debug.echo( ( (JSONObject)this.getGlobalConfigScope().thisScope() ).toJSONStringI(4) );\n\n        super.prepare_system_skeleton();\n        this.infoLifecycle( \"<Hydra Kingdom> Skeleton Initialization\", LogStatuses.StatusReady );\n    }\n\n    @Override\n    protected void loadConfig() {\n        try {\n            Map<String, String[] > map = this.getStartupCommandMap();\n            String[] args = map.get( \"workingPath\" );\n            if( args != null && args.length > 0 ) {\n                this.mWorkingPath =  Path.of( args[ 0 ] );\n            }\n            else {\n                this.mWorkingPath      = Path.of( this.getRuntimeContextPath() );\n            }\n\n            args = map.get( \"config\" );\n            if( args != null && args.length > 0 ) {\n                
this.mPrimaryConfigPath = Path.of( args[ 0 ] );\n            }\n            else {\n                this.mPrimaryConfigPath = this.getWorkingPath().resolve( Tritium.MAIN_CONFIG_FILE );\n            }\n            this.mjoGlobalConfig   = (JSONSystemConfig) ( new JSONSystemConfig( this ) ).apply( this.mPrimaryConfigPath.toFile() );\n        }\n        catch ( IOException e ) {\n            this.handleKillException( e );\n        }\n    }\n\n    protected void traceSystemBootingInfo() {\n        this.pout().print( \"\\u001B[34m>>> System Booting...\\u001B[0m\\n\\n\" );\n    }\n\n    protected void loadTracer() {\n        this.mConsole = new Tracerson();\n    }\n\n    @Override\n    protected void onlyLoadTaskManager() {\n        this.mTaskManager = new MasterServgramOrchestrator( this );\n        this.mShardDynamicFactory = new GenericDynamicFactory( this.mTaskManager.getClassLoader() );\n    }\n\n    protected void traceSubsystemWelcomeInfo() {\n        this.pout().print( \"---------------------------------------------------------------\\n\" );\n    }\n\n    @Override\n    protected void traceWelcomeInfo() {\n        this.pout().print( \"---------------------------------------------------------------\\n\" );\n        this.pout().print( \"\\u001B[31mBean Nuts Pinecone Ursus for Java\\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[31mHydra Kingdom Framework (Tritium, Hydra Empire) \\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. 
All rights reserved.\\u001B[0m\\n\" );\n        this.pout().print( \"---------------------------------------------------------------\\n\" );\n        this.pout().print( \"\\u001B[31mDragon King\\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[32mWebsite: https://www.dragonking.cn/ \\u001B[0m\\n\" );\n\n        this.traceSubsystemWelcomeInfo();\n        this.traceSystemBootingInfo();\n        this.prepare_system_log4j_logger();\n        this.infoLifecycle( \"Initialization\", LogStatuses.StatusStart );\n    }\n\n    protected void traceSystemInfo() {\n        LocalDateTime now = LocalDateTime.now();\n        this.console().echo( \"----------------------System Information-----------------------\\n\" );\n        this.console().echo( \"MinionName : \" + this.mMinionName, \"\\n\"   );\n        this.console().echo( \"NutletName : \" + Tritium.NUTLET_NAME , \"\\n\"  );\n        this.console().echo( \"Version    : \" + Tritium.VERSION, \"\\n\"        );\n        this.console().echo( \"ReleaseDate: \" + Tritium.RELEASE_DATE, \"\\n\"   );\n        this.console().echo( \"ServiceID  : \" + this.mServiceID, \"\\n\"   );\n        this.console().echo( \"ServiceArch: \" + this.mServiceHierarchy.getName(), \"\\n\" );\n        this.console().echo( \"RuntimePath: \" + this.getRuntimePath(), \"\\n\" );\n        this.console().echo( \"ContextPath: \" + this.getRuntimeContextPath(), \"\\n\" );\n        this.console().echo( \"PrimaryConf: \" + this.mPrimaryConfigPath.toString(), \"\\n\" );\n        this.console().echo( \"StartTime  : \" + now.format( DateTimeFormatter.ofPattern(\"yyyy-MM-dd HH:mm:ss:SSS\") ), \"\\n\"   );\n        this.console().echo( \"---------------------------------------------------------------\\n\" );\n    }\n\n\n    public Tritium( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Tritium( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n\n        
this.load_this_class_config();\n        this.prepare_system_skeleton();\n        this.loadTracer();\n        this.dispatchStartupCommand();\n\n        this.traceSystemInfo();\n        this.infoLifecycle( \"Initialization\", LogStatuses.StatusReady );\n        this.console().echo( \"\\n\\n\" );\n        this.console().getOut().flush();\n    }\n\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    @Override\n    public Tritium infoLifecycle(String szWhat, String szStateOrExtra ) {\n        this.getLogger().info( \"[SystemLifecycle] [{}] <{}>\", szWhat, szStateOrExtra );\n        return this;\n    }\n\n    @Override\n    public Hierarchy getServiceArch() {\n        return this.mServiceHierarchy;\n    }\n\n    @Override\n    public boolean isTopmostArchy() {\n        return this.getServiceArch() == this.getTopmostArchy();\n    }\n\n    @Override\n    public Hierarchy getTopmostArchy() {\n        return Hierarchy.H_MASTER;\n    }\n\n    @Override\n    public Hierarchy getBottommostArchy() {\n        return Hierarchy.H_SLAVE;\n    }\n\n    @Override\n    public boolean isBottommostArchy() {\n        return this.getServiceArch() == this.getBottommostArchy();\n    }\n\n    @Override\n    public SystemDaemon getSystemDaemon() {\n        return this.mSystemPrimaryDaemon;\n    }\n\n    @Override\n    public ServersScope getServersScope() {\n        return this.mServersScope;\n    }\n\n    @Override\n    public StorageSystem getStorageSystem() {\n        return this.mStorageSystem;\n    }\n\n    @Override\n    public Slf4jTracerScope getTracerScope() {\n        return this.mTracerScope;\n    }\n\n    @Override\n    public ResourceDispenserCenter getDispenserCenter() {\n        return this.mDispenserCenter;\n    }\n\n    @Override\n    public MultiScopeMap<String, Object> getGlobalConfigScope() {\n        return this.getPrimaryConfigScope().getScopeMap();\n    }\n\n    @Override\n    public ConfigScope getPrimaryConfigScope() {\n        
return this.mPrimaryConfigScope;\n    }\n\n    public boolean getMasterQuery() {\n        return this.mMasterQuery;\n    }\n\n    public boolean isKingMasterQuery() {\n        return this.getMasterQuery() && (this.isTopmostArchy() || this.getServiceArch() == Hierarchy.H_PALADIN);\n    }\n\n    @Override\n    public InterWareDirector getMiddlewareDirector() {\n        return this.mMiddlewareDirector;\n    }\n\n    public ServgramOrchestrator getServgramOrchestrator() {\n        return (ServgramOrchestrator) this.mTaskManager;\n    }\n\n    @Override\n    public DynamicFactory getShardDynamicFactory() {\n        return this.mShardDynamicFactory;\n    }\n\n    public Path getPrimaryConfigsPath() {\n        return this.getWorkingPath().resolve( Tritium.SETUP_PATH );\n    }\n\n    public void vitalize () throws Exception {\n        this.getServgramOrchestrator().tracer().info( \"[Lifecycle] <System committed prime directive>\" );\n        this.getServgramOrchestrator().orchestrate();\n\n\n        //( new Heistron( \"Heist\", this )).execute();\n\n\n\n//        LocalHeistium heistium = new LocalHeistium( \"Test\", this, 5, null );\n//        heistium.joinStartMultiTasks();\n\n//        RangedPage64 page64   = new RangedPage64( 0, 1000,0 );\n//        DirectPagePool pagePool = new DirectPagePool( LocalTaskPage.class );\n//\n//        LocalMultiActiveTaskPageProducer producer = new LocalMultiActiveTaskPageProducer( new FixedPageDivider64( page64, pagePool, 100 ), page64.getId() + 1 );\n//        LocalSingleTaskPageConsumer consumer = new LocalSingleTaskPageConsumer( producer );\n//\n//        consumer.consume();\n    }\n\n\n    void testBunny() throws Exception {\n        RabbitMQClient bunny = new RabbitMQClient( this, this.getMiddlewareDirector().getMiddlewareConfig().optJSONObject( \"Messengers\" ).optJSONObject( \"RabbitMQKingpin\" ) );\n        bunny.toListen();\n\n        Debug.echo( bunny );\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/IndexableManager.java",
    "content": "package com.pinecone.tritium.ally;\n\npublic class IndexableManager {\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/messengers/MessagersManager.java",
    "content": "package com.pinecone.tritium.ally.messengers;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.centrum.UniformCentralSystem;\nimport com.pinecone.hydra.umct.MessageExpress;\nimport com.pinecone.hydra.umct.Messagram;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.hydra.system.ArchSystemAutoAssembleComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.HyHierarchy;\nimport com.pinecone.hydra.system.component.LogStatuses;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.WolfMCNode;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\nimport com.pinecone.tritium.system.InterWareDirector;\nimport com.pinecone.tritium.system.TritiumSystem;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Collection;\nimport java.util.Map;\n\npublic class MessagersManager extends ArchSystemAutoAssembleComponent implements Pinenut, HyComponent {\n    @JSONGet( \"Messagers\" )\n    protected JSONConfig                           mjoMessagersConf        ;\n\n    @JSONGet( \"Messagers.Configs\" )\n    protected JSONConfig                           mjoComponentConf        ;\n\n    @JSONGet( \"Messagers.Messagers\" )\n    protected JSONConfig                           mjoMessagers            ;\n\n\n    
@JSONGet( \"Messagers.Configs.Enable\" )\n    protected boolean                              mbEnable                ;\n\n    protected Map<String, Pinenut >                mMessagerComponent      ;\n\n\n\n    public MessagersManager( Namespace name, HyComponent parent ) {\n        super( name, parent.getSystem(), parent.getSystem().getComponentManager(), parent );\n\n        InterWareDirector parentManager = (InterWareDirector) parent;\n\n        this.getSystem().getPrimaryConfigScope().autoInject( MessagersManager.class, parentManager.getMiddlewareConfig() , this );\n\n        this.mMessagerComponent = new LinkedTreeMap<>();\n        this.prepareInstanceMessagers();\n\n        this.infoLifecycleInitializationDone();\n    }\n\n    public MessagersManager( HyComponent parent ) {\n        this( null, parent );\n    }\n\n\n    protected void prepareInstanceProcessum( Object node ) {\n        if( node instanceof Processum ) {\n            this.getSystem().getTaskManager().add( (Processum)node );\n        }\n    }\n\n    protected void executeInnerServgram( Object node, JSONObject conf ) {\n        HyHierarchy hierarchy = this.getSystem().getServiceArch();\n\n        if( node instanceof Servgram ) {\n            boolean bIsRecipient = conf.optBoolean( \"IsRecipient\" );\n\n            if( bIsRecipient ) {\n                String[] as = this.getSystem().getStartupCommandMap().get( \"TestWolfMCClient\" );\n                if( as != null && as.length > 0 && as[0].equals( \"true\" ) && node instanceof WolfMCServer ){\n                    return;\n                }\n            }\n\n            if( !bIsRecipient ) {\n                if( node instanceof MessageNode ) {\n                    boolean bAutoStartInMasterMode = conf.optBoolean( \"AutoStartInMasterMode\" );\n                    if( hierarchy.isDominantClass() && !bAutoStartInMasterMode ) {\n                        return;\n                    }\n                }\n            }\n\n            try{\n                
((Servgram) node).execute();\n            }\n            catch ( Exception e ) {\n                throw new ProxyProvokeHandleException( e );\n            }\n        }\n    }\n\n    protected void prepareInstanceMessagers() {\n        for ( Object o : this.mjoMessagers.entrySet() ) {\n            Map.Entry kv   = (Map.Entry) o;\n\n            Object ov = kv.getValue();\n            if ( ov instanceof String ) {\n                try {\n                    ov = this.mjoMessagers.fromPath( Path.of( (String) ov ) );\n                }\n                catch ( IOException e ) {\n                    throw new ProxyProvokeHandleException( e );\n                }\n            }\n\n            JSONObject val = (JSONObject) ov;\n            this.mObjectOverrider.override( val, this.mjoComponentConf, false );\n\n            try {\n                String szEngine        = val.optString( \"Engine\" );\n                String szInsNam        = (String) kv.getKey();\n\n                boolean bEnable        = val.optBoolean( \"Enable\" );\n                boolean bCentralManage = val.optBoolean( \"CentralManage\" );\n                if ( bEnable ) {\n                    TritiumSystem system = this.getSystem();\n                    Object node = null;\n                    if ( system instanceof UniformCentralSystem ) {\n                        UniformCentralSystem uSystem = (UniformCentralSystem) system;\n                        long nodeId = uSystem.getSystemGuidAllocator72().nextGUIDi64();\n                        node = this.mUniformFactory.loadInstance( szEngine, null,\n                                new Object[] { nodeId, szInsNam, this.getSystem(), val }\n                        );\n                    }\n\n                    if ( node == null ) {\n                        node = this.mUniformFactory.loadInstance( szEngine, null, new Object[] { szInsNam, this.getSystem(), val } );\n                    }\n\n                    if ( node instanceof MessageNode ) {\n           
             this.mMessagerComponent.put( szInsNam, (MessageNode)node );\n                        this.prepareMessagersMsgHandler( szInsNam, (MessageNode)node, val );\n                    }\n                    else if ( node instanceof Messagram ) {\n                        this.mMessagerComponent.put( szInsNam, (Messagram)node );\n                    }\n                    else {\n                        throw new IllegalArgumentException( \"Illegal message node engine, should be `MessageNode/Messagram`: \" + szEngine );\n                    }\n\n                    this.prepareInstanceProcessum( node );\n                    if ( bCentralManage ) {\n                        this.executeInnerServgram( node, val );\n                    }\n                }\n            }\n            catch ( Exception e ) {\n                throw new ProvokeHandleException( e );\n            }\n        }\n\n        //Debug.fmt( 2, this.mjoMessagersConf );\n    }\n\n    protected MessageExpress getMessageHandlerByName( String name ) {\n        for( Map.Entry<String, Pinenut > kv: this.mMessagerComponent.entrySet() ) {\n            Pinenut p = kv.getValue();\n            if( p instanceof Messagram ) {\n                Messagram messagram = (Messagram) p;\n                MessageExpress me = messagram.getExpressByName( name );\n                if( me != null ) {\n                    return me;\n                }\n            }\n        }\n        return null;\n    }\n\n    protected void prepareMessagersMsgHandler( String szInsNam, MessageNode node, JSONObject conf ) {\n        String szMessageHandler = conf.optString( \"MessageHandler\" );\n        if( !StringUtils.isEmpty(szMessageHandler) ) {\n            MessageExpress me;\n            if( szMessageHandler.contains( \".\" ) ) {\n                try {\n                    Object o = this.mUniformFactory.loadInstance( szMessageHandler, null, null );\n                    if( o instanceof MessageExpress ){\n                        me = 
(MessageExpress) o;\n                    }\n                    else {\n                        throw new IllegalArgumentException( \"Illegal message handler, should be `MessageExpress`: \" + szMessageHandler );\n                    }\n                }\n                catch ( Exception e ) {\n                    throw new ProvokeHandleException( e );\n                }\n            }\n            else {\n                me = this.getMessageHandlerByName( szMessageHandler );\n            }\n\n            if( me == null ) {\n                throw new IllegalArgumentException( \"Illegal message handler, can`t found: \" + szMessageHandler );\n            }\n\n            if( node instanceof WolfMCNode ) {\n                if( me instanceof UlfAsyncMsgHandleAdapter ) {\n                    ((WolfMCNode) node).apply( (UlfAsyncMsgHandleAdapter)me );\n                }\n                else {\n                    ((WolfMCNode) node).apply( UlfAsyncMsgHandleAdapter.wrap( (UMCTExpressHandler) me ) );\n                }\n\n                this.infoCriticalOperation(\n                        \"SetMessageExpress(`\" + szMessageHandler + \"`) ==> (`\" + szInsNam + \"`)\", LogStatuses.StatusDone\n                );\n            }\n        }\n    }\n\n    @Override\n    public TritiumSystem getSystem() {\n        return (TritiumSystem) super.getSystem();\n    }\n\n    public boolean isEnable() {\n        return this.mbEnable;\n    }\n\n    public JSONObject getMessagers() {\n        return this.mjoMessagers;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public Collection<String > messagersNames() {\n        return (Collection)this.getMessagers().values();\n    }\n\n    public Pinenut getComponentByName (String szName ) {\n        return this.mMessagerComponent.get( szName );\n    }\n\n    public MessageNode getMessageNodeByName ( String szName ) {\n        Pinenut p = this.getComponentByName( szName );\n        if( p instanceof MessageNode ) {\n            return 
(MessageNode) p;\n        }\n        return null;\n    }\n\n    public Messagram getMessagramByName ( String szName ) {\n        Pinenut p = this.getComponentByName( szName );\n        if( p instanceof Messagram ) {\n            return (Messagram) p;\n        }\n        return null;\n    }\n\n    public Pinenut terminate( String szName ) {\n        Pinenut node = this.getComponentByName( szName );\n        if( node != null ) {\n            if( node instanceof Servgram ) {\n                ((Servgram) node).terminate();\n            }\n            else if( node instanceof Processum ) {\n                ((Processum) node).apoptosis();\n            }\n\n            this.mMessagerComponent.remove( szName );\n        }\n        return node;\n    }\n\n    public int nodesSize() {\n        return this.mMessagerComponent.size();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/rdb/DruidDataSourceFactory.java",
    "content": "package com.pinecone.tritium.ally.rdb;\n\nimport java.sql.SQLException;\nimport java.util.Properties;\nimport javax.sql.DataSource;\n\nimport org.apache.ibatis.datasource.DataSourceFactory;\n\nimport com.alibaba.druid.pool.DruidDataSource;\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.homotype.MapStructure;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.slime.source.rdb.RelationalDatabase;\n\npublic class DruidDataSourceFactory implements DataSourceFactory, Pinenut {\n    protected UniformRDBClient rdbClient;\n\n    @MapStructure(\"Ibatis.DruidConfig.initial-size\")\n    protected int initialSize = 1;\n\n    @MapStructure(\"Ibatis.DruidConfig.min-idle\")\n    protected int minIdle = 1;\n\n    @MapStructure(\"Ibatis.DruidConfig.max-active\")\n    protected int maxActive = 20;\n\n    @MapStructure(\"Ibatis.DruidConfig.max-wait\")\n    protected long maxWait = 60000;\n\n    @MapStructure(\"Ibatis.DruidConfig.time-between-eviction-runs-millis\")\n    protected long timeBetweenEvictionRunsMillis = 60000;\n\n    @MapStructure(\"Ibatis.DruidConfig.min-evictable-idle-time-millis\")\n    protected long minEvictableIdleTimeMillis = 300000;\n\n    @MapStructure(\"Ibatis.DruidConfig.validation-query\")\n    protected String validationQuery = \"SELECT 1\";\n\n    @MapStructure(\"Ibatis.DruidConfig.test-while-idle\")\n    protected boolean testWhileIdle = true;\n\n    @MapStructure(\"Ibatis.DruidConfig.test-on-borrow\")\n    protected boolean testOnBorrow = false;\n\n    @MapStructure(\"Ibatis.DruidConfig.test-on-return\")\n    protected boolean testOnReturn = false;\n\n    @MapStructure(\"Ibatis.DruidConfig.pool-prepared-statements\")\n    protected boolean poolPreparedStatements = true;\n\n    @MapStructure(\"Ibatis.DruidConfig.max-pool-prepared-statement-per-connection-size\")\n    protected int 
maxPoolPreparedStatementPerConnectionSize = 20;\n\n    @MapStructure(\"Ibatis.DruidConfig.keep-alive\")\n    protected boolean keepAlive = true;\n\n    @MapStructure(\"Ibatis.DruidConfig.connection-error-retry-attempts\")\n    protected int connectionErrorRetryAttempts = 3;\n\n    @MapStructure(\"Ibatis.DruidConfig.break-after-acquire-failure\")\n    protected boolean breakAfterAcquireFailure = false;\n\n    @MapStructure(\"Ibatis.DruidConfig.filters\")\n    protected String filters;\n\n\n    public DruidDataSourceFactory( UniformRDBClient rdbClient ) {\n        this.rdbClient = rdbClient;\n        IbatisClient ibatisClient = (IbatisClient) rdbClient;\n\n        rdbClient.getRDBManager().getSystem().getPrimaryConfigScope().autoInject(\n                DruidDataSourceFactory.class, ibatisClient.getClientConf(), this\n        );\n    }\n\n    @Override\n    public void setProperties(Properties properties) {\n\n    }\n\n    @Override\n    public DataSource getDataSource() {\n        IbatisClient ibatisClient = (IbatisClient) this.rdbClient;\n        RelationalDatabase rdb    = (RelationalDatabase) this.rdbClient;\n\n        String driver   = ibatisClient.getJDBCDriverName();\n        String url      = ibatisClient.getJDBCURL();\n        String username = rdb.getUsername();\n        String password = rdb.getPassword();\n\n        DruidDataSource ds = new DruidDataSource();\n\n        /*\n         * 基础 JDBC\n         */\n        ds.setDriverClassName(driver);\n        ds.setUrl(url);\n        ds.setUsername(username);\n        ds.setPassword(password);\n\n        /*\n         * 连接池参数\n         */\n        ds.setInitialSize(this.initialSize);\n        ds.setMinIdle(this.minIdle);\n        ds.setMaxActive(this.maxActive);\n        ds.setMaxWait(this.maxWait);\n\n        /*\n         * 连接回收\n         */\n        ds.setTimeBetweenEvictionRunsMillis(this.timeBetweenEvictionRunsMillis);\n        ds.setMinEvictableIdleTimeMillis(this.minEvictableIdleTimeMillis);\n\n        
/*\n         * 连接检测\n         */\n        ds.setValidationQuery(this.validationQuery);\n        ds.setTestWhileIdle(this.testWhileIdle);\n        ds.setTestOnBorrow(this.testOnBorrow);\n        ds.setTestOnReturn(this.testOnReturn);\n\n        /*\n         * PS cache\n         */\n        ds.setPoolPreparedStatements(this.poolPreparedStatements);\n        ds.setMaxPoolPreparedStatementPerConnectionSize(\n                this.maxPoolPreparedStatementPerConnectionSize\n        );\n\n        ds.setKeepAlive(this.keepAlive);\n        ds.setConnectionErrorRetryAttempts(this.connectionErrorRetryAttempts);\n        ds.setBreakAfterAcquireFailure(this.breakAfterAcquireFailure);\n\n        if ( this.filters != null ) {\n            try {\n                ds.setFilters( this.filters );\n            }\n            catch ( SQLException e ) {\n                throw new ProvokeHandleException( e );\n            }\n        }\n\n        this.rdbClient.getRDBManager().getLogger().info(\n                \"[Lifecycle] New druid-data-source created (`{}`). <Done>\", this.rdbClient.getInstanceName()\n        );\n        return ds;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/rdb/GenericIbatisClient.java",
    "content": "package com.pinecone.tritium.ally.rdb;\n\nimport javax.sql.DataSource;\nimport java.sql.SQLException;\nimport java.util.Set;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.ArrayList;\nimport java.util.concurrent.locks.ReadWriteLock;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.lang.annotation.Annotation;\nimport java.sql.Connection;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.unit.LinkedTreeSet;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.MapStructure;\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.GenericClassScopeSet;\nimport com.pinecone.framework.util.lang.NamespaceCollector;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.pinecone.slime.source.DAOScanner;\nimport com.pinecone.slime.source.DataAccessObject;\nimport com.pinecone.slime.source.XMLResourceScanner;\nimport com.pinecone.slime.source.rdb.ArchRelationalDatabase;\n\nimport org.apache.ibatis.binding.BindingException;\nimport org.apache.ibatis.builder.xml.XMLMapperBuilder;\nimport org.apache.ibatis.datasource.DataSourceFactory;\nimport org.apache.ibatis.datasource.pooled.PooledDataSource;\nimport org.apache.ibatis.mapping.Environment;\nimport org.apache.ibatis.session.*;\nimport org.apache.ibatis.transaction.TransactionFactory;\nimport org.slf4j.Logger;\n\npublic class GenericIbatisClient extends ArchRelationalDatabase implements IbatisClient, UniformRDBClient {\n    protected String                      mszInstanceName       ;\n\n    protected SqlSessionFactory           mSqlSessionFactory    ;\n\n    protected Configuration               mConfiguration        ;\n\n    protected DataSource                  mDataSource         
  ;\n\n    protected Environment                 mEnvironment          ;\n\n    @MapStructure( \"Ibatis\" )\n    protected JSONObject                  mjoIbatisConf         ;\n\n    @MapStructure( \"JDBC.Driver\" )\n    protected String                      mszJDBCDriverName     ;\n\n    @MapStructure( \"JDBC.ExURL\" )\n    protected String                      mszJDBCExURL          ;\n\n    protected JSONObject                  mjoClientConf         ;\n\n    protected RDBManager                  mRDBManager           ;\n\n    @MapStructure( \"Ibatis.Environment\" )\n    protected String                      mszEnvironment        ;\n\n    @MapStructure( \"Ibatis.DataSource\" )\n    protected String                      mszDataSource         ;\n\n    @MapStructure( \"Ibatis.TransactionFactory\" )\n    protected String                      mszTransactionFactory ;\n\n    protected Logger                      mLogger               ;\n\n    @MapStructure( \"Ibatis.PooledConfig.InitialSize\" )\n    protected int                         mnInitialSize = 0     ;\n\n    @MapStructure( \"Ibatis.PooledConfig.MaxActive\" )\n    protected int                         mnMaxActive = 20      ;\n\n    @MapStructure( \"Ibatis.PooledConfig.MaxIdle\" )\n    protected int                         mnMaxIdle = 20        ;\n\n    @MapStructure( \"Ibatis.PooledConfig.MinIdle\" )\n    protected int                         mnMinIdle  =  1       ;\n\n    @MapStructure( \"Ibatis.PooledConfig.MaxWait\" )\n    protected int                         mnMaxWait = 60000     ;\n\n    @MapStructure( \"Ibatis.DataAccessObject.Scanner\" )\n    protected String                      mszDAOScanner         ;\n\n    @MapStructure( \"Ibatis.DataAccessObject.XMLScanner\" )\n    protected String                      mszXMLScanner         ;\n\n    @MapStructure( \"Ibatis.DataAccessObject.ScanScopes\" )\n    protected Collection<String >         mScannerScopes        ;\n\n    protected DAOScanner                  
mDAOScanner           ;\n\n    protected XMLResourceScanner          mXMLScanner           ;\n\n    protected DataSourceFactory           mDataSourceFactory    ;\n\n    protected final ReadWriteLock         mAddScopeLock         = new ReentrantReadWriteLock();\n\n\n    public GenericIbatisClient( RDBManager manager, String szInstanceName ) {\n        this.mRDBManager     = manager;\n        this.mszInstanceName = szInstanceName;\n\n        this.mjoClientConf   = this.mRDBManager.getDatabases().optJSONObject( szInstanceName );\n        this.mRDBManager.getSystem().getPrimaryConfigScope().autoInject( ArchRelationalDatabase.class, this.mjoClientConf, this );\n        this.mRDBManager.getSystem().getPrimaryConfigScope().autoInject( GenericIbatisClient.class, this.mjoClientConf, this );\n        this.mLogger         = this.getRDBManager().getSystem().getTracerScope().newLogger( this.className() );\n\n        this.prepareIbatisSubsystem();\n    }\n\n    protected void prepareKernelXMLList() {\n        ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader();\n        ClassScope classScope   = new GenericClassScopeSet( classLoader );\n        if ( this.mszXMLScanner == null ) {\n            this.mszXMLScanner = \"com.pinecone.slime.jelly.source.ibatis.IbatisXMLResourceScanner\";\n        }\n        Object ds = this.getRDBManager().getSharedUniformFactory().optLoadInstance( this.mszXMLScanner, new Object[]{ classScope, classLoader } );\n        if ( ds instanceof XMLResourceScanner) {\n            this.mXMLScanner = (XMLResourceScanner)ds;\n        }\n        else {\n            throw new IllegalArgumentException( \"Illegal class scanner, should be `ClassScanner`: \" + this.mszXMLScanner );\n        }\n    }\n\n    protected void prepareDAOMapperList() {\n        ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader();\n        ClassScope classScope   = new GenericClassScopeSet( classLoader );\n       
 Object ds = this.getRDBManager().getSharedUniformFactory().optLoadInstance( this.mszDAOScanner, new Object[]{ classScope, classLoader } );\n        if ( ds instanceof DAOScanner ) {\n            this.mDAOScanner = (DAOScanner)ds;\n        }\n        else {\n            throw new IllegalArgumentException( \"Illegal class scanner, should be `ClassScanner`: \" + this.mszDAOScanner );\n        }\n\n\n        Set<String > scopes = new LinkedTreeSet<>();\n        if ( this.mScannerScopes != null && !this.mScannerScopes.isEmpty() ) {\n            try {\n                List<String > candidates = new ArrayList<>();\n                for ( String sz : this.mScannerScopes ) {\n                    scopes.add( sz );\n                    this.mDAOScanner.scan( sz, true, candidates );\n                }\n\n                for ( String sz : candidates ) {\n                    this.addMapper( classLoader.loadClass( sz ) );\n                }\n            }\n            catch ( IOException | ClassNotFoundException e ) {\n                throw new ProxyProvokeHandleException( e );\n            }\n        }\n        this.mScannerScopes = scopes;\n    }\n\n    protected void prepareIbatisSubsystem() {\n        this.mLogger.info( \"[Lifecycle] [RDBClient::PrepareIbatisSubsystem::\" + this.mszInstanceName + \"] <Start>\" );\n\n        String szJDBCUrl = this.getJDBCURL();\n        Object ds = this.getRDBManager().getSharedUniformFactory().optLoadInstance(\n                this.mszDataSource, new Object[] { this }\n        );\n        if ( ds == null ) {\n            ds = this.getRDBManager().getSharedUniformFactory().optLoadInstance(\n                    this.mszDataSource, new Object[] { this.mszJDBCDriverName, szJDBCUrl, this.getUsername(), this.getPassword() }\n            );\n            if ( ds instanceof DataSource ) {\n                this.mDataSource = (DataSource) ds;\n                if ( ds instanceof PooledDataSource ) {\n                    PooledDataSource pds = 
(PooledDataSource) ds;\n                    pds.setPoolMaximumActiveConnections( this.mnMaxActive );\n                    pds.setPoolMaximumIdleConnections( this.mnMaxIdle );\n                    pds.setPoolTimeToWait( this.mnMaxWait );\n                }\n            }\n            else {\n                ds = null;\n            }\n        }\n        else {\n            this.mDataSourceFactory = (DataSourceFactory) ds;\n            this.mDataSource = this.mDataSourceFactory.getDataSource();\n        }\n\n        if ( ds == null ) {\n            throw new IllegalArgumentException( \"Illegal data source, should be `DataSource` / `DataSourceFactory`: \" + this.mszJDBCDriverName );\n        }\n\n\n        TransactionFactory transactionFactory;\n        Object tf = this.getRDBManager().getSharedUniformFactory().optLoadInstance( this.mszTransactionFactory, null );\n        if ( tf instanceof TransactionFactory ) {\n            transactionFactory = (TransactionFactory) tf;\n        }\n        else {\n            throw new IllegalArgumentException( \"Illegal transaction factory, should be `TransactionFactory`: \" + this.mszTransactionFactory );\n        }\n\n        this.mEnvironment       = new Environment( this.mszEnvironment, transactionFactory, this.mDataSource );\n        this.mConfiguration     = new Configuration( this.mEnvironment );\n        this.mSqlSessionFactory = new SqlSessionFactoryBuilder().build( this.mConfiguration );\n\n        this.prepareDAOMapperList();\n        this.prepareKernelXMLList();\n\n        this.mLogger.info( \"[Lifecycle] [RDBClient::PrepareIbatisSubsystem::\" + this.mszInstanceName + \"] <Done>\" );\n    }\n\n    @Override\n    public String getJDBCURL() {\n        if ( this.mszJDBCExURL == null ) {\n            this.mszJDBCExURL = \"\";\n        }\n        if ( !this.mszJDBCExURL.startsWith( \"&\" ) ) {\n            this.mszJDBCExURL = \"&\" + this.mszJDBCExURL;\n        }\n        return super.getJDBCURL() + this.mszJDBCExURL;\n    
}\n\n    @Override\n    public String getInstanceName() {\n        return this.mszInstanceName;\n    }\n\n\n    @Override\n    public Configuration getConfiguration() {\n        return this.mConfiguration;\n    }\n\n    @Override\n    public DataSource getDataSource() {\n        return this.mDataSource;\n    }\n\n    @Override\n    public Environment getEnvironment() {\n        return this.mEnvironment;\n    }\n\n    @Override\n    public JSONObject getIbatisConf() {\n        return this.mjoIbatisConf;\n    }\n\n    @Override\n    public String getJDBCDriverName() {\n        return this.mszJDBCDriverName;\n    }\n\n    @Override\n    public JSONObject getClientConf() {\n        return this.mjoClientConf;\n    }\n\n    @Override\n    public DAOScanner getDAOScanner() {\n        return this.mDAOScanner;\n    }\n\n    @Override\n    public <T> void addMapper( Class<T> type ) {\n        try {\n            this.mConfiguration.addMapper( type );\n        }\n        catch ( BindingException ignore ) {\n            // Do nothing.\n        }\n    }\n\n    @Override\n    public SqlSessionFactory getSqlSessionFactory() {\n        return this.mSqlSessionFactory;\n    }\n\n    @Override\n    public SqlSession openSession() {\n        SqlSession sqlSession = this.mSqlSessionFactory.openSession();\n        return sqlSession;\n    }\n\n    @Override\n    public SqlSession openSession( boolean autoCommit ) {\n        SqlSession sqlSession = this.mSqlSessionFactory.openSession(autoCommit);\n        return sqlSession;\n    }\n\n    @Override\n    public SqlSession openSession( Connection connection ) {\n        SqlSession sqlSession = this.mSqlSessionFactory.openSession(connection);\n        return sqlSession;\n    }\n\n    @Override\n    public SqlSession openSession( TransactionIsolationLevel level ) {\n        SqlSession sqlSession = this.mSqlSessionFactory.openSession(level);\n        return sqlSession;\n    }\n\n    @Override\n    public SqlSession openSession( ExecutorType 
execType ) {\n        SqlSession sqlSession = this.mSqlSessionFactory.openSession(execType);\n        return sqlSession;\n    }\n\n    @Override\n    public SqlSession openSession( ExecutorType execType, boolean autoCommit ) {\n        SqlSession sqlSession = this.mSqlSessionFactory.openSession(execType, autoCommit);\n        return sqlSession;\n    }\n\n    @Override\n    public SqlSession openSession( ExecutorType execType, TransactionIsolationLevel level ) {\n        SqlSession sqlSession = this.mSqlSessionFactory.openSession(execType, level);\n        return sqlSession;\n    }\n\n    @Override\n    public SqlSession openSession( ExecutorType execType, Connection connection ) {\n        SqlSession sqlSession = this.mSqlSessionFactory.openSession( execType, connection );\n        return sqlSession;\n    }\n\n    protected void free0( SqlSession sqlSession ) {\n        sqlSession.commit();\n        sqlSession.close();\n    }\n\n    @Override\n    public RDBManager getRDBManager() {\n        return this.mRDBManager;\n    }\n\n    @Override\n    public DAOScanner getDataAccessObjectScanner() {\n        return this.mDAOScanner;\n    }\n\n    @Override\n    public boolean hasOwnDataAccessObject( Class<?> clazz ) {\n        Annotation[] annotations = clazz.getAnnotations();\n        for ( Annotation annotation : annotations ) {\n            if ( annotation instanceof DataAccessObject ) {\n                String s = ((DataAccessObject) annotation).scope();\n                if ( s.isEmpty() || s.equals( this.getInstanceName() ) ) {\n                    return true;\n                }\n            }\n            else if ( annotation instanceof IbatisDataAccessObject ) {\n                String s = ((IbatisDataAccessObject) annotation).scope();\n                if ( s.isEmpty() || s.equals( this.getInstanceName() ) ) {\n                    return true;\n                }\n            }\n        }\n        return false;\n    }\n\n\n    private List<Class<?> > 
addDataAccessObjectScope0(\n            String szPacketName, boolean bIgnoreOwnedChecked, List<String > candidates, ClassLoader classLoader\n    ) throws IOException, ClassNotFoundException {\n        this.mScannerScopes.add( szPacketName );\n        this.mDAOScanner.scan( szPacketName, true, candidates );\n\n        List<Class<?> > candidateClasses = new ArrayList<>();\n        for ( String sz : candidates ) {\n            Class<?> clazz = classLoader.loadClass( sz );\n            if ( bIgnoreOwnedChecked || this.hasOwnDataAccessObject( clazz ) ) {\n                candidateClasses.add( clazz );\n                this.addMapper( clazz );\n            }\n        }\n        return candidateClasses;\n    }\n\n    @Override\n    public List<Class<?> > addDataAccessObjectScope( String szPacketName, boolean bIgnoreOwnedChecked ) {\n        ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader();\n        try {\n            List<String > candidates = new ArrayList<>();\n            this.mAddScopeLock.writeLock().lock();\n\n            try {\n                return this.addDataAccessObjectScope0( szPacketName, bIgnoreOwnedChecked, candidates, classLoader );\n            }\n            finally {\n                this.mAddScopeLock.writeLock().unlock();\n            }\n        }\n        catch ( IOException | ClassNotFoundException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public List<Class<?> > addDataAccessObjectScopeNoneSync( String szPacketName, boolean bIgnoreOwnedChecked ) {\n        ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader();\n        try {\n            List<String > candidates = new ArrayList<>();\n            return this.addDataAccessObjectScope0( szPacketName, bIgnoreOwnedChecked, candidates, classLoader );\n        }\n        catch ( IOException | ClassNotFoundException e ) {\n            throw new 
ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public List<Class<? > > addDataAccessObjectScope( String szPacketName ) {\n        return this.addDataAccessObjectScope( szPacketName, false );\n    }\n\n\n\n\n\n    private void addXMLObjectScope0(\n            String szPacketName, List<String > candidates, ClassLoader classLoader\n    ) throws IOException {\n        this.mScannerScopes.add( szPacketName );\n        this.mXMLScanner.scan( szPacketName, true, candidates );\n\n        for ( String szResource : candidates ) {\n            String szResourcePath = szResource.replace(\n                    NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, NamespaceCollector.RESOURCE_NAME_SEPARATOR\n            ) + \".xml\";\n\n            if ( !this.mConfiguration.isResourceLoaded( szResourcePath ) ) {\n                InputStream inputStream = classLoader.getResourceAsStream( szResourcePath );\n                if ( inputStream == null ) {\n                    continue;\n                }\n\n                XMLMapperBuilder xmlMapperBuilder = new XMLMapperBuilder(\n                        inputStream, this.mConfiguration, szResourcePath, this.mConfiguration.getSqlFragments()\n                );\n\n                xmlMapperBuilder.parse();\n                this.mConfiguration.addLoadedResource( szResourcePath );\n            }\n        }\n    }\n\n    @Override\n    public void addXMLObjectScope( String szPacketName ) {\n        ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader();\n        try {\n            List<String > candidates = new ArrayList<>();\n            this.mAddScopeLock.writeLock().lock();\n\n            try {\n                this.addXMLObjectScope0( szPacketName, candidates, classLoader );\n            }\n            finally {\n                this.mAddScopeLock.writeLock().unlock();\n            }\n        }\n        catch ( IOException e ) {\n            throw new ProxyProvokeHandleException( e 
);\n        }\n    }\n\n    @Override\n    public void addXMLObjectScopeNoneSync( String szPacketName ) {\n        ClassLoader classLoader = this.getRDBManager().getSystem().getTaskManager().getClassLoader();\n        try {\n            List<String > candidates = new ArrayList<>();\n            this.addXMLObjectScope0( szPacketName, candidates, classLoader );\n        }\n        catch ( IOException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n\n\n\n    @Override\n    public void close() throws ProxyProvokeHandleException {\n        if ( this.mDataSource != null ) {\n            if ( this.mDataSource instanceof PooledDataSource ) {\n                ((PooledDataSource) this.mDataSource).forceCloseAll();\n            }\n        }\n    }\n\n    @Override\n    public boolean isTerminated() throws ProxyProvokeHandleException {\n        try {\n            if ( this.mDataSource != null ) {\n                if ( this.mDataSource instanceof PooledDataSource ) {\n                    return ((PooledDataSource) this.mDataSource).getPoolState().getActiveConnectionCount() == 0;\n                }\n\n                return this.mDataSource.getConnection().isClosed();\n            }\n            return true;\n        }\n        catch ( SQLException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/rdb/RDBManager.java",
    "content": "package com.pinecone.tritium.ally.rdb;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemAutoAssembleComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.slime.source.rdb.RDBClient;\nimport com.pinecone.tritium.system.InterWareDirector;\nimport com.pinecone.tritium.system.TritiumSystem;\n\nimport java.util.Collection;\nimport java.util.Map;\n\npublic class RDBManager extends ArchSystemAutoAssembleComponent implements Pinenut, HyComponent {\n    @JSONGet( \"RDBs\" )\n    protected JSONObject                           mjoRDBsConf             ;\n\n    @JSONGet( \"RDBs.Configs\" )\n    protected JSONObject                           mjoComponentConf        ;\n\n    @JSONGet( \"RDBs.Databases\" )\n    protected JSONObject                           mjoDatabases            ;\n\n    @JSONGet( \"RDBs.Configs.JDBC.Driver\" )\n    protected String                               mszJDBCDriverName       ;\n\n    @JSONGet( \"RDBs.Configs.Ibatis.Client\" )\n    protected String                               mszIBatisClient         ;\n\n    @JSONGet( \"RDBs.Configs.Enable\" )\n    protected boolean                              mbEnable                ;\n\n    protected Map<String, RDBClient >              mRDBClientComponent     ;\n\n\n    public RDBManager( Namespace name, HyComponent parent ) {\n        super( name, parent.getSystem(), parent.getSystem().getComponentManager(), parent );\n        Hydrogen system = parent.getSystem();\n\n        InterWareDirector parentManager = (InterWareDirector) parent;\n\n        
this.getSystem().getPrimaryConfigScope().autoInject( RDBManager.class, parentManager.getMiddlewareConfig() , this );\n\n        this.mRDBClientComponent = new LinkedTreeMap<>();\n        this.prepareInstanceClient();\n\n        this.infoLifecycleInitializationDone();\n    }\n\n    public RDBManager( HyComponent parent ) {\n        this( null, parent );\n    }\n\n    protected void prepareInstanceClient() {\n        for( Object o : this.mjoDatabases.entrySet() ) {\n            Map.Entry kv   = (Map.Entry) o;\n\n            JSONObject val = (JSONObject) kv.getValue();\n            this.mObjectOverrider.override( val, this.mjoComponentConf, false );\n\n            try{\n                String szEngine = val.optString( \"Engine\" );\n                String szInsNam = (String) kv.getKey();\n\n                boolean bEnable = val.optBoolean( \"Enable\" );\n                if( bEnable ) {\n                    Object client = this.mUniformFactory.loadInstance( szEngine, null, new Object[] { this, szInsNam } );\n                    if( client instanceof RDBClient ){\n                        this.mRDBClientComponent.put( szInsNam, (RDBClient)client );\n                    }\n                    else {\n                        throw new IllegalArgumentException( \"Illegal client engine, should be `RDBClient`: \" + szEngine );\n                    }\n                }\n            }\n            catch ( Exception e ) {\n                throw new ProvokeHandleException( e );\n            }\n        }\n\n        //Debug.fmt( 2, this.mjoDatabases );\n    }\n\n    @Override\n    public TritiumSystem getSystem() {\n        return (TritiumSystem) super.getSystem();\n    }\n\n    public String getJDBCDriverName() {\n        return this.mszJDBCDriverName;\n    }\n\n    public JSONObject getComponentConf() {\n        return this.mjoComponentConf;\n    }\n\n    @Override\n    public DynamicFactory getSharedUniformFactory() {\n        return this.mUniformFactory;\n    }\n\n    public 
boolean isEnable() {\n        return this.mbEnable;\n    }\n\n    public JSONObject getDatabases() {\n        return this.mjoDatabases;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public Collection<String > databasesNames() {\n        return (Collection)this.getDatabases().values();\n    }\n\n    public RDBClient getRDBClientByName ( String szName ) {\n        return this.mRDBClientComponent.get( szName );\n    }\n\n    public RDBClient terminate( String szName ) {\n        RDBClient client = this.getRDBClientByName( szName );\n        if( client != null ) {\n            client.close();\n            if( client.isTerminated() ) {\n                this.mRDBClientComponent.remove( szName );\n            }\n            else {\n                return null;\n            }\n        }\n        return client;\n    }\n\n    public int clientSize() {\n        return this.mRDBClientComponent.size();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/ally/rdb/UniformRDBClient.java",
    "content": "package com.pinecone.tritium.ally.rdb;\n\nimport com.pinecone.slime.source.rdb.RDBClient;\n\npublic interface UniformRDBClient extends RDBClient {\n    RDBManager getRDBManager();\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/messagron/HeistMessage.java",
    "content": "package com.pinecone.tritium.messagron;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.umct.ArchMessagram;\nimport com.pinecone.hydra.umct.UMCConnection;\n\npublic class HeistMessage extends Messageletson {\n    public HeistMessage(UMCConnection msgPackage, ArchMessagram servtron ) {\n        super( msgPackage, servtron );\n    }\n\n    @Override\n    public void dispatch() {\n        Debug.trace( this.$_MSG() );\n    }\n\n    @Override\n    public void terminate(){\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/messagron/Messageletson.java",
    "content": "package com.pinecone.tritium.messagron;\n\nimport com.pinecone.hydra.umct.ArchMessagram;\nimport com.pinecone.hydra.umct.UMCConnection;\nimport com.pinecone.hydra.umct.JSONLetMsgDeliver;\nimport com.pinecone.hydra.umct.ArchMessagelet;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\n\nimport java.util.Map;\n\npublic abstract class Messageletson extends ArchMessagelet {\n    protected JSONObject       mLetLocal = new JSONMaptron();\n\n    public Messageletson(UMCConnection msgPackage, ArchMessagram servtron ) {\n        super( msgPackage, servtron );\n        this.mUMCReceiver = this.getMessagePackage().getReceiver();\n        this.mUMCTransmit = this.getMessagePackage().getTransmit();\n    }\n\n    // PHP Style\n    @Override\n    protected Map<String, Object > $_MSG() {\n        return this.getReceivedMessage().getHead().evalMapExtraHead();\n    }\n\n    @Override\n    public UMCMessage getReceivedMessage() {\n        return this.getMessagePackage().getMessage();\n    }\n\n    @Override\n    public JSONLetMsgDeliver getMessageDeliver() {\n        return (JSONLetMsgDeliver)super.getMessageDeliver();\n    }\n\n    @Override\n    public JSONObject getLetLocal()  {\n        return this.mLetLocal;\n    }\n\n    @Override\n    public String toJSONString() {\n        return this.getLetLocal().toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/messagron/Messagron.java",
    "content": "package com.pinecone.tritium.messagron;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.umct.IntegratedMessagram;\n\nimport java.util.Map;\n\npublic class Messagron extends IntegratedMessagram {\n    public Messagron( String szName, Processum parent, Map<String, Object > config ) {\n        super( szName, parent, config );\n    }\n\n    @Override\n    public String getLetsNamespace() {\n        return this.getClass().getPackageName() + \".\";\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/BasicServer.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.server.ArchServer;\n\npublic class BasicServer extends ArchServer {\n    protected ServersScope serversScope;\n\n    BasicServer( ServersScope scope, JSONObject prototype ) {\n        this.serversScope = scope;\n        this.extras       = prototype;\n        this.serversScope.getServerInjector().typeInject( this.getExtras(), this );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/ConfigScope.java",
    "content": "package com.pinecone.tritium.system;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.homotype.StereotypicInjector;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.MultiScopeMap;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.hydra.config.MapConfigReinterpreter;\nimport com.pinecone.hydra.system.HyComponent;\n\npublic interface ConfigScope extends Pinenut, HyComponent {\n    String KeyGlobal = \"Global\" + TritiumConfigScope.class.getSimpleName();\n\n    JSONConfig getProtoConfig();\n\n    MultiScopeMap<String, Object > getScopeMap();\n\n    MapConfigReinterpreter getMapConfigReinterpreter();\n\n     MapConfigReinterpreter newMapConfigReinterpreter() ;\n\n\n    StereotypicInjector autoInject(Class<?> stereotype, Object config, Object instance ) ;\n\n    StereotypicInjector autoInject(Class<?> stereotype, Map config, Object instance ) ;\n\n    StereotypicInjector autoConstruct( Class<?> stereotype, Object config, Object instance ) ;\n\n    StereotypicInjector autoConstruct( Class<?> stereotype, Map config, Object instance ) ;\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/Hierarchy.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.hydra.system.HyHierarchy;\n\npublic enum Hierarchy implements HyHierarchy {\n    H_MASTER  ( \"Master\" ),\n    H_PALADIN ( \"Paladin\" ),\n    H_MINION  ( \"Minion\" ),\n    H_SLAVE   ( \"Slave\" );\n\n    private final String value;\n\n    Hierarchy( String value ){\n        this.value = value;\n    }\n\n    @Override\n    public String getName(){\n        return this.value;\n    }\n\n    public static String queryName( Hierarchy hierarchy ) {\n        return hierarchy.getName();\n    }\n\n    public static Hierarchy queryHierarchy( String sz ) {\n        return Hierarchy.valueOf( \"H_\" + sz.toUpperCase() );\n    }\n\n    @Override\n    public boolean isDominantClass() {\n        return this == Hierarchy.H_MASTER || this == Hierarchy.H_PALADIN;\n    }\n\n    @Override\n    public boolean isWorkerClass() {\n        return this == Hierarchy.H_MINION || this == Hierarchy.H_SLAVE;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/InterWareDirector.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.ware.MiddlewareDirector;\nimport com.pinecone.tritium.ally.rdb.RDBManager;\nimport com.pinecone.tritium.ally.messengers.MessagersManager;\n\npublic interface InterWareDirector extends MiddlewareDirector, HyComponent {\n\n    @Override\n    TritiumSystem getSystem();\n\n    JSONConfig          getMiddlewareConfig();\n\n    @Override\n    default JSONConfig  getSectionConfig() {\n        return this.getMiddlewareConfig();\n    }\n\n    RDBManager          getRDBManager();  // OLTP-RDB\n\n    MessagersManager    getMessagersManager();\n\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/KnittedMiddlewareDirector.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemAutoAssembleComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.ware.WareManager;\nimport com.pinecone.tritium.ally.messengers.MessagersManager;\nimport com.pinecone.tritium.ally.rdb.RDBManager;\n\npublic class KnittedMiddlewareDirector extends ArchSystemAutoAssembleComponent implements InterWareDirector {\n    protected JSONConfig       mjoMiddlewareConf         ;\n\n    protected RDBManager       mRDBManager;\n\n    protected MessagersManager mMessagersManager;\n\n    public KnittedMiddlewareDirector(Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, system.getComponentManager(), parent );\n\n        this.mjoMiddlewareConf = (JSONConfig) system.getSystemConfig().getChild( \"Middleware\" );\n        //this.getSystem().getPrimaryConfigScope().autoInject( MiddlewareManager.class, this.mjoMiddlewareConf, this );\n\n        this.mRDBManager       = new RDBManager( this );\n        this.mMessagersManager = new MessagersManager( this );\n        this.addChildComponent( this.mRDBManager       );\n        this.addChildComponent( this.mMessagersManager );\n\n        this.infoLifecycleInitializationDone();\n    }\n\n    public KnittedMiddlewareDirector(Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public KnittedMiddlewareDirector( Hydrogen system ) {\n        this( system, null );\n    }\n\n    @Override\n    public TritiumSystem getSystem() {\n        return (TritiumSystem) super.getSystem();\n    }\n\n    @Override\n    public JSONConfig getMiddlewareConfig() {\n        return this.mjoMiddlewareConf;\n    }\n\n    @Override\n    public WareManager getManager( String name ) {\n        return null;\n    }\n\n    
@Override\n    public RDBManager getRDBManager() {\n        return this.mRDBManager;\n    }\n\n    @Override\n    public MessagersManager getMessagersManager() {\n        return this.mMessagersManager;\n    }\n}\n\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/MissionTerminateException.java",
    "content": "package com.pinecone.tritium.system;\n\npublic class MissionTerminateException extends RuntimeException {\n    public MissionTerminateException() {\n        super();\n    }\n\n    public MissionTerminateException( String message ) {\n        super( message );\n    }\n\n    public MissionTerminateException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    @Override\n    public String toString() {\n        return \"[object MissionTerminateException]\";\n    }\n\n    public String prototypeName() {\n        return \"MissionTerminateException\";\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/NomenclatureAllocator.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface NomenclatureAllocator extends Pinenut {\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/ServersScope.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.DirectObjectInjector;\nimport com.pinecone.hydra.server.ArchServer;\nimport com.pinecone.hydra.server.ArchServersCenter;\nimport com.pinecone.hydra.server.Server;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.tritium.util.ConfigHelper;\n\nimport java.io.IOException;\nimport java.util.Map;\n\npublic class ServersScope extends ArchServersCenter implements Pinenut {\n    protected DirectObjectInjector mServerInjector;\n\n    public ServersScope(Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, parent );\n\n        this.mServerInjector = new DirectObjectInjector( ConfigHelper.fnToSmallHumpName, ArchServer.class ) ;\n        this.fetchAll();\n        this.reinterpret();\n    }\n\n    public ServersScope(Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public ServersScope( Hydrogen system ) {\n        this( system, null );\n    }\n\n\n    @Override\n    public TritiumSystem getSystem() {\n        return (TritiumSystem) super.getSystem();\n    }\n\n    @Override\n    protected void  loadConfig() {\n        JSONConfig sys  = (JSONConfig) this.getSystem().getSystemConfig();\n        Object jServers = sys.opt( \"Servers\" );\n        if( jServers instanceof String ) {\n            try {\n                this.serversConfig = sys.fromFile( this.getSystem().getWorkingPath().resolve( (String) jServers ).toFile()  );\n            }\n            catch ( IOException e ) {\n                this.getSystem().handleKillException( e );\n            }\n        }\n        else {\n            this.serversConfig = (JSONObject) 
jServers;\n        }\n\n        sys.put( \"Servers\", this.serversConfig );\n    }\n\n    protected void  reinterpret() {\n        for ( Map.Entry<String,Object > kv: this.getNickNameMap().entrySet() ) {\n            this.getSystem().getGlobalConfigScope().put( kv.getKey(), ( (BasicServer)kv.getValue() ).getLocalDomain() );\n        }\n    }\n\n    @Override\n    protected Server newServer( JSONObject prototype )  {\n        return new BasicServer( this, prototype );\n    }\n\n    DirectObjectInjector getServerInjector() {\n        return this.mServerInjector;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/StorageSystem.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.template.TemplateParser;\nimport com.pinecone.hydra.system.Hydrogen;\n\nimport org.apache.commons.vfs2.CacheStrategy;\nimport org.apache.commons.vfs2.FileSystemException;\nimport org.apache.commons.vfs2.FilesCache;\nimport org.apache.commons.vfs2.impl.StandardFileSystemManager;\nimport org.apache.commons.vfs2.provider.FileProvider;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.IOException;\nimport java.util.Map;\n\n\npublic class StorageSystem extends ArchSystemCascadeComponent implements Pinenut, HyComponent {\n    protected JSONObject                         mjoProtoConfig;\n\n    @JSONGet( \"PathScope.Reinterpret\" )\n    protected JSONObject                         mjoToReinterpret;\n\n    protected JSONObject                         mjoReinterpretedScope;\n\n    @JSONGet( \"Protocols\" )\n    protected JSONObject                         mProtocols;\n\n    protected DynamicFactory                     mFSProvidesFactory;\n\n    @JSONGet( \"CacheStrategy\" )\n    protected String                             mszCacheStrategy;\n\n    @JSONGet( \"FilesCache\" )\n    protected String                             mszFilesCache;\n\n    private final StandardFileSystemManager      mFileSystemManager;\n\n    protected Logger                             mLogger;\n\n    public StorageSystem(Namespace name, 
Hydrogen system, HyComponent parent ) {\n        super( name, system, system.getComponentManager(), parent );\n        this.mFileSystemManager = new StandardFileSystemManager();\n        if( system instanceof TritiumSystem) {\n            this.mLogger            = ((TritiumSystem) system).getTracerScope().newLogger( this.className() );\n        }\n        else {\n            this.mLogger            = LoggerFactory.getLogger( this.className() + \"Logger\" );\n        }\n\n\n        this.loadConfig();\n        this.reinterpret();\n        this.prepareFileSystem();\n    }\n\n    public StorageSystem(Hydrogen system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public StorageSystem( Hydrogen system ) {\n        this( system, null );\n    }\n\n    protected void loadConfig() {\n        JSONConfig sys    = (JSONConfig) this.getSystem().getSystemConfig();\n        Object jPathScope = sys.opt( this.className() );\n\n        if( jPathScope instanceof String ) {\n            try {\n                this.mjoProtoConfig = sys.fromFile( this.getSystem().getWorkingPath().resolve( (String) jPathScope ).toFile() );\n            }\n            catch ( IOException e ) {\n                this.getSystem().handleKillException( e );\n            }\n        }\n        else {\n            this.mjoProtoConfig = (JSONObject) jPathScope;\n        }\n\n        sys.put( this.className(), this.mjoProtoConfig );\n        this.getSystem().getPrimaryConfigScope().autoInject( StorageSystem.class, this.mjoProtoConfig, this );\n    }\n\n    protected void reinterpret() {\n        this.mjoReinterpretedScope = (JSONObject) this.getSystem().getGlobalConfigScope().thisScope();\n\n        for ( Map.Entry<String,Object > kv: this.mjoToReinterpret.entrySet() ) {\n            if( this.mjoReinterpretedScope.hasOwnProperty( kv.getKey() ) ) {\n                throw new IllegalArgumentException( \"Illegal system config, duplicated config key.\" );\n            }\n\n            if( 
kv.getValue() instanceof String ) {\n                String szRaw = (String) kv.getValue();\n                TemplateParser parser = new TemplateParser( szRaw, this.mjoReinterpretedScope );\n                this.mjoReinterpretedScope.put( kv.getKey(), parser.eval() );\n            }\n            else {\n                throw new IllegalArgumentException( \"Illegal system config, reinterpret key can not be object.\" );\n            }\n        }\n    }\n\n    protected void prepareFileSystemProvides() {\n        for( Object o : this.mProtocols.entrySet() ) {\n            Map.Entry kv = (Map.Entry) o;\n\n            JSONObject info  = (JSONObject) kv.getValue();\n            String szProvide = info.optString( \"Provide\" );\n\n            boolean bDone = true;\n            if( !StringUtils.isEmpty( szProvide ) ) { // Empty for defaults, e.g. `file:///`\n                Object provide = this.mFSProvidesFactory.optLoadInstance( szProvide, null, null );\n                if( provide instanceof FileProvider ) {\n                    try{\n                        this.mFileSystemManager.addProvider( kv.getKey().toString(), (FileProvider)provide );\n                    }\n                    catch ( FileSystemException e ) {\n                        this.mLogger.warn( \"[AddFileSystemProviderCompromised] [FileSystemException] <What ->\" + e.getMessage() + \">\" );\n                        bDone = false;\n                    }\n                }\n                else {\n                    this.mLogger.warn( \"[BadAddFileSystemProvider] [Illegal provider or null] <\" + kv.getKey() + \"::`\" + szProvide + \"`>\" );\n                    bDone = false;\n                }\n            }\n\n            if( bDone ) {\n                szProvide = StringUtils.isEmpty( szProvide ) ? 
\"Default\" : szProvide;\n                this.mLogger.info( \"[AddFileSystemProvider] (\" + kv.getKey() + \"::`\" + szProvide + \"`) <Done>\" );\n            }\n        }\n    }\n\n    protected void prepareFileSystemCache() {\n        CacheStrategy strategy = StringUtils.isEmpty( this.mszCacheStrategy ) ? CacheStrategy.ON_CALL : CacheStrategy.valueOf( this.mszCacheStrategy );\n        try{\n            this.mFileSystemManager.setCacheStrategy( strategy );\n        }\n        catch ( FileSystemException e ) {\n            this.mLogger.warn( \"[SetCacheStrategy] [Compromised] <What ->\" + e.getMessage() + \">\" );\n        }\n\n        if( !StringUtils.isEmpty( this.mszFilesCache ) ) {\n            Object cache = this.mFSProvidesFactory.optLoadInstance( this.mszFilesCache, null, null );\n            if( cache instanceof FilesCache ) {\n                try{\n                    this.mFileSystemManager.setFilesCache( (FilesCache) cache );\n                }\n                catch ( FileSystemException e ) {\n                    this.mLogger.warn( \"[SetFilesCacheCompromised] [FileSystemException] <What ->\" + e.getMessage() + \">\" );\n                }\n            }\n            else {\n                this.mLogger.warn( \"[SetFilesCacheCompromised] [Illegal FilesCache or null] <`\" + this.mszFilesCache + \"`>\" );\n            }\n        }\n    }\n\n    protected void prepareFileSystem() {\n        this.mFSProvidesFactory = new GenericDynamicFactory (\n                this.getSystem().getTaskManager().getClassLoader()\n        );\n\n        this.prepareFileSystemProvides();\n        this.prepareFileSystemCache();\n    }\n\n    public JSONObject getProtoConfig() {\n        return this.mjoProtoConfig;\n    }\n\n    public JSONObject getReinterpretedScope() {\n        return this.mjoReinterpretedScope;\n    }\n\n    public JSONObject getToReinterpret() {\n        return this.mjoToReinterpret;\n    }\n\n    @Override\n    public TritiumSystem getSystem() {\n        
return (TritiumSystem) super.getSystem();\n    }\n\n    public StandardFileSystemManager getFileSystemManager() {\n        return this.mFileSystemManager;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/SystemDaemon.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.auto.ArchInstructation;\nimport com.pinecone.hydra.auto.PeriodicAutomaton;\nimport com.pinecone.hydra.auto.PeriodicAutomatron;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.IOException;\n\npublic class SystemDaemon extends ArchSystemCascadeComponent implements Pinenut, HyComponent {\n    protected long                     mnSurveillanceTime;\n    protected JSONObject               mDaemonConfig;\n    protected PeriodicAutomatron       mAutomatron;\n    protected Logger                   mLogger;\n\n    public SystemDaemon(Namespace name, Hydrogen system, HyComponent parent ) {\n        super( name, system, system.getComponentManager(), parent );\n\n        this.loadConfig();\n        this.mLogger      = LoggerFactory.getLogger( String.format( \"Tracer<%s>\", this.className() ) );\n        this.mAutomatron  = new PeriodicAutomaton( this.className(), system, this.mnSurveillanceTime, true );\n        this.mAutomatron.command( new ArchInstructation() {\n            boolean mbStarted = false;\n\n            @Override\n            public void execute() throws Exception {\n                if( !this.mbStarted ) {\n                    SystemDaemon.this.infoLifecycle( \"DaemonStarted\", \"Start\" );\n                    this.mbStarted = true;\n                    SystemDaemon.this.mAutomatron.withdraw( this );\n                }\n            }\n        });\n\n        this.getSystem().getTaskManager().add( this.mAutomatron );\n        this.mAutomatron.start();\n    }\n\n    public SystemDaemon(Hydrogen 
system, HyComponent parent ) {\n        this( null, system, parent );\n    }\n\n    public SystemDaemon( Hydrogen system ) {\n        this( system, null );\n    }\n\n    protected void loadConfig() {\n        JSONConfig sys  = (JSONConfig) this.getSystem().getSystemConfig();\n        Object jDaemon  = sys.opt( \"SystemDaemon\" );\n        if( jDaemon instanceof String ) {\n            try {\n                this.mDaemonConfig = sys.fromFile( this.getSystem().getWorkingPath().resolve( (String) jDaemon ).toFile()  );\n            }\n            catch ( IOException e ) {\n                this.getSystem().handleKillException( e );\n            }\n        }\n        else {\n            this.mDaemonConfig = (JSONObject) jDaemon;\n        }\n\n        this.mnSurveillanceTime = this.mDaemonConfig.optLong( \"SurveillanceTime\" );\n    }\n\n    public PeriodicAutomatron getAutomatron() {\n        return this.mAutomatron;\n    }\n\n    protected SystemDaemon infoLifecycle( String szWhat, String szStateOrExtra ) {\n        this.mLogger.info( \"[Lifecycle] [{}] <{}>\", szWhat, szStateOrExtra );\n        return this;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/TritiumConfigScope.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.construction.UnifyStructureInjector;\nimport com.pinecone.framework.system.homotype.StereotypicInjector;\nimport com.pinecone.framework.unit.MultiScopeMap;\nimport com.pinecone.framework.unit.MultiScopeMaptron;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.homotype.AnnotatedObjectInjector;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.config.MapConfigReinterpreter;\nimport com.pinecone.hydra.config.ScopedMapConfigReinterpreter;\nimport com.pinecone.hydra.system.ArchSystemCascadeComponent;\nimport com.pinecone.hydra.system.HyComponent;\nimport com.pinecone.hydra.system.Hydrogen;\n\nimport java.util.Map;\n\npublic class TritiumConfigScope extends ArchSystemCascadeComponent implements ConfigScope {\n    protected JSONConfig                      mProtoConfig;\n    protected MultiScopeMap<String, Object >  mConfigScope;\n    protected MapConfigReinterpreter          mConfigReinterpreter;\n\n    public TritiumConfigScope(Namespace name, Hydrogen system, HyComponent parent, JSONConfig config ) {\n        super( name, system, system.getComponentManager(), parent );\n\n        this.mConfigScope = new MultiScopeMaptron<>( new JSONMaptron() );\n        this.mProtoConfig = config;\n\n        this.reinterpret_conf_default();\n\n        this.mConfigReinterpreter = new ScopedMapConfigReinterpreter( this.getScopeMap() );\n    }\n\n    public TritiumConfigScope(Hydrogen system, HyComponent parent, JSONConfig config ) {\n        this( (Namespace) null, system, parent, config );\n    }\n\n    public TritiumConfigScope(Hydrogen system, JSONConfig config ) {\n        this( system, null,config );\n    }\n\n    public TritiumConfigScope(String name, Hydrogen system, HyComponent parent, JSONConfig 
config ) {\n        this( system, parent, config );\n\n        this.setTargetingName( name );\n    }\n\n    public TritiumConfigScope(String name, Hydrogen system, JSONConfig config ) {\n        this( name, system, null, config );\n    }\n\n    protected void reinterpret_conf_default() {\n        this.mConfigScope.setName( \"GlobalConfigScope\" );\n        for ( Map.Entry<String,Object > kv: this.getProtoConfig().entrySet() ) {\n            this.mConfigScope.put( kv.getKey(), kv.getValue() );\n        }\n    }\n\n    @Override\n    public JSONConfig getProtoConfig() {\n        return this.mProtoConfig;\n    }\n\n    @Override\n    public MultiScopeMap<String, Object > getScopeMap() {\n        return this.mConfigScope;\n    }\n\n    @Override\n    public MapConfigReinterpreter getMapConfigReinterpreter() {\n        return this.mConfigReinterpreter;\n    }\n\n    @Override\n    public MapConfigReinterpreter newMapConfigReinterpreter() {\n        return new ScopedMapConfigReinterpreter( this.getScopeMap() );\n    }\n\n\n    @Override\n    public StereotypicInjector autoInject( Class<?> stereotype, Object config, Object instance ) {\n        AnnotatedObjectInjector injector = new AnnotatedObjectInjector( stereotype );\n        try{\n            injector.inject( config, instance );\n            return injector;\n        }\n        catch ( Exception e ){\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public StereotypicInjector autoInject( Class<?> stereotype, Map config, Object instance ) {\n        AnnotatedObjectInjector injector = new AnnotatedObjectInjector( stereotype );\n        try{\n            injector.inject( config, instance );\n        }\n        catch ( Exception e ){\n            throw new ProxyProvokeHandleException( e );\n        }\n        return injector;\n    }\n\n    @Override\n    public StereotypicInjector autoConstruct( Class<?> stereotype, Object config, Object instance ) {\n        
UnifyStructureInjector injector = new UnifyStructureInjector( stereotype, ( (TritiumSystem)this.getSystem()).getDispenserCenter().getInstanceDispenser() );\n        try{\n            injector.inject( config, instance );\n            return injector;\n        }\n        catch ( Exception e ){\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public StereotypicInjector autoConstruct( Class<?> stereotype, Map config, Object instance ) {\n        UnifyStructureInjector injector = new UnifyStructureInjector( stereotype, ( (TritiumSystem)this.getSystem()).getDispenserCenter().getInstanceDispenser() );\n        try{\n            injector.inject( config, instance );\n        }\n        catch ( Exception e ){\n            throw new ProxyProvokeHandleException( e );\n        }\n        return injector;\n    }\n\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/system/TritiumSystem.java",
    "content": "package com.pinecone.tritium.system;\n\nimport com.pinecone.framework.system.PrimarySystem;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.subsystem.KernelMicroSystemCabinet;\nimport com.pinecone.hydra.system.types.HydraKingdom;\n\npublic interface TritiumSystem extends HydraKingdom, PrimarySystem {\n    InterWareDirector getMiddlewareDirector();\n\n    SystemDaemon getSystemDaemon();\n\n    ServersScope getServersScope() ;\n\n    StorageSystem getStorageSystem() ;\n\n    ConfigScope getPrimaryConfigScope() ;\n\n    ResourceDispenserCenter getDispenserCenter();\n\n    KernelMicroSystemCabinet getKernelMicroSystemCabinet();\n\n    DynamicFactory getShardDynamicFactory();\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/java/com/pinecone/tritium/util/ConfigHelper.java",
    "content": "package com.pinecone.tritium.util;\n\nimport com.pinecone.framework.system.functions.Function;\n\npublic class ConfigHelper {\n    public static final Function fnToSmallHumpName = (Object... arg )->{\n        return ConfigHelper.toSmallHumpName( arg[0] );\n    };\n\n    public static final Function fnToBigHumpName = (Object... arg )->{\n        return ConfigHelper.toBigHumpName( arg[0] );\n    };\n\n\n    public static String toSmallHumpName( String sz ) {\n        StringBuilder sb = new StringBuilder();\n        sb.append( sz );\n        sb.setCharAt( 0, Character.toLowerCase( sb.charAt(0) ) );\n        return sb.toString();\n    }\n\n    public static String toBigHumpName( String sz ) {\n        StringBuilder sb = new StringBuilder();\n        sb.append( sz );\n        sb.setCharAt( 0, Character.toUpperCase( sb.charAt(0) ) );\n        return sb.toString();\n    }\n\n    public static String toSmallHumpName( Object sz ) {\n        return ConfigHelper.toSmallHumpName( (String) sz );\n    }\n\n    public static String toBigHumpName( Object sz ) {\n        return ConfigHelper.toBigHumpName( (String) sz );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/main/resources/logback.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<configuration>\n    <appender name=\"CONSOLE\" class=\"ch.qos.logback.core.ConsoleAppender\">\n        <encoder>\n            <pattern>%d{HH:mm:ss.SSS} [%thread] [%level] %logger{36}: %msg%n</pattern>\n        </encoder>\n    </appender>\n\n    <root level=\"INFO\">\n        <appender-ref ref=\"CONSOLE\" />\n    </root>\n</configuration>"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/major/TestTritium.java",
    "content": "package com.major;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.tritium.Tritium;\n\npublic class TestTritium {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Tritium tritium = (Tritium) Pinecone.sys().getTaskManager().add( new Tritium( args, Pinecone.sys() ) );\n            tritium.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/mc/JesusChrist.java",
    "content": "package com.mc;\n\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.tritium.Tritium;\n\npublic class JesusChrist extends Tritium {\n    public JesusChrist( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public JesusChrist( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    public void vitalize () throws Exception {\n\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/mc/TestMCClient.java",
    "content": "package com.mc;\n\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport io.netty.channel.ChannelHandlerContext;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\n\nimport com.pinecone.hydra.umc.wolf.UlfInformMessage;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.tritium.messagron.Messagron;\n\nclass Jesus extends JesusChrist {\n    public Jesus( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Jesus( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        this.testClient();\n    }\n\n    public void testClient() throws Exception {\n        Messagron servtron = new Messagron( \"\", this, new JSONMaptron( \"{\\n\" +\n                \"  \\\"Engine\\\"            : \\\"com.pinecone.tritium.messagron.Messagron\\\",\\n\" +\n                \"  \\\"Enable\\\"            : true,\\n\" +\n                \"  \\\"ExpressFactory\\\"    : \\\"com.pinecone.framework.util.lang.GenericDynamicFactory\\\",\\n\" +\n                \"\\n\" +\n                \"  \\\"Expresses\\\"         : {\\n\" +\n                \"    \\\"WolfMCExpress\\\": {\\n\" +\n                \"      \\\"Engine\\\": \\\"com.pinecone.hydra.umct.WolfMCExpress\\\"\\n\" +\n                \"    }\\n\" +\n                \"  }\\n\" +\n                \"}\" ) );\n\n        WolfMCClient wolf = new WolfMCClient( \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( 
\"Messagers.Messagers.WolfMCKingpin\" ) );\n        wolf.apply( new WolfMCExpress( servtron ) ).execute();\n        JSONObject jsonObject = new JSONMaptron(\n                \"{Messagelet:'ServiceCenter', 'do': 'queryHeistConfTPL', 'heist': 'NeteaseMusic', 'instance': 'RavageAlbums'}\"\n        );\n//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '1'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 1 fuck me\" ) ) );\n//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '2'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 2 fuck me\" ) ) );\n//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '3'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 3 fuck me\" ) ) );\n//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '4'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 4 fuck me\" ) ) );\n//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '5'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 5 fuck me\" ) ) );\n//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '6'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 6 fuck he\" ) ) );\n//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '7'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 7 fuck she\" ) ) );\n//        Debug.trace( wolf.sendSyncMsg( new UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '8'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 8 fuck it\" ) ) );\n//        Debug.trace( wolf.sendSyncMsg( new 
UlfBytesTransferMessage( new JSONMaptron( \"{Messagelet:'ServiceCenter', 'do': '9'}\" ), \"test 12345678 Messagers.Messagers.WolfMCKingpin 9 fuck those\" ) ) );\n\n\n\n//        JSONObject jo = new JSONMaptron( \"{'do': 'Morning' }\" );\n//        try ( ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); ObjectOutputStream objectStream = new ObjectOutputStream(byteStream) ) {\n//            objectStream.writeObject( jo );\n//            byte[] bytes = byteStream.toByteArray();\n//            Debug.trace( wolf.sendSyncMsg( new UlfInformMessage( bytes, 0xAEF2048 ) ) );\n//        }\n\n\n\n\n\n\n        Debug.trace( wolf.sendSyncMsg( new UlfInformMessage( jsonObject ) ).getHead().getExtraHead() );\n        Debug.trace( wolf.sendSyncMsg( new UlfInformMessage( jsonObject ) ) );\n        //wolf.sendAsynMsg( new UlfInformMessage( jsonObject ) );\n\n        wolf.sendAsynMsg( new UlfInformMessage(jsonObject), new UlfAsyncMsgHandleAdapter() {\n            @Override\n            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n                Debug.trace( \"Ajax\" ,msg );\n                Debug.trace( msg.getHead() );\n            }\n        });\n\n        //wolf.sendAsynMsg( new UlfMCMessage( jsonObject ) );\n        //wolf.sendAsynMsg( new UlfMCMessage( jsonObject ) );\n\n        wolf.sendAsynMsg( new UlfInformMessage(jsonObject), new UlfAsyncMsgHandleAdapter() {\n            @Override\n            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n                Debug.trace( \"fuck javascript\" ,msg );\n            }\n        });\n        this.getTaskManager().add( wolf );\n\n\n        this.getTaskManager().syncWaitingTerminated();\n    }\n}\n\n\npublic class TestMCClient {\n    public static void main( String[] args ) throws Exception {\n        
//String[] as = args;\n        String[] as = new String[]{ \"TestWolfMCClient=true\" };\n        Pinecone.init( (Object...cfg )->{\n            Jesus jesus = (Jesus) Pinecone.sys().getTaskManager().add( new Jesus( as, Pinecone.sys() ) );\n            jesus.vitalize();\n            return 0;\n        }, (Object[]) as );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/mc/TestMCServer.java",
    "content": "package com.mc;\n\nimport com.pinecone.hydra.umc.wolf.UlfStreamTransferMessage;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.tritium.messagron.Messagron;\n\nimport io.netty.channel.ChannelHandlerContext;\n\n\nclass Christ extends JesusChrist {\n    public Christ( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Christ( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        //this.testServer();\n        // this.testSystemServer();\n        this.testServerCos();\n    }\n\n    public void testServer() throws Exception {\n        Messagron messagron = new Messagron( \"\", this, new JSONMaptron() );\n        WolfMCServer wolf = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n\n        WolfMCExpress express = new WolfMCExpress( messagron );\n        wolf.apply( express );\n\n        wolf.execute();\n\n        this.getTaskManager().add( wolf );\n        this.getTaskManager().syncWaitingTerminated();\n    }\n\n    public void testSystemServer() throws Exception {\n//        WolfMCServer wolf   = (WolfMCServer)this.getMiddlewareDirector().getMessagersManager().getMessageNodeByName( \"WolfKing\" );\n//        wolf.execute();\n//\n//        this.getTaskManager().add( wolf );\n\n\n        
this.getTaskManager().syncWaitingTerminated();\n    }\n\n    public void testServerCos() throws Exception {\n        Messagron messagron = new Messagron( \"\", this, new JSONMaptron() );\n        WolfMCServer wolf = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n\n        wolf.apply( new UlfAsyncMsgHandleAdapter() {\n            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n                Debug.redf( rawMsg, new String( ( (UlfStreamTransferMessage) rawMsg).getBody().readAllBytes() ) );\n//                UlfStreamTransferMessage mc = (UlfStreamTransferMessage) rawMsg;\n//                Map<String,Object > jo = mc.getHead().getMapExtraHead();\n//                String dos = jo.get( \"do\" ).toString();\n//                if( dos.equals( \"queryHeistConfTPL\" ) ) {\n//                    Debug.trace( \"hahahaha\" );\n//                }\n//                if( dos.equals( \"xixi\" ) ) {\n//                    Debug.trace( \"xixi\" );\n//                }\n            }\n        });\n\n        wolf.execute();\n\n        this.getTaskManager().add( wolf );\n        this.getTaskManager().syncWaitingTerminated();\n    }\n}\n\npublic class TestMCServer {\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            Christ christ = (Christ) Pinecone.sys().getTaskManager().add( new Christ( args, Pinecone.sys() ) );\n            christ.vitalize();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/Bear.java",
    "content": "package com.protobuf;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.json.homotype.StructJSONEncoder;\n\npublic class Bear {\n    private String        name;\n    private int           force;\n    private List<Integer> values;\n    private String        type;\n\n    public String getName() {\n        return name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    public int getForce() {\n        return force;\n    }\n\n    public void setForce(int force) {\n        this.force = force;\n    }\n\n    public List<Integer> getValues() {\n        return values;\n    }\n\n    public void setValues(List<Integer> values) {\n        this.values = values;\n    }\n\n    public String getType() {\n        return type;\n    }\n\n    public void setType(String type) {\n        this.type = type;\n    }\n\n    public String toJSONString() {\n        return StructJSONEncoder.BasicEncoder.encode( this, true );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/Beaver.java",
    "content": "package com.protobuf;\n\nimport com.pinecone.hydra.umct.stereotype.Iface;\n\npublic interface Beaver {\n    @Iface\n    String cutting( String target );\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/Monkey.java",
    "content": "package com.protobuf;\n\nimport com.pinecone.framework.util.json.homotype.GenericBeanJSONEncoder;\n\npublic class Monkey {\n    public String name;\n\n    public String getName() {\n        return this.name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n\n    public String toJSONString() {\n        return GenericBeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/Parasite.java",
    "content": "package com.protobuf;\n\nimport com.pinecone.framework.util.json.homotype.DirectJSONInjector;\n\npublic class Parasite {\n    public String    name  ;\n    public long      length;\n    public int       emnus;\n\n    public Parasite() {\n\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public long getLength() {\n        return this.length;\n    }\n\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    public void setLength( long length ) {\n        this.length = length;\n    }\n\n    public String toJSONString() {\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n\n    public String toString(){\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n}\n\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/Rabbit.java",
    "content": "package com.protobuf;\n\nimport com.pinecone.framework.util.json.homotype.GenericBeanJSONEncoder;\n\npublic class Rabbit {\n    public String name;\n\n    public byte[] bytes;\n\n    public Monkey monkey;\n\n    public Monkey[] monkeys;\n\n    public Rabbit sub;\n\n    public boolean bool;\n\n    public boolean isBool() {\n        return this.bool;\n    }\n\n    public void setBool( boolean bool ) {\n        this.bool = bool;\n    }\n\n    public Rabbit getSub() {\n        return this.sub;\n    }\n\n    public void setSub( Rabbit sub ) {\n        this.sub = sub;\n    }\n\n    public Monkey getMonkey() {\n        return this.monkey;\n    }\n\n    public Monkey[] getMonkeys() {\n        return this.monkeys;\n    }\n\n    public void setMonkey( Monkey monkey ) {\n        this.monkey = monkey;\n    }\n\n    public void setMonkeys( Monkey[] monkeys ) {\n        this.monkeys = monkeys;\n    }\n\n    public byte[] getBytes() {\n        return this.bytes;\n    }\n\n    public void setBytes(byte[] bytes) {\n        this.bytes = bytes;\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    public String toJSONString() {\n        return GenericBeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/Raccoon.java",
    "content": "package com.protobuf;\n\nimport java.util.List;\n\nimport com.pinecone.hydra.umct.stereotype.Iface;\n\n//@Iface\npublic interface Raccoon {\n    @Iface\n    String scratch( String target, int time );\n\n    @Iface\n//    default String scratchA( String target, int time, byte[] bs ) {\n//        return null;\n//    }\n    default String scratchA( String target, int time, Rabbit rabbit ) {\n        return null;\n    }\n\n    @Iface\n    default void scratchV( String target, int time ) {\n\n    }\n\n    @Iface\n    default Rabbit[] scratchC( String target, int time, Rabbit[] more ) {\n        return more;\n    }\n\n    @Iface\n    default String[] scratchS( String target, int time, String[] more ) {\n        return more;\n    }\n\n    @Iface\n    default List<Rabbit> scratchList( String target, int time, List<Rabbit> more ) {\n        return more;\n    }\n\n    @Iface\n    default boolean scratchPrime( String target, int time ) { return time != 0; }\n\n    @Iface\n    default void scratchVoid() {\n\n    }\n\n//    @Iface( name = \"scratchF1\" )\n//    default String scratch( String target, long[] times ) {\n//        return null;\n//    }\n//\n//    @Iface\n//    default void nil() {\n//\n//    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/RaccoonController.java",
    "content": "package com.protobuf;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\n\n@Controller\n//@AddressMapping( {\"/fox\", \"/vulpis\"} )\n@AddressMapping( \"com.protobuf.Raccoon.\" )\npublic class RaccoonController {\n    //@AddressMapping( { \"/scratch\", \"/scratches\" } )\n    //@AddressMapping()\n    @AddressMapping( \"scratch\" )\n    public String scratch( String target, int time ) {\n        Debug.whitef( \"Raccoon invoked \" + target + time  );\n        return \"Raccoon Scratch \" + target + time;\n    }\n\n    @AddressMapping( \"scratchA\" )\n    public String scratchA( String target, int time, Rabbit map ) {\n        Debug.bluef( \"Raccoon invoked \" + target + time  );\n        Debug.bluef( map.getName(), map.bytes.length, (Object) map.bytes, map.getMonkey().name );\n        return \"Raccoon Scratch \" + target + time;\n    }\n\n\n    @AddressMapping( \"scratchV\" )\n    public void scratchV( String target, int time ) {\n        Debug.bluef( \"Raccoon invoked V\" + target + time  );\n        //return \"Raccoon Scratch \" + target + time;\n    }\n\n    @AddressMapping( \"scratchC\" )\n    public Rabbit[] scratchC( String target, int time, Rabbit[] list ) {\n        Debug.bluef( \"Raccoon invoked C\" + target + time  );\n        return list;\n    }\n\n    @AddressMapping( \"scratchS\" )\n    public String[] scratchS(String target, int time, String[] list ) {\n        Debug.bluef( \"Raccoon invoked S\" + target + time  );\n        return list;\n    }\n\n    @AddressMapping( \"scratchList\" )\n    public List<Rabbit> scratchList(String target, int time, List<Rabbit> list ) {\n        Debug.bluef( \"Raccoon invoked S\" + target + time  );\n        return list;\n    }\n\n    @AddressMapping( \"scratchPrime\" )\n    public boolean scratchPrime( String target, int time ) {\n        Debug.bluef( \"Raccoon invoked Prime\" + target 
+ time  );\n        return time != 0;\n    }\n\n\n    @AddressMapping( \"scratchVoid\" )\n    public void scratchVoid() {\n        Debug.bluef( \"Raccoon invoked Void\" );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/RaccoonKing.java",
    "content": "package com.protobuf;\n\npublic class RaccoonKing implements Raccoon {\n    @Override\n    public String scratch( String target, int time ) {\n        return \"Scratch \" + target + time;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/RedBeaver.java",
    "content": "package com.protobuf;\n\npublic class RedBeaver implements Beaver {\n    @Override\n    public String cutting( String target ) {\n        return \"A cute beaver is cutting a \" + target;\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/RedRaccoon.java",
    "content": "package com.protobuf;\n\nimport java.lang.reflect.Method;\nimport java.util.List;\nimport java.util.Set;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.google.protobuf.InvalidProtocolBufferException;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.hydra.umct.husky.compiler.MethodPrototype;\nimport com.pinecone.hydra.umct.husky.function.ArgumentRequest;\nimport com.pinecone.hydra.umct.husky.function.GenericArgumentRequest;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufEncoder;\nimport com.pinecone.ulf.util.protobuf.Options;\n\nimport javassist.ClassPool;\n\npublic class RedRaccoon implements Raccoon {\n    //@Override\n    public String scratch1( String target, int time ) {\n        try{\n            Method[] methods = Raccoon.class.getMethods();\n            GenericArgumentRequest request = new GenericArgumentRequest( Raccoon.class.getName(), methods[0].getParameterTypes() );\n\n            GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder();\n            GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder();\n            Options options = new Options();\n\n            FieldEntity[] types = request.getSegments();\n            Descriptors.Descriptor descriptor = encoder.transform( types, \"Args\", Set.of(), options );\n            Debug.trace( descriptor.getFields() );\n\n            request.setField( 0, target );\n            request.setField( 1, time );\n            DynamicMessage message = encoder.encode( descriptor, types, Set.of(), options );\n            byte[] mb = message.toByteArray();\n            message = DynamicMessage.parseFrom( descriptor, mb );\n\n            request = 
new GenericArgumentRequest( Raccoon.class.getName(), methods[0].getParameterTypes() );\n            decoder.decodeEntries( request.getSegments(), descriptor, message, Set.of(), options );\n\n\n\n            Descriptors.Descriptor retDes = encoder.transform( String.class, null, Set.of() );\n            Debug.trace( retDes.getFields() );\n            DynamicMessage retMsg = encoder.encode( retDes, request.getField(0).getValue(), Set.of(), options );\n            DynamicMessage retDy = DynamicMessage.parseFrom( retDes, retMsg.toByteArray() );\n\n            String dm = decoder.decode( String.class, retDes, retDy, Set.of(), options );\n            Debug.info(dm);\n            return \"scratch \" + dm;\n        }\n        catch ( InvalidProtocolBufferException e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public String scratch( String target, int time ) {\n        try{\n            BytecodeIfaceCompiler inspector = new BytecodeIfaceCompiler( ClassPool.getDefault() );\n            List<MethodDigest> digests = inspector.compile( Raccoon.class, false ).getMethodDigests();\n            MethodPrototype methodPrototype = (MethodPrototype)digests.get(0);\n            Descriptors.Descriptor argDes = methodPrototype.getArgumentsDescriptor();\n            Descriptors.Descriptor retDes = methodPrototype.getReturnDescriptor();\n\n\n\n\n            GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder();\n            GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder();\n            Options options = new Options();\n\n\n\n            ArgumentRequest request = methodPrototype.conformRequest( new Object[] { target, time } );\n\n            FieldEntity[] types = request.getSegments();\n            Debug.trace( argDes.getFields() );\n\n            DynamicMessage message = encoder.encode( argDes, types, Set.of(), options );\n            byte[] mb = message.toByteArray();\n            message = DynamicMessage.parseFrom( argDes, 
mb );\n\n            request = methodPrototype.conformRequest();\n            decoder.decodeEntries( request.getSegments(), argDes, message, Set.of(), options );\n\n\n            Debug.trace( retDes.getFields() );\n            DynamicMessage retMsg = encoder.encode( retDes, request.getField(0).getValue(), Set.of(), options );\n            DynamicMessage retDy = DynamicMessage.parseFrom( retDes, retMsg.toByteArray() );\n\n            String dm = decoder.decode( String.class, retDes, retDy, Set.of(), options );\n            Debug.info(dm);\n            return \"scratch \" + dm;\n        }\n        catch ( InvalidProtocolBufferException e ) {\n            return null;\n        }\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/Rpc.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf;\n\npublic final class Rpc {\n  private Rpc() {}\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistryLite registry) {\n  }\n\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistry registry) {\n    registerAllExtensions(\n        (com.google.protobuf.ExtensionRegistryLite) registry);\n  }\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_RpcRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_RpcRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_RpcResponse_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_RpcResponse_fieldAccessorTable;\n\n  public static com.google.protobuf.Descriptors.FileDescriptor\n      getDescriptor() {\n    return descriptor;\n  }\n  private static  com.google.protobuf.Descriptors.FileDescriptor\n      descriptor;\n  static {\n    java.lang.String[] descriptorData = {\n      \"\\n\\trpc.proto\\\"-\\n\\nRpcRequest\\022\\016\\n\\006method\\030\\001 \\001(\" +\n      \"\\t\\022\\017\\n\\007payload\\030\\002 \\001(\\014\\\"=\\n\\013RpcResponse\\022\\014\\n\\004cod\" +\n      \"e\\030\\001 \\001(\\005\\022\\017\\n\\007message\\030\\002 \\001(\\t\\022\\017\\n\\007payload\\030\\003 \\001(\" +\n      \"\\014B\\020\\n\\014com.protobufP\\001b\\006proto3\"\n    };\n    descriptor = com.google.protobuf.Descriptors.FileDescriptor\n      .internalBuildGeneratedFileFrom(descriptorData,\n        new com.google.protobuf.Descriptors.FileDescriptor[] {\n        });\n    internal_static_RpcRequest_descriptor =\n      getDescriptor().getMessageTypes().get(0);\n    internal_static_RpcRequest_fieldAccessorTable = new\n      
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_RpcRequest_descriptor,\n        new java.lang.String[] { \"Method\", \"Payload\", });\n    internal_static_RpcResponse_descriptor =\n      getDescriptor().getMessageTypes().get(1);\n    internal_static_RpcResponse_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_RpcResponse_descriptor,\n        new java.lang.String[] { \"Code\", \"Message\", \"Payload\", });\n  }\n\n  // @@protoc_insertion_point(outer_class_scope)\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/RpcRequest.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf;\n\n/**\n * Protobuf type {@code RpcRequest}\n */\npublic final class RpcRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:RpcRequest)\n    RpcRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use RpcRequest.newBuilder() to construct.\n  private RpcRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private RpcRequest() {\n    method_ = \"\";\n    payload_ = com.google.protobuf.ByteString.EMPTY;\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new RpcRequest();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.protobuf.Rpc.internal_static_RpcRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.protobuf.Rpc.internal_static_RpcRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.protobuf.RpcRequest.class, com.protobuf.RpcRequest.Builder.class);\n  }\n\n  public static final int METHOD_FIELD_NUMBER = 1;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object method_ = \"\";\n  /**\n   * <code>string method = 1;</code>\n   * @return The method.\n   */\n  @java.lang.Override\n  public java.lang.String getMethod() {\n    java.lang.Object ref = method_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs =\n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      method_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string method = 
1;</code>\n   * @return The bytes for method.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getMethodBytes() {\n    java.lang.Object ref = method_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b =\n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      method_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int PAYLOAD_FIELD_NUMBER = 2;\n  private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY;\n  /**\n   * <code>bytes payload = 2;</code>\n   * @return The payload.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString getPayload() {\n    return payload_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, method_);\n    }\n    if (!payload_.isEmpty()) {\n      output.writeBytes(2, payload_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, method_);\n    }\n    if (!payload_.isEmpty()) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeBytesSize(2, payload_);\n    }\n    size += 
getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.protobuf.RpcRequest)) {\n      return super.equals(obj);\n    }\n    com.protobuf.RpcRequest other = (com.protobuf.RpcRequest) obj;\n\n    if (!getMethod()\n        .equals(other.getMethod())) return false;\n    if (!getPayload()\n        .equals(other.getPayload())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + METHOD_FIELD_NUMBER;\n    hash = (53 * hash) + getMethod().hashCode();\n    hash = (37 * hash) + PAYLOAD_FIELD_NUMBER;\n    hash = (53 * hash) + getPayload().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.protobuf.RpcRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.protobuf.RpcRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.protobuf.RpcRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.protobuf.RpcRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws 
com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.protobuf.RpcRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.protobuf.RpcRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.protobuf.RpcRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.protobuf.RpcRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.protobuf.RpcRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.protobuf.RpcRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.protobuf.RpcRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.protobuf.RpcRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n    
  com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.protobuf.RpcRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code RpcRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:RpcRequest)\n      com.protobuf.RpcRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.protobuf.Rpc.internal_static_RpcRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.protobuf.Rpc.internal_static_RpcRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.protobuf.RpcRequest.class, com.protobuf.RpcRequest.Builder.class);\n    }\n\n    // Construct using com.protobuf.RpcRequest.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      
super.clear();\n      bitField0_ = 0;\n      method_ = \"\";\n      payload_ = com.google.protobuf.ByteString.EMPTY;\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.protobuf.Rpc.internal_static_RpcRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public com.protobuf.RpcRequest getDefaultInstanceForType() {\n      return com.protobuf.RpcRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.protobuf.RpcRequest build() {\n      com.protobuf.RpcRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.protobuf.RpcRequest buildPartial() {\n      com.protobuf.RpcRequest result = new com.protobuf.RpcRequest(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.protobuf.RpcRequest result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.method_ = method_;\n      }\n      if (((from_bitField0_ & 0x00000002) != 0)) {\n        result.payload_ = payload_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public 
Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof com.protobuf.RpcRequest) {\n        return mergeFrom((com.protobuf.RpcRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.protobuf.RpcRequest other) {\n      if (other == com.protobuf.RpcRequest.getDefaultInstance()) return this;\n      if (!other.getMethod().isEmpty()) {\n        method_ = other.method_;\n        bitField0_ |= 0x00000001;\n        onChanged();\n      }\n      if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) {\n        setPayload(other.getPayload());\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 10: {\n              method_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000001;\n              break;\n  
          } // case 10\n            case 18: {\n              payload_ = input.readBytes();\n              bitField0_ |= 0x00000002;\n              break;\n            } // case 18\n            default: {\n              if (!super.parseUnknownField(input, extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private java.lang.Object method_ = \"\";\n    /**\n     * <code>string method = 1;</code>\n     * @return The method.\n     */\n    public java.lang.String getMethod() {\n      java.lang.Object ref = method_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        method_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string method = 1;</code>\n     * @return The bytes for method.\n     */\n    public com.google.protobuf.ByteString\n        getMethodBytes() {\n      java.lang.Object ref = method_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b =\n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        method_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string method = 1;</code>\n     * @param value The method to set.\n     * @return This builder for chaining.\n     */\n    public Builder setMethod(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      
method_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string method = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearMethod() {\n      method_ = getDefaultInstance().getMethod();\n      bitField0_ = (bitField0_ & ~0x00000001);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string method = 1;</code>\n     * @param value The bytes for method to set.\n     * @return This builder for chaining.\n     */\n    public Builder setMethodBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      method_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n\n    private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY;\n    /**\n     * <code>bytes payload = 2;</code>\n     * @return The payload.\n     */\n    @java.lang.Override\n    public com.google.protobuf.ByteString getPayload() {\n      return payload_;\n    }\n    /**\n     * <code>bytes payload = 2;</code>\n     * @param value The payload to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPayload(com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      payload_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>bytes payload = 2;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearPayload() {\n      bitField0_ = (bitField0_ & ~0x00000002);\n      payload_ = getDefaultInstance().getPayload();\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    
@java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:RpcRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:RpcRequest)\n  private static final com.protobuf.RpcRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.protobuf.RpcRequest();\n  }\n\n  public static com.protobuf.RpcRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<RpcRequest>\n      PARSER = new com.google.protobuf.AbstractParser<RpcRequest>() {\n    @java.lang.Override\n    public RpcRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            .setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<RpcRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<RpcRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.protobuf.RpcRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/RpcRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf;\n\npublic interface RpcRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:RpcRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string method = 1;</code>\n   * @return The method.\n   */\n  java.lang.String getMethod();\n  /**\n   * <code>string method = 1;</code>\n   * @return The bytes for method.\n   */\n  com.google.protobuf.ByteString\n      getMethodBytes();\n\n  /**\n   * <code>bytes payload = 2;</code>\n   * @return The payload.\n   */\n  com.google.protobuf.ByteString getPayload();\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/RpcResponse.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf;\n\n/**\n * Protobuf type {@code RpcResponse}\n */\npublic final class RpcResponse extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:RpcResponse)\n    RpcResponseOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use RpcResponse.newBuilder() to construct.\n  private RpcResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private RpcResponse() {\n    message_ = \"\";\n    payload_ = com.google.protobuf.ByteString.EMPTY;\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new RpcResponse();\n  }\n\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return com.protobuf.Rpc.internal_static_RpcResponse_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return com.protobuf.Rpc.internal_static_RpcResponse_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            com.protobuf.RpcResponse.class, com.protobuf.RpcResponse.Builder.class);\n  }\n\n  public static final int CODE_FIELD_NUMBER = 1;\n  private int code_ = 0;\n  /**\n   * <code>int32 code = 1;</code>\n   * @return The code.\n   */\n  @java.lang.Override\n  public int getCode() {\n    return code_;\n  }\n\n  public static final int MESSAGE_FIELD_NUMBER = 2;\n  @SuppressWarnings(\"serial\")\n  private volatile java.lang.Object message_ = \"\";\n  /**\n   * <code>string message = 2;</code>\n   * @return The message.\n   */\n  @java.lang.Override\n  public java.lang.String getMessage() {\n    java.lang.Object ref = message_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) 
ref;\n    } else {\n      com.google.protobuf.ByteString bs =\n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      message_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string message = 2;</code>\n   * @return The bytes for message.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getMessageBytes() {\n    java.lang.Object ref = message_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b =\n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      message_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int PAYLOAD_FIELD_NUMBER = 3;\n  private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY;\n  /**\n   * <code>bytes payload = 3;</code>\n   * @return The payload.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString getPayload() {\n    return payload_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (code_ != 0) {\n      output.writeInt32(1, code_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {\n      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, message_);\n    }\n    if (!payload_.isEmpty()) {\n      output.writeBytes(3, payload_);\n    }\n    getUnknownFields().writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 
0;\n    if (code_ != 0) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeInt32Size(1, code_);\n    }\n    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {\n      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, message_);\n    }\n    if (!payload_.isEmpty()) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeBytesSize(3, payload_);\n    }\n    size += getUnknownFields().getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof com.protobuf.RpcResponse)) {\n      return super.equals(obj);\n    }\n    com.protobuf.RpcResponse other = (com.protobuf.RpcResponse) obj;\n\n    if (getCode()\n        != other.getCode()) return false;\n    if (!getMessage()\n        .equals(other.getMessage())) return false;\n    if (!getPayload()\n        .equals(other.getPayload())) return false;\n    if (!getUnknownFields().equals(other.getUnknownFields())) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + CODE_FIELD_NUMBER;\n    hash = (53 * hash) + getCode();\n    hash = (37 * hash) + MESSAGE_FIELD_NUMBER;\n    hash = (53 * hash) + getMessage().hashCode();\n    hash = (37 * hash) + PAYLOAD_FIELD_NUMBER;\n    hash = (53 * hash) + getPayload().hashCode();\n    hash = (29 * hash) + getUnknownFields().hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static com.protobuf.RpcResponse parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.protobuf.RpcResponse parseFrom(\n      
java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.protobuf.RpcResponse parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.protobuf.RpcResponse parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.protobuf.RpcResponse parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static com.protobuf.RpcResponse parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static com.protobuf.RpcResponse parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.protobuf.RpcResponse parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  public static com.protobuf.RpcResponse parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n\n  public static com.protobuf.RpcResponse parseDelimitedFrom(\n      
java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static com.protobuf.RpcResponse parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static com.protobuf.RpcResponse parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(com.protobuf.RpcResponse prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? 
new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code RpcResponse}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:RpcResponse)\n      com.protobuf.RpcResponseOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return com.protobuf.Rpc.internal_static_RpcResponse_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return com.protobuf.Rpc.internal_static_RpcResponse_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              com.protobuf.RpcResponse.class, com.protobuf.RpcResponse.Builder.class);\n    }\n\n    // Construct using com.protobuf.RpcResponse.newBuilder()\n    private Builder() {\n\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      bitField0_ = 0;\n      code_ = 0;\n      message_ = \"\";\n      payload_ = com.google.protobuf.ByteString.EMPTY;\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return com.protobuf.Rpc.internal_static_RpcResponse_descriptor;\n    }\n\n    @java.lang.Override\n    public com.protobuf.RpcResponse getDefaultInstanceForType() {\n      return com.protobuf.RpcResponse.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public com.protobuf.RpcResponse build() {\n      com.protobuf.RpcResponse result = 
buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public com.protobuf.RpcResponse buildPartial() {\n      com.protobuf.RpcResponse result = new com.protobuf.RpcResponse(this);\n      if (bitField0_ != 0) { buildPartial0(result); }\n      onBuilt();\n      return result;\n    }\n\n    private void buildPartial0(com.protobuf.RpcResponse result) {\n      int from_bitField0_ = bitField0_;\n      if (((from_bitField0_ & 0x00000001) != 0)) {\n        result.code_ = code_;\n      }\n      if (((from_bitField0_ & 0x00000002) != 0)) {\n        result.message_ = message_;\n      }\n      if (((from_bitField0_ & 0x00000004) != 0)) {\n        result.payload_ = payload_;\n      }\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) 
{\n      if (other instanceof com.protobuf.RpcResponse) {\n        return mergeFrom((com.protobuf.RpcResponse)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(com.protobuf.RpcResponse other) {\n      if (other == com.protobuf.RpcResponse.getDefaultInstance()) return this;\n      if (other.getCode() != 0) {\n        setCode(other.getCode());\n      }\n      if (!other.getMessage().isEmpty()) {\n        message_ = other.message_;\n        bitField0_ |= 0x00000002;\n        onChanged();\n      }\n      if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) {\n        setPayload(other.getPayload());\n      }\n      this.mergeUnknownFields(other.getUnknownFields());\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      if (extensionRegistry == null) {\n        throw new java.lang.NullPointerException();\n      }\n      try {\n        boolean done = false;\n        while (!done) {\n          int tag = input.readTag();\n          switch (tag) {\n            case 0:\n              done = true;\n              break;\n            case 8: {\n              code_ = input.readInt32();\n              bitField0_ |= 0x00000001;\n              break;\n            } // case 8\n            case 18: {\n              message_ = input.readStringRequireUtf8();\n              bitField0_ |= 0x00000002;\n              break;\n            } // case 18\n            case 26: {\n              payload_ = input.readBytes();\n              bitField0_ |= 0x00000004;\n              break;\n            } // case 26\n            default: {\n              if (!super.parseUnknownField(input, 
extensionRegistry, tag)) {\n                done = true; // was an endgroup tag\n              }\n              break;\n            } // default:\n          } // switch (tag)\n        } // while (!done)\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.unwrapIOException();\n      } finally {\n        onChanged();\n      } // finally\n      return this;\n    }\n    private int bitField0_;\n\n    private int code_ ;\n    /**\n     * <code>int32 code = 1;</code>\n     * @return The code.\n     */\n    @java.lang.Override\n    public int getCode() {\n      return code_;\n    }\n    /**\n     * <code>int32 code = 1;</code>\n     * @param value The code to set.\n     * @return This builder for chaining.\n     */\n    public Builder setCode(int value) {\n\n      code_ = value;\n      bitField0_ |= 0x00000001;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>int32 code = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearCode() {\n      bitField0_ = (bitField0_ & ~0x00000001);\n      code_ = 0;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object message_ = \"\";\n    /**\n     * <code>string message = 2;</code>\n     * @return The message.\n     */\n    public java.lang.String getMessage() {\n      java.lang.Object ref = message_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        message_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string message = 2;</code>\n     * @return The bytes for message.\n     */\n    public com.google.protobuf.ByteString\n        getMessageBytes() {\n      java.lang.Object ref = message_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b =\n            
com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        message_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string message = 2;</code>\n     * @param value The message to set.\n     * @return This builder for chaining.\n     */\n    public Builder setMessage(\n        java.lang.String value) {\n      if (value == null) { throw new NullPointerException(); }\n      message_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string message = 2;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearMessage() {\n      message_ = getDefaultInstance().getMessage();\n      bitField0_ = (bitField0_ & ~0x00000002);\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string message = 2;</code>\n     * @param value The bytes for message to set.\n     * @return This builder for chaining.\n     */\n    public Builder setMessageBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      checkByteStringIsUtf8(value);\n      message_ = value;\n      bitField0_ |= 0x00000002;\n      onChanged();\n      return this;\n    }\n\n    private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY;\n    /**\n     * <code>bytes payload = 3;</code>\n     * @return The payload.\n     */\n    @java.lang.Override\n    public com.google.protobuf.ByteString getPayload() {\n      return payload_;\n    }\n    /**\n     * <code>bytes payload = 3;</code>\n     * @param value The payload to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPayload(com.google.protobuf.ByteString value) {\n      if (value == null) { throw new NullPointerException(); }\n      payload_ = value;\n      bitField0_ |= 0x00000004;\n      onChanged();\n      return this;\n  
  }\n    /**\n     * <code>bytes payload = 3;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearPayload() {\n      bitField0_ = (bitField0_ & ~0x00000004);\n      payload_ = getDefaultInstance().getPayload();\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:RpcResponse)\n  }\n\n  // @@protoc_insertion_point(class_scope:RpcResponse)\n  private static final com.protobuf.RpcResponse DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new com.protobuf.RpcResponse();\n  }\n\n  public static com.protobuf.RpcResponse getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<RpcResponse>\n      PARSER = new com.google.protobuf.AbstractParser<RpcResponse>() {\n    @java.lang.Override\n    public RpcResponse parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      Builder builder = newBuilder();\n      try {\n        builder.mergeFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        throw e.setUnfinishedMessage(builder.buildPartial());\n      } catch (com.google.protobuf.UninitializedMessageException e) {\n        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());\n      } catch (java.io.IOException e) {\n        throw new com.google.protobuf.InvalidProtocolBufferException(e)\n            
.setUnfinishedMessage(builder.buildPartial());\n      }\n      return builder.buildPartial();\n    }\n  };\n\n  public static com.google.protobuf.Parser<RpcResponse> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<RpcResponse> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.protobuf.RpcResponse getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/RpcResponseOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf;\n\npublic interface RpcResponseOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:RpcResponse)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>int32 code = 1;</code>\n   * @return The code.\n   */\n  int getCode();\n\n  /**\n   * <code>string message = 2;</code>\n   * @return The message.\n   */\n  java.lang.String getMessage();\n  /**\n   * <code>string message = 2;</code>\n   * @return The bytes for message.\n   */\n  com.google.protobuf.ByteString\n      getMessageBytes();\n\n  /**\n   * <code>bytes payload = 3;</code>\n   * @return The payload.\n   */\n  com.google.protobuf.ByteString getPayload();\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/Slave.java",
    "content": "package com.protobuf;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.json.homotype.StructJSONEncoder;\n\npublic class Slave {\n    public String    name  ;\n    public long      length;\n    public int       emnus;\n    public Parasite  parasite;\n    public Map       atts;\n    public Object[]  li;\n\n    //public Slave     child;\n    //public List<Slave>    children;\n\n    public Slave() {\n\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public long getLength() {\n        return this.length;\n    }\n\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    public void setLength( long length ) {\n        this.length = length;\n    }\n\n    public void setParasite2( Parasite parasite ) {\n        this.parasite = parasite;\n    }\n\n    public Parasite getParasite() {\n        return this.parasite;\n    }\n\n    public void setParasite( Parasite parasite ) {\n        this.parasite = parasite;\n    }\n\n    public Map getAtts() {\n        return this.atts;\n    }\n\n    public void setAtts(Map atts) {\n        this.atts = atts;\n    }\n\n    //    public List<Slave> getChildren() {\n//        return this.children;\n//    }\n\n    public String toJSONString() {\n        return StructJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    public String toString(){\n        return StructJSONEncoder.BasicEncoder.encode( this );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/TestKafkaClient.java",
    "content": "package com.protobuf;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.umb.UMBClientException;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.UlfMBInformMessage;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\nimport com.pinecone.hydra.umb.broadcast.BroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\nimport com.pinecone.hydra.umb.kafka.KafkaClient;\nimport com.pinecone.hydra.umb.kafka.WolfMCKafkaClient;\nimport com.pinecone.hydra.umb.rocket.RocketClient;\nimport com.pinecone.hydra.umb.rocket.RocketMQClient;\nimport com.pinecone.hydra.umb.rocket.UlfRocketClient;\nimport com.pinecone.hydra.umb.rocket.WolfMCRocketClient;\nimport com.pinecone.hydra.umb.wolf.WolfMCBClient;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.tritium.Tritium;\n\nimport org.apache.kafka.common.serialization.StringDeserializer;\nimport org.apache.kafka.common.serialization.StringSerializer;\n\nimport java.io.IOException;\n\n\nclass Luben extends Tritium {\n    public Luben( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Luben( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception 
{\n        //this.testFundamental();\n        //this.testWolfMB();\n        //this.testWolfMCTB();\n        //this.testKafka();\n        //this.testWolfKafka();\n        this.testWolfMCTBKafka();\n    }\n\n    public void testFundamental() throws Exception {\n        String nameSrvAddr = \"localhost:9876\";\n        String groupName = \"testGroup\";\n        String topic = \"testTopic\";\n        String tags = \"*\";\n        String keys = \"testKeys\";\n        String body = \"This is a test message\";\n\n\n        RocketClient client = new RocketMQClient( nameSrvAddr, groupName );\n        BroadcastConsumer consumer = client.createConsumer( topic );\n        consumer.start(new UlfPackageMessageHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( byte[] body, Object[] args ) throws Exception {\n                Debug.trace( new String( body ) );\n            }\n        });\n\n\n        BroadcastProducer producer = client.createProducer();\n        producer.start();\n        producer.sendMessage( topic, body.getBytes() );\n\n        Debug.sleep( 100000 );\n    }\n\n    public void testKafka() throws UMBClientException, UMBServiceException {\n        String server = \"localhost:9092\";\n        String keySerializer = StringSerializer.class.getName();\n        String valueSerializer = StringSerializer.class.getName();\n        String topic = \"testTopic\";\n        String group = \"testGroup\";\n        String keyDeserializer = StringDeserializer.class.getName();\n        String valueDeserializer = StringDeserializer.class.getName();\n        String autoOffsetReset = \"earliest\";\n\n        KafkaClient kafkaClient = new KafkaClient( server );\n        byte[] bytes = new byte[100000];\n        for( int i=0; i< 100000; i++ ){\n            int j = 0;\n           j = i % 128;\n            bytes[i] = (byte) j;\n        }\n\n        BroadcastProducer producer = kafkaClient.createProducer();\n        producer.sendMessage( topic, bytes 
);\n\n\n        BroadcastConsumer consumer = kafkaClient.createConsumer(topic,group);\n        consumer.start(new UlfPackageMessageHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( byte[] body, Object[] args ) throws Exception {\n                Debug.trace( body.length );\n                for( byte c : body ){\n                    Debug.trace(c);\n                }\n            }\n        });\n\n    }\n\n    public void testWolfKafka() throws UMBServiceException, UMBClientException {\n        String server = \"localhost:9092\";\n        String keySerializer = StringSerializer.class.getName();\n        String valueSerializer = StringSerializer.class.getName();\n        String topic = \"testTopic\";\n        String group = \"testGroup\";\n        String keyDeserializer = StringDeserializer.class.getName();\n        String valueDeserializer = StringDeserializer.class.getName();\n        String autoOffsetReset = \"earliest\";\n\n        WolfMCKafkaClient wolfMCKafkaClient = new WolfMCKafkaClient( server );\n        UMCBroadcastProducer producer = wolfMCKafkaClient.createUlfProducer();\n        producer.sendMessage( topic,\"你好\".getBytes() );\n\n        UMCBroadcastConsumer consumer = wolfMCKafkaClient.createUlfConsumer(topic, group);\n        consumer.start( new UlfPackageMessageHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( byte[] body, Object[] args ) throws Exception {\n                Debug.trace( new String( body ) );\n            }\n        } );\n\n\n    }\n\n    public void testWolfMB() throws Exception {\n        String nameSrvAddr = \"localhost:9876\";\n        String groupName = \"testGroup\";\n        String topic = \"testTopic\";\n        String tags = \"*\";\n        String keys = \"testKeys\";\n\n\n        UlfRocketClient client = new WolfMCRocketClient( nameSrvAddr, groupName );\n        UMCBroadcastConsumer consumer = client.createUlfConsumer( topic );\n        consumer.start(new 
UMCTExpressHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n                if ( msg.evinceTransferMessage() != null ) {\n                    Debug.greenfs( msg.getHead(), new String( (byte[]) msg.evinceTransferMessage().getBody() ) );\n                }\n                else {\n                    Debug.redf( msg.getHead() );\n                }\n            }\n        });\n\n\n        UMCBroadcastProducer producer = client.createUlfProducer();\n        producer.start();\n\n        producer.sendMessage( topic, new UlfMBInformMessage( new JSONMaptron( \"{ path: '/user/getName ' }\" ) ) );\n        //producer.sendMessage( topic, new UlfMBInformMessage( new JSONMaptron( \"{ msg: 'Jesus, Mr.Garrison! ' }\" ), 0xFA ) );\n        //producer.sendMessage( topic, new UlfBytesTransferMessage( new JSONMaptron( \"{ msg: 'Jesus, Mr.Garrison! ' }\" ), \"fuck you\" ) );\n\n\n        Debug.sleep( 100000 );\n    }\n\n    public void testWolfMCTBKafka() throws IOException {\n        String server = \"b-serverkingpin:9092\";\n        String keySerializer = StringSerializer.class.getName();\n        String valueSerializer = StringSerializer.class.getName();\n        String topic = \"testTopic\";\n        String group = \"testGroup\";\n        String keyDeserializer = StringDeserializer.class.getName();\n        String valueDeserializer = StringDeserializer.class.getName();\n        String autoOffsetReset = \"earliest\";\n\n        WolfMCBClient client = new WolfMCBClient(new WolfMCKafkaClient(server), \"\", this, WolfMCExpress.class);\n\n        client.compile( Raccoon.class, false );\n        BroadcastControlProducer producer = client.createBroadcastControlProducer();\n\n\n        producer.start();\n        for ( int i = 0; i < 1e4; i++ ) {\n            producer.issueInform( topic, \"com.protobuf.Raccoon.scratch\", \"fuck you !\", 2025 
);\n        }\n\n\n        Raccoon raccoon = producer.getIface( Raccoon.class, topic );\n        //raccoon.scratch(\"haha, I am XiaoMing\", 5202 );\n\n//        Rabbit rabbit = new Rabbit();\n//        rabbit.name = \"rabbit\";\n//        rabbit.bytes = new byte[999*1024];\n//        Arrays.fill(rabbit.bytes, (byte)43);\n//        Debug.bluef( raccoon.scratchA( \"DP you!\", 741741, rabbit ) );\n\n\n        BroadcastControlConsumer consumer = client.createBroadcastControlConsumer(topic,group);\n        RaccoonController controller  = new RaccoonController();\n        consumer.registerController( controller );\n        consumer.start();\n\n\n\n        Debug.sleep( 100000 );\n    }\n\n\n}\n\n\npublic class TestKafkaClient {\n    public static void main(String[] args) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n            Luben luben = (Luben) Pinecone.sys().getTaskManager().add( new Luben( args, Pinecone.sys() ) );\n            luben.vitalize();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/TestProtobuf.java",
    "content": "package com.protobuf;\n\nimport java.io.FileOutputStream;\nimport java.lang.reflect.Method;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.protobuf.ByteString;\nimport com.google.protobuf.DescriptorProtos;\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.mc.JesusChrist;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.framework.lang.field.GenericStructure;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.ClassUtils;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.umc.msg.ChannelControlBlock;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.wolf.UlfAsyncMsgHandleAdapter;\nimport com.pinecone.hydra.umc.wolf.UlfInformMessage;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler;\nimport com.pinecone.hydra.umct.husky.function.GenericArgumentRequest;\nimport com.pinecone.ulf.util.protobuf.GenericBeanProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.GenericBeanProtobufEncoder;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufDecoder;\nimport com.pinecone.ulf.util.protobuf.GenericFieldProtobufEncoder;\nimport com.pinecone.ulf.util.protobuf.Options;\nimport com.pinecone.tritium.messagron.Messagron;\n\nimport io.netty.channel.ChannelHandlerContext;\nimport javassist.ClassPool;\n\nclass DynamicProtobufBuilder {\n    public static Descriptors.Descriptor buildRpcRequestDescriptor() throws Descriptors.DescriptorValidationException {\n        DescriptorProtos.DescriptorProto rpcRequestProto = 
DescriptorProtos.DescriptorProto.newBuilder()\n                .setName(\"RpcRequest\")\n                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()\n                        .setName(\"method\")\n                        .setNumber(1)\n                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING))\n                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()\n                        .setName(\"payload\")\n                        .setNumber(2)\n                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_BYTES))\n                .build();\n\n        DescriptorProtos.FileDescriptorProto fileDescriptorProto = DescriptorProtos.FileDescriptorProto.newBuilder()\n                .setName(\"rpc.proto\")\n                .addMessageType(rpcRequestProto)\n                .build();\n\n        Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(fileDescriptorProto, new Descriptors.FileDescriptor[0]);\n        return fileDescriptor.findMessageTypeByName(\"RpcRequest\");\n    }\n\n    public static Descriptors.Descriptor buildRpcResponseDescriptor() throws Descriptors.DescriptorValidationException {\n        DescriptorProtos.DescriptorProto rpcResponseProto = DescriptorProtos.DescriptorProto.newBuilder()\n                .setName(\"RpcResponse\")\n                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()\n                        .setName(\"code\")\n                        .setNumber(1)\n                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32))\n                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()\n                        .setName(\"message\")\n                        .setNumber(2)\n                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING))\n                .addField(DescriptorProtos.FieldDescriptorProto.newBuilder()\n                        .setName(\"payload\")\n       
                 .setNumber(3)\n                        .setType(DescriptorProtos.FieldDescriptorProto.Type.TYPE_BYTES))\n                .build();\n\n        DescriptorProtos.FileDescriptorProto fileDescriptorProto = DescriptorProtos.FileDescriptorProto.newBuilder()\n                .setName(\"rpc.proto\")\n                .addMessageType(rpcResponseProto)\n                .build();\n\n        Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(fileDescriptorProto, new Descriptors.FileDescriptor[0]);\n        return fileDescriptor.findMessageTypeByName(\"RpcResponse\");\n    }\n}\n\nclass Appleby extends JesusChrist {\n    public Appleby( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Appleby( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n//        RpcRequest request = RpcRequest.newBuilder()\n//                .setMethod(\"haha\")\n//                .setPayload( ByteString.copyFrom( new byte[]{123} ) )\n//                .build();\n//\n//        byte[] serializedData = request.toByteArray();\n//\n//        RpcRequest deserializedReq = RpcRequest.parseFrom(serializedData);\n//        Debug.trace( deserializedReq.getMethod() );\n\n        //this.testDynamic();\n\n        //this.testDynamicUMCT();\n\n        //this.testElementRroto();\n\n        //this.testFieldEntry();\n\n        //this.testReflect();\n\n        //this.testManualRPCServer();\n\n        //this.testManualRPCClient();\n\n        //this.testStructure();\n\n        //this.testIfacInspector();\n    }\n\n    private void testDynamic() throws Exception {\n        Descriptors.Descriptor rpcRequestDescriptor = DynamicProtobufBuilder.buildRpcRequestDescriptor();\n        Descriptors.Descriptor rpcResponseDescriptor = DynamicProtobufBuilder.buildRpcResponseDescriptor();\n\n\n        String method  = \"echo\";\n   
     byte[] payload = \"Dragon King\".getBytes();\n\n        DynamicMessage request1 = DynamicMessage.newBuilder(rpcRequestDescriptor)\n                .setField(rpcRequestDescriptor.findFieldByName(\"method\"), method)\n                .setField(rpcRequestDescriptor.findFieldByName(\"payload\"), com.google.protobuf.ByteString.copyFrom(payload))\n                .build();\n\n        byte[] rd = request1.toByteArray();\n\n\n        DynamicMessage request = DynamicMessage.parseFrom(rpcRequestDescriptor, rd);\n        String method1 = (String) request.getField(rpcRequestDescriptor.findFieldByName(\"method\"));\n        ByteString payload1 = (ByteString) request.getField(rpcRequestDescriptor.findFieldByName(\"payload\"));\n\n        DynamicMessage response = DynamicMessage.newBuilder(rpcResponseDescriptor)\n                .setField(rpcResponseDescriptor.findFieldByName(\"code\"), 200)\n                .setField(rpcResponseDescriptor.findFieldByName(\"message\"), \"Success\")\n                .setField(rpcResponseDescriptor.findFieldByName(\"payload\"), payload1)\n                .build();\n\n\n        FileOutputStream ofs = new FileOutputStream( \"e:/sss.bin\" );\n        ofs.write( rd );\n        ofs.close();\n        Debug.greenf( rd );\n\n        Debug.infoSyn( ( (ByteString)DynamicMessage.parseFrom(rpcRequestDescriptor, rd).getField(rpcRequestDescriptor.findFieldByName(\"payload\")) ).toStringUtf8() );\n    }\n\n    private void testDynamicUMCT() throws Exception {\n        Slave slave = JSON.unmarshal( \"{ name:Slave, length:1234, parasite:{ name: parasite, length:20241102 }, atts: { key:val }, li:[1,2,3, 'ssss'],\" +\n                \"children: [{ name:SlaveChild, length:137, parasite:{ name: parasitec, length:20241117 }  } ] }\", Slave.class );\n        Debug.trace( 2, slave );\n////\n        GenericBeanProtobufEncoder encoder = new GenericBeanProtobufEncoder();\n        Descriptors.Descriptor descriptor = encoder.transform( Slave.class, slave, Set.of() );\n   
     Debug.trace( descriptor.getFields() );\n\n        Options options = new Options();\n        DynamicMessage message = encoder.encode( descriptor, slave, Set.of(), options );\n        Debug.trace( message.getAllFields(), descriptor.findFieldByName( \"parasite\" ).getMessageType().getFields() );\n\n        byte[] rd = message.toByteArray();\n        DynamicMessage unmarshaled = DynamicMessage.parseFrom(descriptor, rd);\n        Debug.trace( unmarshaled.getAllFields() );\n\n        GenericBeanProtobufDecoder decoder = new GenericBeanProtobufDecoder();\n        Map dm = decoder.decodeMap( descriptor, unmarshaled, Set.of(), options );\n        Debug.purplef( dm );\n\n        Slave neo = decoder.decode( Slave.class, descriptor, unmarshaled, Set.of(), options );\n        Debug.purplef( neo );\n\n\n\n//        Map bear = JSON.unmarshal( \"{ name: 'William', force: 320, values: [1,2,3], type: grizzly, trait: { species: mammal } }\", Map.class );\n//        Debug.trace( bear );\n//        Options options = new Options();\n//        Descriptors.Descriptor descriptor = encoder.transform( Map.class, bear,  Set.of(), options );\n//        Debug.trace( descriptor.getFields() );\n//        Debug.trace( descriptor.findFieldByName( \"values\" ).isRepeated() );\n//        Debug.trace( descriptor.findFieldByName( \"trait\" ).getMessageType().getFields() );\n//\n//        DynamicMessage message = encoder.encode( descriptor, bear, Set.of(), options );\n//        Debug.trace( message.getAllFields(), descriptor.findFieldByName( \"trait\" ).getMessageType().getFields() );\n//        Debug.trace( message.getField( descriptor.findFieldByName( \"values\" )  ) );\n//\n//        byte[] rd = message.toByteArray();\n//        DynamicMessage unmarshaled = DynamicMessage.parseFrom(descriptor, rd);\n//        Debug.trace( unmarshaled.getAllFields() );\n//        Debug.trace( unmarshaled.getField( descriptor.findFieldByName( \"values\" )  ) );\n//\n//\n//        GenericBeanProtobufDecoder decoder 
= new GenericBeanProtobufDecoder();\n//        Map dm = decoder.decodeMap( descriptor, unmarshaled, Set.of(), options );\n//        Debug.purplef( dm );\n\n\n//        FileOutputStream ofs = new FileOutputStream( \"e:/sss.bin\" );\n//        ofs.write( rd );\n//        ofs.close();\n//        Debug.greenf( rd );\n\n//        Bear bear = JSON.unmarshal( \"{ name: 'William', force: 320, values: [1,2,3], type: grizzly }\", Bear.class );\n//        Debug.trace( bear );\n//        Options options = new Options();\n//        Descriptors.Descriptor descriptor = encoder.transform( Bear.class, bear,  Set.of(), options );\n//        Debug.trace( descriptor.findFieldByName( \"values\" ).isRepeated() );\n//\n//        DynamicMessage message = encoder.decode( descriptor, bear, Set.of(), options );\n//        Debug.trace( message.getAllFields() );\n//        Debug.trace( message.getField( descriptor.findFieldByName( \"values\" )  ) );\n\n\n    }\n\n    private void testElementRroto() throws Exception {\n        String sz = \"miaomiao\";\n\n        GenericBeanProtobufEncoder encoder = new GenericBeanProtobufEncoder();\n        Descriptors.Descriptor descriptor = encoder.transform( String.class, sz, Set.of() );\n        Debug.trace( descriptor.getFields() );\n\n        Options options = new Options();\n        DynamicMessage message = encoder.encode( descriptor, sz, Set.of(), options );\n        Debug.trace( message.getAllFields() );\n\n        GenericBeanProtobufDecoder decoder = new GenericBeanProtobufDecoder();\n        var dm = decoder.decode( String.class, descriptor, message, Set.of(), options );\n        Debug.purplef( dm );\n    }\n\n    private void testFieldEntry() throws Exception {\n        GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder();\n\n\n        Map bear = JSON.unmarshal( \"{ name: 'William', force: 320, values: [1,2,3], type: grizzly, trait: { species: mammal } }\", Map.class );\n        Debug.trace( bear );\n        Options options = new 
Options();\n\n        FieldEntity[] entities = FieldEntity.from( bear );\n\n\n        Descriptors.Descriptor descriptor = encoder.transform( entities, \"Args\", Set.of(), options );\n        Debug.trace( descriptor.getFields() );\n        Debug.trace( descriptor.findFieldByName( \"values\" ).isRepeated() );\n        Debug.trace( descriptor.findFieldByName( \"trait\" ).getMessageType().getFields() );\n\n        DynamicMessage message = encoder.encode( descriptor, entities, Set.of(), options );\n        Debug.trace( message.getAllFields(), descriptor.findFieldByName( \"trait\" ).getMessageType().getFields() );\n        Debug.trace( message.getField( descriptor.findFieldByName( \"values\" )  ) );\n\n        byte[] rd = message.toByteArray();\n        DynamicMessage unmarshaled = DynamicMessage.parseFrom(descriptor, rd);\n        Debug.trace( unmarshaled.getAllFields() );\n        Debug.trace( unmarshaled.getField( descriptor.findFieldByName( \"values\" )  ) );\n\n\n\n        FieldEntity[] types = FieldEntity.typeFrom( bear );\n\n        GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder();\n        Map.Entry<String, Object>[] kvs = decoder.decodeEntries( descriptor, unmarshaled, Set.of(), options );\n        Debug.trace( kvs );\n\n        decoder.decodeEntries( types, descriptor, unmarshaled, Set.of(), options );\n        Debug.trace( types[4].getType() );\n\n        Object[] vals = decoder.decodeValues( types, descriptor, unmarshaled, Set.of(), options );\n        Debug.trace( vals );\n    }\n\n    private void testReflect() throws Exception {\n        GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder();\n        Options options = new Options();\n\n        GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder();\n\n        Method[] methods = Raccoon.class.getMethods();\n        for ( Method method : methods ) {\n            FieldEntity[] types = FieldEntity.from( method.getParameterTypes() );\n            
Debug.trace( types );\n\n            types[ 0 ].setValue( \"red_raccoon\" );\n            types[ 1 ].setValue( 12345L );\n\n            Descriptors.Descriptor descriptor = encoder.transform( types, \"Args\", Set.of(), options );\n            Debug.trace( descriptor.getFields() );\n\n            DynamicMessage message = encoder.encode( descriptor, types, Set.of(), options );\n            Debug.trace( message.getAllFields() );\n\n            types = FieldEntity.from( method.getParameterTypes() );\n            Object[] vals = decoder.decodeValues( types, descriptor, message, Set.of(), options );\n            Debug.trace( vals );\n        }\n    }\n\n    private void testManualRPCServer() throws Exception {\n        Messagron messagron = new Messagron( \"\", this, new JSONMaptron() );\n        WolfMCServer wolf = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n\n        Method[] methods = Raccoon.class.getMethods();\n        Class<? 
> retType = methods[0].getReturnType();\n        GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder();\n        Options options = new Options();\n\n        String sz = \"xixihaha\";\n        Descriptors.Descriptor descriptor = encoder.transform( String.class, sz, Set.of() );\n        Debug.trace( descriptor.getFields() );\n\n        DynamicMessage message = encoder.encode( descriptor, sz, Set.of(), options );\n        Debug.trace( message.getAllFields() );\n        wolf.apply( new UlfAsyncMsgHandleAdapter() {\n            public void onSuccessfulMsgReceived( Medium medium, ChannelControlBlock block, UMCMessage msg, ChannelHandlerContext ctx, Object rawMsg ) throws Exception {\n                UlfInformMessage mc = (UlfInformMessage) rawMsg;\n                byte[]bytes = (byte[]) mc.getHead().getExtraHead();\n\n\n                Method[] methods = Raccoon.class.getMethods();\n                FieldEntity[] types = FieldEntity.from( methods[0].getParameterTypes() );\n                Debug.trace( types );\n\n                GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder();\n                Options options = new Options();\n\n                Descriptors.Descriptor descriptor = encoder.transform( types, \"Args\", Set.of(), options );\n                Debug.trace( descriptor.getFields() );\n\n                DynamicMessage unmarshaled = DynamicMessage.parseFrom(descriptor, bytes);\n                GenericFieldProtobufDecoder decoder = new GenericFieldProtobufDecoder();\n                types = FieldEntity.from( methods[0].getParameterTypes() );\n                Object[] vals = decoder.decodeValues( types, descriptor, unmarshaled, Set.of(), options );\n                Debug.trace( vals );\n\n\n                String sz = vals[0].toString();\n                Descriptors.Descriptor retDes = encoder.transform( String.class, sz, Set.of() );\n                Debug.trace( retDes.getFields() );\n\n                DynamicMessage retMsg = 
encoder.encode( retDes, sz, Set.of(), options );\n                block.getTransmit().sendMsg(new UlfInformMessage(retMsg.toByteArray()));\n\n            }\n        });\n\n        wolf.execute();\n\n        this.getTaskManager().add( wolf );\n        //this.getTaskManager().syncWaitingTerminated();\n    }\n\n\n    private void testManualRPCClient() throws Exception {\n        Messagron servtron = new Messagron( \"\", this, new JSONMaptron( \"{\\n\" +\n                \"  \\\"Engine\\\"            : \\\"com.pinecone.tritium.messagron.Messagron\\\",\\n\" +\n                \"  \\\"Enable\\\"            : true,\\n\" +\n                \"  \\\"ExpressFactory\\\"    : \\\"com.pinecone.framework.util.lang.GenericDynamicFactory\\\",\\n\" +\n                \"\\n\" +\n                \"  \\\"Expresses\\\"         : {\\n\" +\n                \"    \\\"WolfMCExpress\\\": {\\n\" +\n                \"      \\\"Engine\\\": \\\"com.pinecone.hydra.umct.WolfMCExpress\\\"\\n\" +\n                \"    }\\n\" +\n                \"  }\\n\" +\n                \"}\" ) );\n\n        WolfMCClient wolf = new WolfMCClient( \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" ) );\n        wolf.apply( new WolfMCExpress( servtron ) ).execute();\n\n\n\n\n        Method[] methods = Raccoon.class.getMethods();\n        FieldEntity[] types = FieldEntity.from( methods[0].getParameterTypes() );\n        Debug.trace( types );\n\n        GenericFieldProtobufEncoder encoder = new GenericFieldProtobufEncoder();\n        Options options = new Options();\n\n        Descriptors.Descriptor descriptor = encoder.transform( types, \"Args\", Set.of(), options );\n        Debug.trace( descriptor.getFields() );\n\n        types[0].setValue(\"fuck you\");\n        types[1].setValue(2024);\n        DynamicMessage message = encoder.encode( descriptor, types, Set.of(), options );\n        Debug.trace( message.getAllFields() );\n\n\n\n        
Debug.sleep( 500 );\n        UMCMessage retMsg = wolf.sendSyncMsg(new UlfInformMessage(message.toByteArray()));\n        if(retMsg instanceof UlfInformMessage) {\n            Debug.trace(retMsg.getHead().getExtraHead());\n\n\n            Descriptors.Descriptor retDes = encoder.transform( String.class, \"\", Set.of() );\n            Debug.trace( retDes.getFields() );\n\n            DynamicMessage retDy = DynamicMessage.parseFrom( retDes, (byte[])retMsg.getHead().getExtraHead() );\n            GenericBeanProtobufDecoder decoder = new GenericBeanProtobufDecoder();\n            var dm = decoder.decode( String.class, retDes, retDy, Set.of(), options );\n            Debug.info(dm);\n        }\n        this.getTaskManager().add( wolf );\n        this.getTaskManager().syncWaitingTerminated();\n    }\n\n\n    protected void testStructure() throws Exception {\n        GenericStructure structure = new GenericStructure( \"test.red\", 3 );\n        structure.setDataField( 0, \"name\", \"test\" );\n        structure.setDataField( 1, \"t1\", \"v1\" );\n        structure.setDataField( 2, \"t2\", new JSONMaptron( \"{ k: v}\" ) );\n\n        Debug.trace( structure, structure.findDataField( \"t2\" ), structure.findTextField( \"__NAME__\" ), structure.findTextField( \"sss\" ) );\n\n        structure.resize( 4 );\n        structure.setDataField( 3, \"t3\", 3 );\n        Debug.trace( structure );\n\n        //structure.setDataOffset( 2 );\n        //structure.setTextOffset( 3 );\n        Debug.trace( structure, structure.size(), structure.capacity() );\n\n\n\n\n\n\n        Method method = ClassUtils.getFirstMethodByName( Raccoon.class, \"scratch\" );\n        if( method != null ) {\n            GenericArgumentRequest request = new GenericArgumentRequest(\n                    Raccoon.class.getName(), method.getParameterTypes()\n            );\n            Debug.trace( request, request.getAddressPath(), request.getInterceptedPath(), request.getInterceptorName(), request.getSegments() );\n 
       }\n\n        Raccoon raccoon = new RedRaccoon();\n        Debug.trace( raccoon.scratch( \"you\", 166 ) );\n    }\n\n    protected void testIfacInspector() throws Exception {\n        BytecodeIfaceCompiler inspector = new BytecodeIfaceCompiler( ClassPool.getDefault() );\n\n        Debug.trace( inspector.compile( Raccoon.class, false ).getMethodDigests() );\n    }\n\n\n\n\n}\n\npublic class TestProtobuf {\n    public static void main( String[] args ) throws Exception {\n        //String[] as = args;\n        String[] as = new String[]{ \"TestWolfMCClient=true\" };\n        Pinecone.init( (Object...cfg )->{\n            Appleby appleby = (Appleby) Pinecone.sys().getTaskManager().add( new Appleby( as, Pinecone.sys() ) );\n            appleby.vitalize();\n            return 0;\n        }, (Object[]) as );\n    }\n}"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/TestRPCSystem.java",
    "content": "package com.protobuf;\n\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.concurrent.atomic.AtomicInteger;\n\nimport com.mc.JesusChrist;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.hydra.uma.HuskyDuplexExpress;\nimport com.pinecone.hydra.uma.wolf.WolfAppointClient;\nimport com.pinecone.hydra.uma.wolf.WolfAppointServer;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointClient;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointServer;\nimport com.pinecone.hydra.umct.husky.compiler.ClassDigest;\nimport com.pinecone.hydra.umct.husky.machinery.HuskyMappingLoader;\nimport com.pinecone.hydra.umct.husky.machinery.MultiMappingLoader;\nimport com.pinecone.hydra.umct.mapping.BytecodeControllerInspector;\nimport com.pinecone.hydra.umct.mapping.MappingDigest;\nimport com.pinecone.hydra.umct.husky.compiler.BytecodeIfaceCompiler;\nimport com.pinecone.hydra.umct.husky.compiler.DynamicMethodPrototype;\nimport com.pinecone.hydra.umct.husky.compiler.MethodDigest;\nimport com.pinecone.tritium.messagron.Messagron;\n\nimport javassist.ClassPool;\n\nclass Jeff extends JesusChrist {\n    public Jeff( String[] args, CascadeSystem parent ) {\n        this(args, null, parent);\n    }\n\n    public Jeff( String[] args, String szName, CascadeSystem parent ) {\n        super(args, szName, parent);\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        //this.testProtoRPCServer();\n\n        //this.testProtoRPCClient();\n\n        //this.testIfaceProxy();\n\n        //this.testController();\n\n        //this.testProtoRPCServerController();\n\n      
  //this.testClassScanner();\n\n        this.testDuplex();\n\n    }\n\n    private void testProtoRPCServer() throws Exception {\n        Messagron messagron = new Messagron( \"\", this, new JSONMaptron() );\n\n        WolfMCServer wolf1 = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n        WolfAppointServer wolf = new WolfAppointServer( wolf1 );\n\n\n        BytecodeIfaceCompiler inspector = new BytecodeIfaceCompiler( ClassPool.getDefault() );\n        List<MethodDigest> digests = inspector.compile( Raccoon.class, false ).getMethodDigests();\n        MethodDigest digest = digests.get( 0 );\n        DynamicMethodPrototype prototype = (DynamicMethodPrototype) digest;\n//\n//\n//        wolf.getDefaultDeliver().registerController(\"com.protobuf.Raccoon.scratch\", new MessageHandler() {\n//            @Override\n//            public String getAddressMapping() {\n//                return null;\n//            }\n//\n//            @Override\n//            public Object invoke( Object... 
args ) throws Exception {\n//                Debug.purplef( args );\n//\n//                return \"miaomiao\";\n//            }\n//\n//            @Override\n//            public List<String> getArgumentsKey() {\n//                return null;\n//            }\n//\n//            @Override\n//            public Object getReturnDescriptor() {\n//                return prototype.getReturnDescriptor();\n//            }\n//\n//            @Override\n//            public Object getArgumentsDescriptor() {\n//                return prototype.getArgumentsDescriptor();\n//            }\n//        });\n\n\n        RaccoonKing raccoonKing = new RaccoonKing();\n        wolf.registerInstance( raccoonKing, Raccoon.class );\n\n        wolf.execute();\n\n        this.getTaskManager().add( wolf );\n        //this.getTaskManager().syncWaitingTerminated();\n    }\n\n    private void testProtoRPCClient() throws Exception {\n        WolfAppointClient wolf = new WolfAppointClient( new WolfMCClient( 2048, \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" ) ) );\n        wolf.execute();\n\n        wolf.compile( Raccoon.class, false );\n        DynamicMethodPrototype digest = (DynamicMethodPrototype)wolf.queryMethodDigest( \"com.protobuf.Raccoon.scratch\" );\n        Debug.sleep( 500 );\n\n//        wolf.invokeInformAsyn(digest, new Object[]{\"fuck you\", 2024}, new AsynReturnHandler() {\n//            @Override\n//            public void onSuccessfulReturn( Object ret ) throws Exception {\n//                Debug.greenf( ret );\n//            }\n//\n//            @Override\n//            public void onErrorMsgReceived( UMCMessage msg ) throws Exception {\n//\n//            }\n//        });\n\n        Debug.greenf( wolf.invokeInform(digest, \"a\", 0 ) );\n\n\n        boolean testParallel = true;\n        if ( testParallel ) {\n            final AtomicInteger ai = new AtomicInteger();\n\n            for ( int j = 0; j < 10; 
++j ) {\n                final int id = j;\n                Thread thread = new Thread(()->{\n                    for ( int i = 0; i < 1e3; ++i ) {\n                        try {\n                            Debug.greenfs( wolf.invokeInform(digest, \"afd\", id + 7700 ), ai.getAndIncrement() );\n                        }\n                        catch (IOException e) {\n                            e.printStackTrace();\n                        }\n                    }\n                });\n                thread.start();\n            }\n        }\n\n        //long s = System.currentTimeMillis();\n        for ( int i = 0; i < 1e2; ++i ) {\n            Debug.greenf( wolf.invokeInform(digest, \"afd\", i ) );\n        }\n        //Debug.redfs( System.currentTimeMillis() - s );\n\n        Debug.sleep( 1000000 );\n\n        this.getTaskManager().add( wolf );\n        this.getTaskManager().syncWaitingTerminated();\n    }\n\n    protected void testIfaceProxy() throws Exception {\n        WolfAppointClient wolf = new WolfAppointClient( new WolfMCClient( \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" ) ) );\n        wolf.execute();\n\n        wolf.compile( Raccoon.class, false );\n\n        Raccoon pRaccoon = wolf.getIface( Raccoon.class );\n        Debug.trace( pRaccoon.scratch( \"tree \", 9133 ) );\n        Debug.trace( pRaccoon.scratch( \"tref \", 9132 ) );\n        Debug.trace( pRaccoon.scratch( \"treg \", 9131 ) );\n        Debug.trace( pRaccoon.scratch( \"treh \", 9130 ) );\n\n        this.getTaskManager().add( wolf );\n        this.getTaskManager().syncWaitingTerminated();\n\n    }\n\n    protected void testController() throws Exception {\n        BytecodeControllerInspector inspector = new BytecodeControllerInspector( ClassPool.getDefault() );\n\n        List<MappingDigest > digests = inspector.characterize( RaccoonController.class );\n        Debug.greenf( digests );\n    }\n\n    private void 
testProtoRPCServerController() throws Exception {\n        Messagron messagron = new Messagron( \"\", this, new JSONMaptron() );\n\n        WolfMCServer wolf1 = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n        WolfAppointServer wolf = new WolfAppointServer( wolf1 );\n\n        RaccoonController controller  = new RaccoonController();\n\n        wolf.registerController( controller );\n\n        wolf.execute();\n\n        this.getTaskManager().add( wolf );\n\n        this.testProtoRPCClient();\n        //this.testIfaceProxy();\n    }\n\n    private void testClassScanner() throws Exception {\n        DynamicFactory factory = new GenericDynamicFactory();\n        WolfMCServer wolf1 = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n        WolfAppointServer wolf = new WolfAppointServer( wolf1 );\n\n        factory.getClassScope().addScope( \"com.protobuf\" );\n        MultiMappingLoader mappingLoader = new HuskyMappingLoader( factory, wolf.getMCTTransformer() );\n        mappingLoader.updateScope();\n\n        Debug.trace( wolf );\n    }\n\n    private void testDuplex() throws Exception {\n        Messagron messagron = new Messagron( \"\", this, new JSONMaptron() );\n\n        WolfMCServer wolfKing = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n\n\n        WolvesAppointServer wolf = new WolvesAppointServer(wolfKing, HuskyDuplexExpress.class);\n\n//        wolfKing.registerChannelInactiveHandler(new ChannelInactiveHandler() {\n//            @Override\n//            public boolean afterChannelInactive( ChannelControlBlock ccb ) throws ChannelHandleException 
{\n//                Debug.bluefs( ccb.getChannel().getChannelID(), ccb.getChannel().getIdentityID() );\n//                ChannelPool pool = wolf.getUMCTExpress().getPoolByClientId( ccb.getChannel().getIdentityID() );\n//                if ( pool != null ) {\n//                    Debug.redfs( pool.isEmpty() );\n//                }\n//                return false;\n//            }\n//        });\n\n        RaccoonController controller  = new RaccoonController();\n\n        wolf.registerController( controller );\n\n        wolf.execute();\n\n\n        this.testDuplexClient();\n\n\n        Debug.sleep( 100 );\n\n        ClassDigest digest = wolf.compile( Raccoon.class, false );\n\n//        for ( int i = 0; i < 2; i++ ) {\n//            wolf.invokeInformAsyn(2048, \"com.protobuf.Raccoon.scratch\", new Object[]{\"shit\", 123}, new AsynReturnHandler() {\n//                @Override\n//                public void onSuccessfulReturn( Object ret ) throws Exception {\n//                    Debug.redfs( ret );\n//                }\n//\n//                @Override\n//                public void onErrorMsgReceived( UMCMessage msg ) throws Exception {\n//                    Debug.redfs( msg );\n//                }\n//            });\n//        }\n\n\n        //Debug.greenf( wolf.invokeInform( 2048, \"com.protobuf.Raccoon.scratch\", \"fuck you\", 2025 ) );\n        String[] ss = new String[] { \"abc\", \"efg\" };\n        //Debug.greenf( wolf.invokeInform( 2048, \"com.protobuf.Raccoon.scratchS\", \"fuck you\", 2025, ss ) );\n\n        Rabbit rabbit = new Rabbit();\n        rabbit.name = \"rabbit\";\n        rabbit.bool = true;\n        rabbit.bytes = new byte[] { 1,2,3 };\n        Monkey monkey = new Monkey();\n        monkey.name = \"monkey\";\n        rabbit.setMonkey( monkey );\n        rabbit.setMonkeys( new Monkey[] { monkey, monkey } );\n\n        Rabbit sub = new Rabbit();\n        sub.setName( \"haha\" );\n        rabbit.setSub( sub );\n\n        //Rabbit[] args = new 
Rabbit[] { rabbit };\n        List<Rabbit> args = List.of(rabbit);\n        //Debug.greenf( wolf.invokeInform( 2048, \"com.protobuf.Raccoon.scratchA\", \"fuck you\", 2025, rabbit ) );\n\n        //Debug.greenf( wolf.invokeInform( 2048, \"com.protobuf.Raccoon.scratchC\", \"fuck you\", 2025, args ) );\n\n        Debug.greenf( wolf.invokeInform( 2048, \"com.protobuf.Raccoon.scratchList\", \"fuck you\", 2025, args ) );\n        //Debug.greenf( wolf.invokeInform( 2048, \"com.protobuf.Raccoon.scratchVoid\" ) );\n\n        //Debug.greenf( wolf.invokeInform( 2048, \"com.protobuf.Raccoon.scratchPrime\", \"fuck you\", 12025 ) );\n\n\n\n\n        //Debug.sleep( 3000 );\n\n        //Raccoon raccoon = wolf.getIface( 2048, Raccoon.class );\n        //Debug.greenf( raccoon.scratch( \"fuck you\", 202510 ) );\n\n\n        this.getTaskManager().add( wolf );\n\n        this.getTaskManager().syncWaitingTerminated();\n\n    }\n\n\n    private void testDuplexClient() throws Exception {\n        WolvesAppointClient wolf = new WolvesAppointClient(\n                new WolfMCClient( 2048, \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" ) )\n        );\n        wolf.execute();\n\n        wolf.compile( Raccoon.class, false );\n        DynamicMethodPrototype digest = (DynamicMethodPrototype)wolf.queryMethodDigest( \"com.protobuf.Raccoon.scratch\" );\n\n        RaccoonController controller  = new RaccoonController();\n        wolf.getRouteDispatcher().registerController( controller );\n\n        Debug.sleep( 200 );\n\n        wolf.embraces(2);\n\n        Rabbit rabbit = new Rabbit();\n        rabbit.name = \"rabbit\";\n        rabbit.bytes = new byte[] { 1,2,3 };\n        Monkey monkey = new Monkey();\n        monkey.name = \"monkey\";\n        rabbit.setMonkey( monkey );\n        //Debug.bluef( wolf.invokeInform( \"com.protobuf.Raccoon.scratchA\", \"DP you!\", 5202123, rabbit ) );\n\n        Debug.bluef( wolf.invokeInform( 
\"com.protobuf.Raccoon.scratch\", \"DP you!\", 5202 ) );\n\n//        Debug.sleep( 3500 );\n//        Debug.bluef( wolf.invokeInform( \"com.protobuf.Raccoon.scratch\", \"DP you!\", 5201 ) );\n\n        //Debug.greenf( wolf.invokeInform(digest, \"fuck you\", 2024 ) );\n\n        this.getTaskManager().add( wolf );\n    }\n}\n\npublic class TestRPCSystem {\n    public static void main( String[] args ) throws Exception {\n        //String[] as = args;\n        String[] as = new String[]{ \"TestWolfMCClient=true\" };\n        Pinecone.init( (Object...cfg )->{\n            Jeff jeff = (Jeff) Pinecone.sys().getTaskManager().add( new Jeff( as, Pinecone.sys() ) );\n            jeff.vitalize();\n            return 0;\n        }, (Object[]) as );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/TestRocketClient.java",
    "content": "package com.protobuf;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.umb.UlfMBInformMessage;\nimport com.pinecone.hydra.umb.UlfPackageMessageHandler;\nimport com.pinecone.hydra.umb.broadcast.BroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlNode;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastProducer;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastConsumer;\nimport com.pinecone.hydra.umb.broadcast.UMCBroadcastProducer;\nimport com.pinecone.hydra.umb.rocket.RocketClient;\nimport com.pinecone.hydra.umb.rocket.RocketMQClient;\nimport com.pinecone.hydra.umb.rocket.UlfRocketClient;\nimport com.pinecone.hydra.umb.rocket.WolfMCRocketClient;\nimport com.pinecone.hydra.umb.wolf.WolfMCBClient;\nimport com.pinecone.hydra.umc.msg.Medium;\nimport com.pinecone.hydra.umc.msg.UMCMessage;\nimport com.pinecone.hydra.umc.msg.UMCReceiver;\nimport com.pinecone.hydra.umc.msg.UMCTransmit;\nimport com.pinecone.hydra.umct.UMCTExpressHandler;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.tritium.Tritium;\n\n\nclass Garrison extends Tritium {\n    public Garrison( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Garrison( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        //this.testFundamental();\n        //this.testWolfMB();\n        this.testWolfMCTB();\n    }\n\n    public void testFundamental() throws Exception {\n        String nameSrvAddr = \"localhost:9876\";\n        String groupName = \"testGroup\";\n        String topic = \"testTopic\";\n        
String tags = \"*\";\n        String keys = \"testKeys\";\n        String body = \"This is a test message\";\n\n\n        RocketClient client = new RocketMQClient( nameSrvAddr, groupName );\n        BroadcastConsumer consumer = client.createConsumer( topic );\n        consumer.start(new UlfPackageMessageHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( byte[] body, Object[] args ) throws Exception {\n                Debug.trace( new String( body ) );\n            }\n        });\n\n\n        BroadcastProducer producer = client.createProducer();\n        producer.start();\n        producer.sendMessage( topic, body.getBytes() );\n\n        Debug.sleep( 100000 );\n    }\n\n    public void testWolfMB() throws Exception {\n        String nameSrvAddr = \"localhost:9876\";\n        String groupName = \"testGroup\";\n        String topic = \"testTopic\";\n        String tags = \"*\";\n        String keys = \"testKeys\";\n\n\n        UlfRocketClient client = new WolfMCRocketClient( nameSrvAddr, groupName );\n        UMCBroadcastConsumer consumer = client.createUlfConsumer( topic );\n        consumer.start(new UMCTExpressHandler() {\n            @Override\n            public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n                if ( msg.evinceTransferMessage() != null ) {\n                    Debug.greenfs( msg.getHead(), new String( (byte[]) msg.evinceTransferMessage().getBody() ) );\n                }\n                else {\n                    Debug.redf( msg.getHead() );\n                }\n            }\n        });\n\n\n        UMCBroadcastProducer producer = client.createUlfProducer();\n        producer.start();\n\n        producer.sendMessage( topic, new UlfMBInformMessage( new JSONMaptron( \"{ path: '/user/getName ' }\" ) ) );\n        //producer.sendMessage( topic, new UlfMBInformMessage( new JSONMaptron( \"{ msg: 'Jesus, 
Mr.Garrison! ' }\" ), 0xFA ) );\n        //producer.sendMessage( topic, new UlfBytesTransferMessage( new JSONMaptron( \"{ msg: 'Jesus, Mr.Garrison! ' }\" ), \"fuck you\" ) );\n\n\n        Debug.sleep( 100000 );\n    }\n\n    public void testWolfMCTB() throws Exception {\n        String nameSrvAddr = \"localhost:9876\";\n        String groupName = \"UCDNFileServiceTransmitGroup\";\n        String topic = \"ucdn-file-cloud-distribute-topic\";\n        String tags = \"*\";\n        String keys = \"testKeys\";\n\n\n        BroadcastControlNode client = new WolfMCBClient( new WolfMCRocketClient( nameSrvAddr, groupName ), \"\", this, WolfMCExpress.class );\n\n\n\n//        UMCBroadcastConsumer consumer = client.createUlfConsumer( topic );\n//        consumer.start(new UMCTExpressHandler() {\n//            @Override\n//            public void onSuccessfulMsgReceived( Medium medium, UMCTransmit transmit, UMCReceiver receiver, UMCMessage msg, Object[] args ) throws Exception {\n//                if ( msg.evinceTransferMessage() != null ) {\n//                    Debug.greenfs( msg.getHead(), new String( (byte[]) msg.evinceTransferMessage().getBody() ) );\n//                }\n//                else {\n//                    byte[] bytes = (byte[]) msg.evinceInformMessage().getExHead();\n//                    for ( int i = 0; i < bytes.length; ++i ) {\n//                        try{\n//                            Debug.greenfs( (char)bytes[ i ] );\n//                        }\n//                        catch ( Exception e ) {\n//                            e.printStackTrace();\n//                        }\n//                    }\n//\n//                    Debug.redf( msg.getHead() );\n//                }\n//            }\n//        });\n\n\n        BroadcastControlConsumer consumer = client.createBroadcastControlConsumer( topic );\n        RaccoonController controller  = new RaccoonController();\n        consumer.registerController( controller );\n        
consumer.start();\n\n\n\n        client.compile( Raccoon.class, false );\n        BroadcastControlProducer producer = client.createBroadcastControlProducer();\n\n\n        producer.start();\n        producer.issueInform( topic, \"com.protobuf.Raccoon.scratch\", \"fuck you !\", 2025 );\n\n\n        Debug.sleep( 100000 );\n    }\n}\n\n\npublic class TestRocketClient {\n    public static void main(String[] args) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n            Garrison garrison = (Garrison) Pinecone.sys().getTaskManager().add( new Garrison( args, Pinecone.sys() ) );\n            garrison.vitalize();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/rpc.proto",
    "content": "syntax = \"proto3\";\n\n//package rpc;\n\noption java_multiple_files = true;\noption java_package = \"com.protobuf\";\n\nmessage RpcRequest {\n  string method = 1;\n  bytes payload = 2;\n}\n\nmessage RpcResponse {\n  int32 code = 1;\n  string message = 2;\n  bytes payload = 3;\n}"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/Rpc.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf.v3;\n\npublic final class Rpc {\n  private Rpc() {}\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistryLite registry) {\n  }\n\n  public static void registerAllExtensions(\n      com.google.protobuf.ExtensionRegistry registry) {\n    registerAllExtensions(\n        (com.google.protobuf.ExtensionRegistryLite) registry);\n  }\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_RpcRequest_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_RpcRequest_fieldAccessorTable;\n  static final com.google.protobuf.Descriptors.Descriptor\n    internal_static_RpcResponse_descriptor;\n  static final \n    com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internal_static_RpcResponse_fieldAccessorTable;\n\n  public static com.google.protobuf.Descriptors.FileDescriptor\n      getDescriptor() {\n    return descriptor;\n  }\n  private static  com.google.protobuf.Descriptors.FileDescriptor\n      descriptor;\n  static {\n    java.lang.String[] descriptorData = {\n      \"\\n\\trpc.proto\\\"-\\n\\nRpcRequest\\022\\016\\n\\006method\\030\\001 \\001(\" +\n      \"\\t\\022\\017\\n\\007payload\\030\\002 \\001(\\014\\\"=\\n\\013RpcResponse\\022\\014\\n\\004cod\" +\n      \"e\\030\\001 \\001(\\005\\022\\017\\n\\007message\\030\\002 \\001(\\t\\022\\017\\n\\007payload\\030\\003 \\001(\" +\n      \"\\014B\\020\\n\\014com.protobufP\\001b\\006proto3\"\n    };\n    descriptor = com.google.protobuf.Descriptors.FileDescriptor\n      .internalBuildGeneratedFileFrom(descriptorData,\n        new com.google.protobuf.Descriptors.FileDescriptor[] {\n        });\n    internal_static_RpcRequest_descriptor =\n      getDescriptor().getMessageTypes().get(0);\n    internal_static_RpcRequest_fieldAccessorTable = new\n      
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_RpcRequest_descriptor,\n        new java.lang.String[] { \"Method\", \"Payload\", });\n    internal_static_RpcResponse_descriptor =\n      getDescriptor().getMessageTypes().get(1);\n    internal_static_RpcResponse_fieldAccessorTable = new\n      com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(\n        internal_static_RpcResponse_descriptor,\n        new java.lang.String[] { \"Code\", \"Message\", \"Payload\", });\n  }\n\n  // @@protoc_insertion_point(outer_class_scope)\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/RpcRequest.java1",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf.v3;\n\n/**\n * Protobuf type {@code RpcRequest}\n */\npublic final class RpcRequest extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:RpcRequest)\n    RpcRequestOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use RpcRequest.newBuilder() to construct.\n  private RpcRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private RpcRequest() {\n    method_ = \"\";\n    payload_ = com.google.protobuf.ByteString.EMPTY;\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new RpcRequest();\n  }\n\n  @java.lang.Override\n  public final com.google.protobuf.UnknownFieldSet\n  getUnknownFields() {\n    return this.unknownFields;\n  }\n  private RpcRequest(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    this();\n    if (extensionRegistry == null) {\n      throw new java.lang.NullPointerException();\n    }\n    com.google.protobuf.UnknownFieldSet.Builder unknownFields =\n        com.google.protobuf.UnknownFieldSet.newBuilder();\n    try {\n      boolean done = false;\n      while (!done) {\n        int tag = input.readTag();\n        switch (tag) {\n          case 0:\n            done = true;\n            break;\n          case 10: {\n            java.lang.String s = input.readStringRequireUtf8();\n\n            method_ = s;\n            break;\n          }\n          case 18: {\n\n            payload_ = input.readBytes();\n            break;\n          }\n          default: {\n            if (!parseUnknownField(\n                input, unknownFields, extensionRegistry, tag)) {\n       
       done = true;\n            }\n            break;\n          }\n        }\n      }\n    } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n      throw e.setUnfinishedMessage(this);\n    } catch (com.google.protobuf.UninitializedMessageException e) {\n      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);\n    } catch (java.io.IOException e) {\n      throw new com.google.protobuf.InvalidProtocolBufferException(\n          e).setUnfinishedMessage(this);\n    } finally {\n      this.unknownFields = unknownFields.build();\n      makeExtensionsImmutable();\n    }\n  }\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return Rpc.internal_static_RpcRequest_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return Rpc.internal_static_RpcRequest_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            RpcRequest.class, RpcRequest.Builder.class);\n  }\n\n  public static final int METHOD_FIELD_NUMBER = 1;\n  private volatile java.lang.Object method_;\n  /**\n   * <code>string method = 1;</code>\n   * @return The method.\n   */\n  @java.lang.Override\n  public java.lang.String getMethod() {\n    java.lang.Object ref = method_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      method_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string method = 1;</code>\n   * @return The bytes for method.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getMethodBytes() {\n    java.lang.Object ref = method_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n         
     (java.lang.String) ref);\n      method_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int PAYLOAD_FIELD_NUMBER = 2;\n  private com.google.protobuf.ByteString payload_;\n  /**\n   * <code>bytes payload = 2;</code>\n   * @return The payload.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString getPayload() {\n    return payload_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n//    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) {\n//      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, method_);\n//    }\n    if (!payload_.isEmpty()) {\n      output.writeBytes(2, payload_);\n    }\n    unknownFields.writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n//    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) {\n//      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, method_);\n//    }\n    if (!payload_.isEmpty()) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeBytesSize(2, payload_);\n    }\n    size += unknownFields.getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof RpcRequest)) {\n      return super.equals(obj);\n    }\n    RpcRequest other = (RpcRequest) obj;\n\n    if (!getMethod()\n      
  .equals(other.getMethod())) return false;\n    if (!getPayload()\n        .equals(other.getPayload())) return false;\n    if (!unknownFields.equals(other.unknownFields)) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + METHOD_FIELD_NUMBER;\n    hash = (53 * hash) + getMethod().hashCode();\n    hash = (37 * hash) + PAYLOAD_FIELD_NUMBER;\n    hash = (53 * hash) + getPayload().hashCode();\n    hash = (29 * hash) + unknownFields.hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static RpcRequest parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static RpcRequest parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static RpcRequest parseFrom(\n      com.google.protobuf.ByteString data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static RpcRequest parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static RpcRequest parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static RpcRequest parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return 
PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static RpcRequest parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static RpcRequest parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static RpcRequest parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n  public static RpcRequest parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static RpcRequest parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static RpcRequest parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(RpcRequest prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    
return this == DEFAULT_INSTANCE\n        ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code RpcRequest}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:RpcRequest)\n          RpcRequestOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return Rpc.internal_static_RpcRequest_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return Rpc.internal_static_RpcRequest_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              RpcRequest.class, RpcRequest.Builder.class);\n    }\n\n    // Construct using com.protobuf.RpcRequest.newBuilder()\n    private Builder() {\n      maybeForceBuilderInitialization();\n    }\n\n    private Builder(\n        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n      maybeForceBuilderInitialization();\n    }\n    private void maybeForceBuilderInitialization() {\n      if (com.google.protobuf.GeneratedMessageV3\n              .alwaysUseFieldBuilders) {\n      }\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      method_ = \"\";\n\n      payload_ = com.google.protobuf.ByteString.EMPTY;\n\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return Rpc.internal_static_RpcRequest_descriptor;\n    }\n\n    @java.lang.Override\n    public RpcRequest getDefaultInstanceForType() {\n      return 
RpcRequest.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public RpcRequest build() {\n      RpcRequest result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public RpcRequest buildPartial() {\n      RpcRequest result = new RpcRequest(this);\n      result.method_ = method_;\n      result.payload_ = payload_;\n      onBuilt();\n      return result;\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof RpcRequest) {\n        return mergeFrom((RpcRequest)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(RpcRequest other) {\n      if (other == RpcRequest.getDefaultInstance()) return this;\n      if 
(!other.getMethod().isEmpty()) {\n        method_ = other.method_;\n        onChanged();\n      }\n      if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) {\n        setPayload(other.getPayload());\n      }\n      this.mergeUnknownFields(other.unknownFields);\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      RpcRequest parsedMessage = null;\n      try {\n        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        parsedMessage = (RpcRequest) e.getUnfinishedMessage();\n        throw e.unwrapIOException();\n      } finally {\n        if (parsedMessage != null) {\n          mergeFrom(parsedMessage);\n        }\n      }\n      return this;\n    }\n\n    private java.lang.Object method_ = \"\";\n    /**\n     * <code>string method = 1;</code>\n     * @return The method.\n     */\n    public java.lang.String getMethod() {\n      java.lang.Object ref = method_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        method_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string method = 1;</code>\n     * @return The bytes for method.\n     */\n    public com.google.protobuf.ByteString\n        getMethodBytes() {\n      java.lang.Object ref = method_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n       
 method_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string method = 1;</code>\n     * @param value The method to set.\n     * @return This builder for chaining.\n     */\n    public Builder setMethod(\n        java.lang.String value) {\n      if (value == null) {\n    throw new NullPointerException();\n  }\n  \n      method_ = value;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string method = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearMethod() {\n      \n      method_ = getDefaultInstance().getMethod();\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string method = 1;</code>\n     * @param value The bytes for method to set.\n     * @return This builder for chaining.\n     */\n    public Builder setMethodBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) {\n    throw new NullPointerException();\n  }\n  checkByteStringIsUtf8(value);\n      \n      method_ = value;\n      onChanged();\n      return this;\n    }\n\n    private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY;\n    /**\n     * <code>bytes payload = 2;</code>\n     * @return The payload.\n     */\n    @java.lang.Override\n    public com.google.protobuf.ByteString getPayload() {\n      return payload_;\n    }\n    /**\n     * <code>bytes payload = 2;</code>\n     * @param value The payload to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPayload(com.google.protobuf.ByteString value) {\n      if (value == null) {\n    throw new NullPointerException();\n  }\n  \n      payload_ = value;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>bytes payload = 2;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearPayload() {\n      \n      payload_ = getDefaultInstance().getPayload();\n      
onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:RpcRequest)\n  }\n\n  // @@protoc_insertion_point(class_scope:RpcRequest)\n  private static final RpcRequest DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new RpcRequest();\n  }\n\n  public static RpcRequest getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<RpcRequest>\n      PARSER = new com.google.protobuf.AbstractParser<RpcRequest>() {\n    @java.lang.Override\n    public RpcRequest parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      return new RpcRequest(input, extensionRegistry);\n    }\n  };\n\n  public static com.google.protobuf.Parser<RpcRequest> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<RpcRequest> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public RpcRequest getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/RpcRequestOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf.v3;\n\npublic interface RpcRequestOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:RpcRequest)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>string method = 1;</code>\n   * @return The method.\n   */\n  java.lang.String getMethod();\n  /**\n   * <code>string method = 1;</code>\n   * @return The bytes for method.\n   */\n  com.google.protobuf.ByteString\n      getMethodBytes();\n\n  /**\n   * <code>bytes payload = 2;</code>\n   * @return The payload.\n   */\n  com.google.protobuf.ByteString getPayload();\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/RpcResponse.java1",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf.v3;\n\n/**\n * Protobuf type {@code RpcResponse}\n */\npublic final class RpcResponse extends\n    com.google.protobuf.GeneratedMessageV3 implements\n    // @@protoc_insertion_point(message_implements:RpcResponse)\n    RpcResponseOrBuilder {\nprivate static final long serialVersionUID = 0L;\n  // Use RpcResponse.newBuilder() to construct.\n  private RpcResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {\n    super(builder);\n  }\n  private RpcResponse() {\n    message_ = \"\";\n    payload_ = com.google.protobuf.ByteString.EMPTY;\n  }\n\n  @java.lang.Override\n  @SuppressWarnings({\"unused\"})\n  protected java.lang.Object newInstance(\n      UnusedPrivateParameter unused) {\n    return new RpcResponse();\n  }\n\n  @java.lang.Override\n  public final com.google.protobuf.UnknownFieldSet\n  getUnknownFields() {\n    return this.unknownFields;\n  }\n  private RpcResponse(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    this();\n    if (extensionRegistry == null) {\n      throw new java.lang.NullPointerException();\n    }\n    com.google.protobuf.UnknownFieldSet.Builder unknownFields =\n        com.google.protobuf.UnknownFieldSet.newBuilder();\n    try {\n      boolean done = false;\n      while (!done) {\n        int tag = input.readTag();\n        switch (tag) {\n          case 0:\n            done = true;\n            break;\n          case 8: {\n\n            code_ = input.readInt32();\n            break;\n          }\n          case 18: {\n            java.lang.String s = input.readStringRequireUtf8();\n\n            message_ = s;\n            break;\n          }\n          case 26: {\n\n            payload_ = input.readBytes();\n            break;\n          }\n          default: {\n   
         if (!parseUnknownField(\n                input, unknownFields, extensionRegistry, tag)) {\n              done = true;\n            }\n            break;\n          }\n        }\n      }\n    } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n      throw e.setUnfinishedMessage(this);\n    } catch (com.google.protobuf.UninitializedMessageException e) {\n      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(this);\n    } catch (java.io.IOException e) {\n      throw new com.google.protobuf.InvalidProtocolBufferException(\n          e).setUnfinishedMessage(this);\n    } finally {\n      this.unknownFields = unknownFields.build();\n      makeExtensionsImmutable();\n    }\n  }\n  public static final com.google.protobuf.Descriptors.Descriptor\n      getDescriptor() {\n    return Rpc.internal_static_RpcResponse_descriptor;\n  }\n\n  @java.lang.Override\n  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n      internalGetFieldAccessorTable() {\n    return Rpc.internal_static_RpcResponse_fieldAccessorTable\n        .ensureFieldAccessorsInitialized(\n            RpcResponse.class, RpcResponse.Builder.class);\n  }\n\n  public static final int CODE_FIELD_NUMBER = 1;\n  private int code_;\n  /**\n   * <code>int32 code = 1;</code>\n   * @return The code.\n   */\n  @java.lang.Override\n  public int getCode() {\n    return code_;\n  }\n\n  public static final int MESSAGE_FIELD_NUMBER = 2;\n  private volatile java.lang.Object message_;\n  /**\n   * <code>string message = 2;</code>\n   * @return The message.\n   */\n  @java.lang.Override\n  public java.lang.String getMessage() {\n    java.lang.Object ref = message_;\n    if (ref instanceof java.lang.String) {\n      return (java.lang.String) ref;\n    } else {\n      com.google.protobuf.ByteString bs = \n          (com.google.protobuf.ByteString) ref;\n      java.lang.String s = bs.toStringUtf8();\n      message_ = s;\n      return s;\n    }\n  }\n  /**\n   * <code>string 
message = 2;</code>\n   * @return The bytes for message.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString\n      getMessageBytes() {\n    java.lang.Object ref = message_;\n    if (ref instanceof java.lang.String) {\n      com.google.protobuf.ByteString b = \n          com.google.protobuf.ByteString.copyFromUtf8(\n              (java.lang.String) ref);\n      message_ = b;\n      return b;\n    } else {\n      return (com.google.protobuf.ByteString) ref;\n    }\n  }\n\n  public static final int PAYLOAD_FIELD_NUMBER = 3;\n  private com.google.protobuf.ByteString payload_;\n  /**\n   * <code>bytes payload = 3;</code>\n   * @return The payload.\n   */\n  @java.lang.Override\n  public com.google.protobuf.ByteString getPayload() {\n    return payload_;\n  }\n\n  private byte memoizedIsInitialized = -1;\n  @java.lang.Override\n  public final boolean isInitialized() {\n    byte isInitialized = memoizedIsInitialized;\n    if (isInitialized == 1) return true;\n    if (isInitialized == 0) return false;\n\n    memoizedIsInitialized = 1;\n    return true;\n  }\n\n  @java.lang.Override\n  public void writeTo(com.google.protobuf.CodedOutputStream output)\n                      throws java.io.IOException {\n    if (code_ != 0) {\n      output.writeInt32(1, code_);\n    }\n//    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {\n//      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, message_);\n//    }\n    if (!payload_.isEmpty()) {\n      output.writeBytes(3, payload_);\n    }\n    unknownFields.writeTo(output);\n  }\n\n  @java.lang.Override\n  public int getSerializedSize() {\n    int size = memoizedSize;\n    if (size != -1) return size;\n\n    size = 0;\n    if (code_ != 0) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeInt32Size(1, code_);\n    }\n//    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {\n//      size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(2, message_);\n//    }\n    if (!payload_.isEmpty()) {\n      size += com.google.protobuf.CodedOutputStream\n        .computeBytesSize(3, payload_);\n    }\n    size += unknownFields.getSerializedSize();\n    memoizedSize = size;\n    return size;\n  }\n\n  @java.lang.Override\n  public boolean equals(final java.lang.Object obj) {\n    if (obj == this) {\n     return true;\n    }\n    if (!(obj instanceof RpcResponse)) {\n      return super.equals(obj);\n    }\n    RpcResponse other = (RpcResponse) obj;\n\n    if (getCode()\n        != other.getCode()) return false;\n    if (!getMessage()\n        .equals(other.getMessage())) return false;\n    if (!getPayload()\n        .equals(other.getPayload())) return false;\n    if (!unknownFields.equals(other.unknownFields)) return false;\n    return true;\n  }\n\n  @java.lang.Override\n  public int hashCode() {\n    if (memoizedHashCode != 0) {\n      return memoizedHashCode;\n    }\n    int hash = 41;\n    hash = (19 * hash) + getDescriptor().hashCode();\n    hash = (37 * hash) + CODE_FIELD_NUMBER;\n    hash = (53 * hash) + getCode();\n    hash = (37 * hash) + MESSAGE_FIELD_NUMBER;\n    hash = (53 * hash) + getMessage().hashCode();\n    hash = (37 * hash) + PAYLOAD_FIELD_NUMBER;\n    hash = (53 * hash) + getPayload().hashCode();\n    hash = (29 * hash) + unknownFields.hashCode();\n    memoizedHashCode = hash;\n    return hash;\n  }\n\n  public static RpcResponse parseFrom(\n      java.nio.ByteBuffer data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static RpcResponse parseFrom(\n      java.nio.ByteBuffer data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static RpcResponse parseFrom(\n      com.google.protobuf.ByteString data)\n      
throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static RpcResponse parseFrom(\n      com.google.protobuf.ByteString data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static RpcResponse parseFrom(byte[] data)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data);\n  }\n  public static RpcResponse parseFrom(\n      byte[] data,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws com.google.protobuf.InvalidProtocolBufferException {\n    return PARSER.parseFrom(data, extensionRegistry);\n  }\n  public static RpcResponse parseFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static RpcResponse parseFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static RpcResponse parseDelimitedFrom(java.io.InputStream input)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input);\n  }\n  public static RpcResponse parseDelimitedFrom(\n      java.io.InputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);\n  }\n  public static RpcResponse parseFrom(\n      com.google.protobuf.CodedInputStream input)\n      throws java.io.IOException {\n    return 
com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input);\n  }\n  public static RpcResponse parseFrom(\n      com.google.protobuf.CodedInputStream input,\n      com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n      throws java.io.IOException {\n    return com.google.protobuf.GeneratedMessageV3\n        .parseWithIOException(PARSER, input, extensionRegistry);\n  }\n\n  @java.lang.Override\n  public Builder newBuilderForType() { return newBuilder(); }\n  public static Builder newBuilder() {\n    return DEFAULT_INSTANCE.toBuilder();\n  }\n  public static Builder newBuilder(RpcResponse prototype) {\n    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);\n  }\n  @java.lang.Override\n  public Builder toBuilder() {\n    return this == DEFAULT_INSTANCE\n        ? new Builder() : new Builder().mergeFrom(this);\n  }\n\n  @java.lang.Override\n  protected Builder newBuilderForType(\n      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n    Builder builder = new Builder(parent);\n    return builder;\n  }\n  /**\n   * Protobuf type {@code RpcResponse}\n   */\n  public static final class Builder extends\n      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements\n      // @@protoc_insertion_point(builder_implements:RpcResponse)\n          RpcResponseOrBuilder {\n    public static final com.google.protobuf.Descriptors.Descriptor\n        getDescriptor() {\n      return Rpc.internal_static_RpcResponse_descriptor;\n    }\n\n    @java.lang.Override\n    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable\n        internalGetFieldAccessorTable() {\n      return Rpc.internal_static_RpcResponse_fieldAccessorTable\n          .ensureFieldAccessorsInitialized(\n              RpcResponse.class, RpcResponse.Builder.class);\n    }\n\n    // Construct using com.protobuf.RpcResponse.newBuilder()\n    private Builder() {\n      maybeForceBuilderInitialization();\n    }\n\n    private Builder(\n        
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {\n      super(parent);\n      maybeForceBuilderInitialization();\n    }\n    private void maybeForceBuilderInitialization() {\n      if (com.google.protobuf.GeneratedMessageV3\n              .alwaysUseFieldBuilders) {\n      }\n    }\n    @java.lang.Override\n    public Builder clear() {\n      super.clear();\n      code_ = 0;\n\n      message_ = \"\";\n\n      payload_ = com.google.protobuf.ByteString.EMPTY;\n\n      return this;\n    }\n\n    @java.lang.Override\n    public com.google.protobuf.Descriptors.Descriptor\n        getDescriptorForType() {\n      return Rpc.internal_static_RpcResponse_descriptor;\n    }\n\n    @java.lang.Override\n    public RpcResponse getDefaultInstanceForType() {\n      return RpcResponse.getDefaultInstance();\n    }\n\n    @java.lang.Override\n    public RpcResponse build() {\n      RpcResponse result = buildPartial();\n      if (!result.isInitialized()) {\n        throw newUninitializedMessageException(result);\n      }\n      return result;\n    }\n\n    @java.lang.Override\n    public RpcResponse buildPartial() {\n      RpcResponse result = new RpcResponse(this);\n      result.code_ = code_;\n      result.message_ = message_;\n      result.payload_ = payload_;\n      onBuilt();\n      return result;\n    }\n\n    @java.lang.Override\n    public Builder clone() {\n      return super.clone();\n    }\n    @java.lang.Override\n    public Builder setField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.setField(field, value);\n    }\n    @java.lang.Override\n    public Builder clearField(\n        com.google.protobuf.Descriptors.FieldDescriptor field) {\n      return super.clearField(field);\n    }\n    @java.lang.Override\n    public Builder clearOneof(\n        com.google.protobuf.Descriptors.OneofDescriptor oneof) {\n      return super.clearOneof(oneof);\n    }\n    @java.lang.Override\n    public 
Builder setRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        int index, java.lang.Object value) {\n      return super.setRepeatedField(field, index, value);\n    }\n    @java.lang.Override\n    public Builder addRepeatedField(\n        com.google.protobuf.Descriptors.FieldDescriptor field,\n        java.lang.Object value) {\n      return super.addRepeatedField(field, value);\n    }\n    @java.lang.Override\n    public Builder mergeFrom(com.google.protobuf.Message other) {\n      if (other instanceof RpcResponse) {\n        return mergeFrom((RpcResponse)other);\n      } else {\n        super.mergeFrom(other);\n        return this;\n      }\n    }\n\n    public Builder mergeFrom(RpcResponse other) {\n      if (other == RpcResponse.getDefaultInstance()) return this;\n      if (other.getCode() != 0) {\n        setCode(other.getCode());\n      }\n      if (!other.getMessage().isEmpty()) {\n        message_ = other.message_;\n        onChanged();\n      }\n      if (other.getPayload() != com.google.protobuf.ByteString.EMPTY) {\n        setPayload(other.getPayload());\n      }\n      this.mergeUnknownFields(other.unknownFields);\n      onChanged();\n      return this;\n    }\n\n    @java.lang.Override\n    public final boolean isInitialized() {\n      return true;\n    }\n\n    @java.lang.Override\n    public Builder mergeFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws java.io.IOException {\n      RpcResponse parsedMessage = null;\n      try {\n        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);\n      } catch (com.google.protobuf.InvalidProtocolBufferException e) {\n        parsedMessage = (RpcResponse) e.getUnfinishedMessage();\n        throw e.unwrapIOException();\n      } finally {\n        if (parsedMessage != null) {\n          mergeFrom(parsedMessage);\n        }\n      }\n      return this;\n    }\n\n    
private int code_ ;\n    /**\n     * <code>int32 code = 1;</code>\n     * @return The code.\n     */\n    @java.lang.Override\n    public int getCode() {\n      return code_;\n    }\n    /**\n     * <code>int32 code = 1;</code>\n     * @param value The code to set.\n     * @return This builder for chaining.\n     */\n    public Builder setCode(int value) {\n      \n      code_ = value;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>int32 code = 1;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearCode() {\n      \n      code_ = 0;\n      onChanged();\n      return this;\n    }\n\n    private java.lang.Object message_ = \"\";\n    /**\n     * <code>string message = 2;</code>\n     * @return The message.\n     */\n    public java.lang.String getMessage() {\n      java.lang.Object ref = message_;\n      if (!(ref instanceof java.lang.String)) {\n        com.google.protobuf.ByteString bs =\n            (com.google.protobuf.ByteString) ref;\n        java.lang.String s = bs.toStringUtf8();\n        message_ = s;\n        return s;\n      } else {\n        return (java.lang.String) ref;\n      }\n    }\n    /**\n     * <code>string message = 2;</code>\n     * @return The bytes for message.\n     */\n    public com.google.protobuf.ByteString\n        getMessageBytes() {\n      java.lang.Object ref = message_;\n      if (ref instanceof String) {\n        com.google.protobuf.ByteString b = \n            com.google.protobuf.ByteString.copyFromUtf8(\n                (java.lang.String) ref);\n        message_ = b;\n        return b;\n      } else {\n        return (com.google.protobuf.ByteString) ref;\n      }\n    }\n    /**\n     * <code>string message = 2;</code>\n     * @param value The message to set.\n     * @return This builder for chaining.\n     */\n    public Builder setMessage(\n        java.lang.String value) {\n      if (value == null) {\n    throw new NullPointerException();\n  }\n  \n      message_ = 
value;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string message = 2;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearMessage() {\n      \n      message_ = getDefaultInstance().getMessage();\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>string message = 2;</code>\n     * @param value The bytes for message to set.\n     * @return This builder for chaining.\n     */\n    public Builder setMessageBytes(\n        com.google.protobuf.ByteString value) {\n      if (value == null) {\n    throw new NullPointerException();\n  }\n  checkByteStringIsUtf8(value);\n      \n      message_ = value;\n      onChanged();\n      return this;\n    }\n\n    private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY;\n    /**\n     * <code>bytes payload = 3;</code>\n     * @return The payload.\n     */\n    @java.lang.Override\n    public com.google.protobuf.ByteString getPayload() {\n      return payload_;\n    }\n    /**\n     * <code>bytes payload = 3;</code>\n     * @param value The payload to set.\n     * @return This builder for chaining.\n     */\n    public Builder setPayload(com.google.protobuf.ByteString value) {\n      if (value == null) {\n    throw new NullPointerException();\n  }\n  \n      payload_ = value;\n      onChanged();\n      return this;\n    }\n    /**\n     * <code>bytes payload = 3;</code>\n     * @return This builder for chaining.\n     */\n    public Builder clearPayload() {\n      \n      payload_ = getDefaultInstance().getPayload();\n      onChanged();\n      return this;\n    }\n    @java.lang.Override\n    public final Builder setUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return super.setUnknownFields(unknownFields);\n    }\n\n    @java.lang.Override\n    public final Builder mergeUnknownFields(\n        final com.google.protobuf.UnknownFieldSet unknownFields) {\n      return 
super.mergeUnknownFields(unknownFields);\n    }\n\n\n    // @@protoc_insertion_point(builder_scope:RpcResponse)\n  }\n\n  // @@protoc_insertion_point(class_scope:RpcResponse)\n  private static final RpcResponse DEFAULT_INSTANCE;\n  static {\n    DEFAULT_INSTANCE = new RpcResponse();\n  }\n\n  public static RpcResponse getDefaultInstance() {\n    return DEFAULT_INSTANCE;\n  }\n\n  private static final com.google.protobuf.Parser<RpcResponse>\n      PARSER = new com.google.protobuf.AbstractParser<RpcResponse>() {\n    @java.lang.Override\n    public RpcResponse parsePartialFrom(\n        com.google.protobuf.CodedInputStream input,\n        com.google.protobuf.ExtensionRegistryLite extensionRegistry)\n        throws com.google.protobuf.InvalidProtocolBufferException {\n      return new RpcResponse(input, extensionRegistry);\n    }\n  };\n\n  public static com.google.protobuf.Parser<RpcResponse> parser() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public com.google.protobuf.Parser<RpcResponse> getParserForType() {\n    return PARSER;\n  }\n\n  @java.lang.Override\n  public RpcResponse getDefaultInstanceForType() {\n    return DEFAULT_INSTANCE;\n  }\n\n}\n\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/protobuf/v3/RpcResponseOrBuilder.java",
    "content": "// Generated by the protocol buffer compiler.  DO NOT EDIT!\n// source: rpc.proto\n\npackage com.protobuf.v3;\n\npublic interface RpcResponseOrBuilder extends\n    // @@protoc_insertion_point(interface_extends:RpcResponse)\n    com.google.protobuf.MessageOrBuilder {\n\n  /**\n   * <code>int32 code = 1;</code>\n   * @return The code.\n   */\n  int getCode();\n\n  /**\n   * <code>string message = 2;</code>\n   * @return The message.\n   */\n  java.lang.String getMessage();\n  /**\n   * <code>string message = 2;</code>\n   * @return The bytes for message.\n   */\n  com.google.protobuf.ByteString\n      getMessageBytes();\n\n  /**\n   * <code>bytes payload = 3;</code>\n   * @return The payload.\n   */\n  com.google.protobuf.ByteString getPayload();\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/springram/TestSpringram.java",
    "content": "package com.springram;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.summer.spring.Springron;\nimport com.pinecone.tritium.Tritium;\n\nclass JesusChrist extends Tritium {\n    public JesusChrist( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public JesusChrist( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    public void vitalize () throws Exception {\n        Springron springron = new Springron( \"Springron\", this );\n        springron.execute();\n\n        Thread shutdowner = new Thread(()->{\n            Debug.sleep( 5000 );\n            springron.terminate();\n        });\n        //shutdowner.start();\n\n        this.getTaskManager().add( springron );\n        this.getTaskManager().syncWaitingTerminated();\n    }\n}\n\npublic class TestSpringram {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) );\n            jesus.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/utils/TestSchemeQuerier.java",
    "content": "package com.utils;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.unit.ListDictium;\nimport com.pinecone.framework.unit.MapDictium;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.Randomium;\nimport com.pinecone.framework.util.json.*;\nimport com.pinecone.slime.cache.query.LocalDictCachePage;\nimport com.pinecone.slime.cache.query.LocalFixedLRUDictCachePage;\nimport com.pinecone.slime.cache.query.pool.CountSelfPooledPageDictCache;\nimport com.pinecone.slime.cache.query.pool.LocalHotspotPooledDictCache;\nimport com.pinecone.slime.cache.query.pool.LocalLRUPrimaryPooledDictCache;\nimport com.pinecone.slime.jelly.source.ibatis.GenericMybatisQuerierDataManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisManipulatorProxyMapperFactory;\nimport com.pinecone.slime.jelly.source.memcached.GenericMemcachedManipulator;\nimport com.pinecone.slime.jelly.source.redis.GenericRedisHashManipulator;\nimport com.pinecone.slime.jelly.source.redis.GenericRedisMasterManipulator;\nimport com.pinecone.slime.map.LocalMapQuerier;\n\nimport java.net.InetSocketAddress;\nimport java.util.*;\n\nimport com.pinecone.slime.map.indexable.IndexableMapQuerier;\nimport com.pinecone.slime.map.rdb.RDBMapQuerier;\nimport com.pinecone.slime.source.indexable.*;\nimport com.pinecone.slime.source.rdb.*;\nimport net.spy.memcached.MemcachedClient;\nimport org.apache.ibatis.session.*;\n\n\nimport org.apache.ibatis.datasource.pooled.PooledDataSource;\nimport org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory;\nimport org.apache.ibatis.mapping.Environment;\nimport redis.clients.jedis.Jedis;\nimport redis.clients.jedis.JedisPool;\nimport redis.clients.jedis.JedisPoolConfig;\n\nimport javax.sql.DataSource;\nimport java.util.Map;\nimport java.util.concurrent.Future;\nimport java.util.concurrent.TimeUnit;\n\nclass DataEntity {\n    private Object id;\n    private Object value;\n\n    public Object getId() {\n        return this.id;\n   
 }\n\n    public void setId( Object key ) {\n        this.id = key;\n    }\n\n    public Object getValue() {\n        return this.value;\n    }\n\n    public void setValue( Object value ) {\n        this.value = value;\n    }\n\n    @Override\n    public String toString() {\n        return \"{\" + this.id + \":\" + this.value + \"}\";\n    }\n}\n\nclass MyBatisUtil {\n    private static SqlSessionFactory sqlSessionFactory;\n\n    static {\n        try {\n            // Define database connection information\n            String driver = \"com.mysql.cj.jdbc.Driver\";\n            String url = \"jdbc:mysql://localhost:3306/pinecone\";\n            String username = \"root\";\n            String password = \"test\";\n\n            DataSource dataSource       = new PooledDataSource(driver, url, username, password);\n            Environment environment     = new Environment(\"development\", new JdbcTransactionFactory(), dataSource);\n            Configuration configuration = new Configuration(environment);\n\n\n            // Add mappers directly in the configuration\n            configuration.addMapper(GenericMybatisQuerierDataManipulator.class);\n\n            sqlSessionFactory = new SqlSessionFactoryBuilder().build(configuration);\n        }\n        catch ( Exception e ) {\n            e.printStackTrace();\n        }\n    }\n\n    public static SqlSessionFactory getSqlSessionFactory() {\n        return sqlSessionFactory;\n    }\n}\n\n\n\n\n\n\n\n\n\n\npublic class TestSchemeQuerier {\n    public static void testListDict() throws Exception {\n        JSONArray ja = new JSONArraytron( \"[1,2,sss,null,false]\" );\n\n        ListDictium<Object > listDictium = new ListDictium<>( ja.toList() );\n\n        for( Map.Entry kv : listDictium.entrySet() ) {\n            Debug.trace( kv );\n        }\n\n        Debug.trace( listDictium.entrySet() );\n    }\n\n    public static void testMapDict() throws Exception {\n        JSONObject jo = new JSONMaptron( \"{ k1:v1, k2:v2, k3:3 
}\" );\n\n        MapDictium<Object > mapDictium = new MapDictium<>( jo.toMap(), true );\n\n        for( Map.Entry kv : mapDictium.entrySet() ) {\n            Debug.trace( kv.getKey(), kv.getValue() );\n        }\n\n        Debug.trace( mapDictium.entrySet() );\n    }\n\n    public static void testLocalDict() throws Exception {\n        JSONObject jo = new JSONMaptron( \"{ k1:v1, k2:v2, k3:3 }\" );\n\n        LocalMapQuerier<Object > querier = new LocalMapQuerier<>( jo );\n\n        for( Object kv : querier.entrySet() ) {\n            Debug.trace( kv );\n        }\n\n        Debug.trace( querier );\n\n        querier = new LocalMapQuerier<>( true );\n        querier.insert( 0, 111 );\n        querier.insert( 1, 211 );\n        querier.insert( 2, 311 );\n        querier.insert( 3, 311 );\n\n        Debug.trace( querier );\n\n        querier.insert( 8, 811 );\n        Debug.trace( querier );\n\n    }\n\n    public static void testLRUDictCache() throws Exception {\n        SqlSessionFactory sqlSessionFactory = MyBatisUtil.getSqlSessionFactory();\n        try (SqlSession sqlSession = sqlSessionFactory.openSession()) {\n            //sqlSession.getConnection().setAutoCommit(true);\n\n            GenericMybatisQuerierDataManipulator manipulator = IbatisManipulatorProxyMapperFactory.getMapper( sqlSession, GenericMybatisQuerierDataManipulator.class );\n            RDBTargetTableMeta meta = (\n                    new GenericRDBTargetTableMeta(\"test_table\", \"id\", String.class, manipulator )\n            ).addValueMetaKey( \"value\" );/*.addValueMetaKey( \"id\" ).addValueMetaKey( \"value\" )*/\n\n            ContiguousNumIndexBatchPageSourceRetriever<Integer, String > retriever = new ContiguousNumIndexBatchPageSourceRetriever<>( meta, 100, \"id\" );\n\n            //Debug.trace( retriever.retrieve( 56 ) );\n\n            Debug.trace( ( (LocalDictCachePage) retriever.retrieves( 8561 ) ).getDictium() );\n\n            LocalLRUPrimaryPooledDictCache<Integer, String > cache = 
new LocalLRUPrimaryPooledDictCache<>( 100, 3, retriever );\n\n            Debug.trace( cache.get( 123 ) );\n            Debug.trace( cache.get( 126 ) );\n            Debug.trace( cache.get( 128 ) );\n\n\n//            {username:undefined, role:admin, expired:20250117-12:30:00, xxxx}\n            Debug.trace( cache.get( 1995 ) );\n            Debug.trace( cache.get( 1915 ) );\n            cache.erase( 1915 );\n\n            Debug.trace( cache.get( 1915 ) );\n            Debug.trace( cache.get( 2915 ) );\n\n            Debug.trace( cache.get( 3615 ) );\n            Debug.trace( cache.get( 3415 ) );\n\n\n            LocalFixedLRUDictCachePage<String > cachePage = new LocalFixedLRUDictCachePage<>( 3, retriever );\n            Debug.trace( cachePage.get( 1995 ) );\n            Debug.trace( cachePage.get( 1915 ) );\n            cachePage.erase( 1915 );\n\n            Debug.trace( cachePage.get( 1915 ) );\n//            for ( int i = 0; i < (int)1e4; ++i ) {\n//                Debug.trace( cachePage.get( i ) );\n//            }\n        }\n    }\n\n    public static void testHotspotDictCache() throws Exception {\n        SqlSessionFactory sqlSessionFactory = MyBatisUtil.getSqlSessionFactory();\n        try (SqlSession sqlSession = sqlSessionFactory.openSession()) {\n            //sqlSession.getConnection().setAutoCommit(true);\n\n            GenericMybatisQuerierDataManipulator manipulator = IbatisManipulatorProxyMapperFactory.getMapper( sqlSession, GenericMybatisQuerierDataManipulator.class );\n            RDBTargetTableMeta meta = (\n                    new GenericRDBTargetTableMeta(\"test_table\", \"id\", String.class, manipulator )\n            ).addValueMetaKey( \"value\" );/*.addValueMetaKey( \"id\" ).addValueMetaKey( \"value\" )*/\n\n            ContiguousNumIndexBatchPageSourceRetriever<Integer, String > retriever = new ContiguousNumIndexBatchPageSourceRetriever<>( meta, 100, \"id\" );\n\n            //Debug.trace( retriever.retrieve( 56 ) );\n\n            
Debug.trace( ( (LocalDictCachePage) retriever.retrieves( 8561 ) ).getDictium() );\n\n            LocalHotspotPooledDictCache<Integer, String > cache = new LocalHotspotPooledDictCache<>( 100, 6, retriever );\n\n            Debug.trace( cache.get( 123 ) );\n            Debug.trace( cache.get( 126 ) );\n            Debug.trace( cache.get( 128 ) );\n\n            Debug.trace( cache.get( 1995 ) );\n            Debug.trace( cache.get( 1915 ) );\n\n            Debug.trace( cache.get( 2915 ) );\n\n            Debug.trace( cache.get( 3414 ) );\n            Debug.trace( cache.get( 3415 ) );\n            //cache.erase( 3415 );\n            //Debug.trace( cache.get( 3415 ) );\n            Debug.trace( cache.get( 3416 ) );\n            Debug.trace( cache.get( 3417 ) );\n\n            Debug.trace( cache.get( 4915 ) );\n            Debug.trace( cache.get( 4916 ) );\n            Debug.trace( cache.get( 4917 ) );\n\n            Debug.trace( cache.get( 1917 ) );\n            Debug.trace( cache.get( 1918 ) );\n\n            Debug.trace( cache.get( 5917 ) );\n\n            Debug.trace( cache.get( 6917 ) );\n\n            Randomium randomium = Randomium.newInstance();\n            int scale = (int)1e4;\n            for ( int i = 0; i < scale; ++i ) {\n                //Debug.trace( cache.get( i ) );\n                Debug.trace( cache.get( (int)randomium.nextBias(0, (int)1e3, 0.4 ) ) );\n            }\n\n            Debug.trace( cache.getMisses() );\n            Debug.trace( cache.getAccesses() );\n\n        }\n    }\n\n    public static void testRDBDict() throws Exception {\n        SqlSessionFactory sqlSessionFactory = MyBatisUtil.getSqlSessionFactory();\n        try (SqlSession sqlSession = sqlSessionFactory.openSession()) {\n            //sqlSession.getConnection().setAutoCommit(true);\n\n            RangedRDBQuerierDataManipulator manipulator = sqlSession.getMapper( GenericMybatisQuerierDataManipulator.class );\n\n            RDBTargetTableMeta meta = (\n                    new 
GenericRDBTargetTableMeta(\"test_table\", \"id\", String.class, manipulator )\n            ).addValueMetaKey( \"value\" );/*.addValueMetaKey( \"id\" ).addValueMetaKey( \"value\" )*/\n\n            CountSelfPooledPageDictCache<String > cache = new LocalLRUPrimaryPooledDictCache<>( 100, 3,\n                    new ContiguousNumIndexBatchPageSourceRetriever<>( meta, 100, \"id\" )\n            );\n            RDBMapQuerier<Integer, String > querier = new RDBMapQuerier<>( meta, cache );\n//            querier.insert(1, \"value1\");\n//            querier.insert(2, \"value2\");\n//            querier.insert(3, \"value3\");\n//            querier.insert(4, \"value4\");\n\n//            for ( int i = 0; i < (int)1e4; ++i ) {\n//                querier.insert(i, \"value\"+i);\n//            }\n\n//            Debug.trace( querier );\n//\n//            //querier.clear();\n//\n            Debug.trace(querier.get(1));\n            Debug.trace(querier.get(2));\n            Debug.trace(querier.get(3));\n            sqlSession.commit();\n\n            Debug.trace( querier.values() );\n\n            Debug.trace( querier.isEmpty() );\n\n            Debug.trace( querier.queryVal( \"SELECT * FROM test_table WHERE id > 100 AND id < 120\" ) );\n        }\n    }\n\n    public static void testRedisDict() throws Exception {\n        //IndexableMapQuerier<String, String > querier = new IndexableMapQuerier<>( \"b-serverkingpin\", 6397, \"\", \"wolf19310918\" );\n\n        //Debug.trace( querier.get( \"name\" ) );\n\n\n\n        JedisPoolConfig poolConfig = new JedisPoolConfig();\n        JedisPool jedisPool = new JedisPool( poolConfig, \"b-serverkingpin\", 6379, 2000, \"wolf19310918\", 0 );\n        Jedis jedis = jedisPool.getResource();\n        jedis.auth( \"wolf19310918\" );\n        //IndexableIteratableManipulator<String, String > manipulator = new GenericRedisHashManipulator<>( jedis );\n        IndexableIterableManipulator<String, String > manipulator = new 
GenericRedisMasterManipulator<>( jedis );\n        IndexableTargetScopeMeta meta = new GenericIndexableTargetScopeMeta( \"1\", \"test\", Object.class, manipulator );\n\n        //manipulator.insert( meta, \"hah\", \"hhhh\" );\n        //Debug.trace( manipulator.selectByKey( meta, \"name\" ) );\n\n        //Debug.trace( manipulator.selectByKey( meta, \"li\" ) );\n\n        //manipulator.insertByNS( meta, \"shit\", \"more\", \"fuck\" );\n        //manipulator.insertByNS( meta, \"shit\", \"more\", \"fuck\" );\n\n        jedis.select( 0 );\n//        manipulator.insert( meta, \"shit1\", \"vshit1\" );\n//        manipulator.insert( meta, \"crap:shit1\", \"crap:vshit1\" );\n//        manipulator.insert( meta, \"crap:shit2\", \"crap:vshit2\" );\n\n//        Debug.trace( manipulator.selectAllByNS( meta, null, null ) );\n\n\n\n\n\n\n        manipulator = new GenericRedisMasterManipulator<>( jedis );\n        meta = new GenericIndexableTargetScopeMeta( \"0\", \"\", Object.class, manipulator );\n        //IndexableMapQuerier<String, String > querier = new IndexableMapQuerier<>( meta );\n        //IndexableMapQuerier<String, String > querier = new IndexableMapQuerier<>( meta, false );\n        IndexableMapQuerier<String, String > querier = new IndexableMapQuerier<>( meta, true );\n\n        Debug.trace( querier.get( \"test\" ) );\n        Debug.trace( querier.get( \"test\" ) );\n        Debug.trace( querier.get( \"test\" ) );\n\n        Debug.trace( querier.containsKey( \"li\" ) );\n        Debug.trace( querier.containsValue( \"ssss\" ) );\n        Map map = querier.toMap();\n        Debug.trace( map.entrySet() );\n\n\n\n\n        manipulator = new GenericRedisHashManipulator<>( jedis );\n        meta = new GenericIndexableTargetScopeMeta( \"0\", \"student\", Object.class, manipulator );\n        Iterator iter = manipulator.iterator( meta );\n        while ( iter.hasNext() ) {\n            Debug.trace( iter.next() );\n        }\n\n        querier = new IndexableMapQuerier<>( 
meta );\n        map = querier.toMap();\n        Debug.trace( map.entrySet() );\n    }\n\n    public static void testMemCachedDict() throws Exception {\n        MemcachedClient client = new MemcachedClient( new InetSocketAddress( \"b-serverkingpin\", 11211 ) );\n\n        // 设置一个键值对\n        Future<Boolean> setFuture = client.set(\"key1\", 900, \"value1\");\n        Debug.trace(\"Set key1: \" + setFuture.get(5, TimeUnit.SECONDS));\n        Debug.trace(client.get(\"key1\"));\n\n\n        Debug.trace(client.get(\"key2\"));\n\n        Set<String> allKeys = new HashSet<>();\n\n        IndexableIterableManipulator<String, String > manipulator = new GenericMemcachedManipulator<> ( client );\n        IndexableTargetScopeMeta meta = new GenericIndexableTargetScopeMeta( \"\", \"\", Object.class, manipulator );\n        manipulator.insert( meta, \"key2\", \"val2\" );\n\n        Debug.trace( ((GenericMemcachedManipulator<String>) manipulator).keys(), manipulator.counts( meta, \"key1\" ) );\n\n\n        IndexableMapQuerier<String, String > querier = new IndexableMapQuerier<>( meta );\n\n        Map map = querier.toMap();\n        Debug.trace( map );\n\n        Debug.trace( querier.get( \"key2\" ) );\n\n        client.shutdown();\n    }\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n\n            //TestSchemeQuerier.testListDict();\n            //TestSchemeQuerier.testMapDict();\n            //TestSchemeQuerier.testLocalDict();\n            //TestSchemeQuerier.testLRUDictCache();\n            //TestSchemeQuerier.testHotspotDictCache();\n            //TestSchemeQuerier.testRDBDict();\n            TestSchemeQuerier.testRedisDict();\n            //TestSchemeQuerier.testMemCachedDict();\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/hydra-system-tritium/src/test/java/com/utils/TestVFS.java",
    "content": "package com.utils;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport org.apache.commons.vfs2.*;\nimport org.apache.commons.vfs2.cache.SoftRefFilesCache;\nimport org.apache.commons.vfs2.impl.StandardFileSystemManager;\nimport org.apache.commons.vfs2.provider.http5.Http5FileProvider;\nimport org.apache.commons.vfs2.provider.http5s.Http5sFileProvider;\nimport org.apache.commons.vfs2.provider.webdav.WebdavFileProvider;\n\nimport java.net.URLEncoder;\n\npublic class TestVFS {\n    public static void testCRUD ( ) throws Exception {\n//        FileSystemManager fsManager = null;\n//\n//        fsManager = VFS.getManager();\n        StandardFileSystemManager fsManager = new StandardFileSystemManager();\n        Http5FileProvider http5FileProvider = new Http5FileProvider();\n        fsManager.addProvider( \"http\", http5FileProvider );\n        fsManager.addProvider( \"https\", new Http5sFileProvider());\n        fsManager.addProvider( \"webdav\", new WebdavFileProvider() );\n        fsManager.setCacheStrategy(CacheStrategy.ON_CALL);\n        fsManager.setFilesCache(new SoftRefFilesCache());\n\n        String localFilePath = \"file:///K:/test/1.txt\";\n        localFilePath = \"\\\\\\\\b-serverkingpin\\\\ARBOmnium/EnderChest/1.txt\";\n\n        String username = \"undefined\";\n        String password = \"\";\n        String webdavHost = \"b-serverkingpin:8077\";\n        String encodedPassword = URLEncoder.encode(password, \"UTF-8\");\n        localFilePath = \"http://\" + username + \":\" + encodedPassword + \"@\" + webdavHost + \"/EnderChest/test/1.text\";\n        //localFilePath = \"https://www.rednest.cn/index.html\";\n\n        //String webdavFilePath = \"webdav://username:password@xxx/test\";\n        FileObject fileObject = fsManager.resolveFile(localFilePath);\n\n        //FileObject webdavFile = fsManager.resolveFile(webdavFilePath);\n\n\n        if ( !fileObject.exists() ) {\n            if ( fileObject.getType() 
== FileType.IMAGINARY ) {\n                fileObject.createFile();\n                Debug.trace( fileObject.getName()  );\n            }\n        }\n\n\n        Debug.trace( fileObject.getName() + \" exists.\" );\n\n        if ( fileObject.getType() == FileType.FOLDER ) {\n            for ( FileObject child : fileObject.getChildren() ) {\n                Debug.trace(\" - \" + child.getName().getBaseName());\n            }\n        }\n\n        if ( fileObject.getType() == FileType.FILE ) {\n\n            FileContent content = fileObject.getContent();\n            byte[] buffer = content.getInputStream().readAllBytes();\n            Debug.echo( new String(buffer) );\n\n        }\n    }\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n            TestVFS.testCRUD();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Hydra/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.hydra</groupId>\n    <artifactId>hydra</artifactId>\n    <packaging>pom</packaging>\n    <version>2.5.1</version>\n\n    <modules>\n        <module>hydra-architecture</module>\n        <module>hydra-architecture-conduct</module>\n        <module>hydra-architecture-storage</module>\n        <module>hydra-framework-runtime</module>\n        <module>hydra-message-control</module>\n        <module>hydra-message-broadcast</module>\n        <module>hydra-framework-service</module>\n        <module>hydra-framework-device</module>\n        <module>hydra-framework-config</module>\n        <module>hydra-framework-storage</module>\n\n        <module>hydra-kom-default-driver</module>\n        <module>hydra-lib-thrift-sdk</module>\n        <module>hydra-lib-grpc-service-sdk</module>\n        <module>hydra-lib-uofs-cache</module>\n\n        <module>hydra-system-tritium</module>\n        <module>hydra-system-reign</module>\n        <module>hydra-service-control</module>\n        <module>hydra-architecture-message</module>\n    </modules>\n</project>"
  },
  {
    "path": "LICENSE",
    "content": "MIT License\n\nCopyright (c) 2024 undefined\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
  },
  {
    "path": "Messenger/Messenger.iml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<module version=\"4\">\n  <component name=\"NewModuleRootManager\" LANGUAGE_LEVEL=\"JDK_11\">\n    <output url=\"file://$MODULE_DIR$/target/classes\" />\n    <output-test url=\"file://$MODULE_DIR$/target/test-classes\" />\n    <content url=\"file://$MODULE_DIR$\">\n      <sourceFolder url=\"file://$MODULE_DIR$/src/main/java\" isTestSource=\"false\" />\n      <sourceFolder url=\"file://$MODULE_DIR$/src/test/java\" isTestSource=\"true\" />\n      <excludeFolder url=\"file://$MODULE_DIR$/target\" />\n    </content>\n    <orderEntry type=\"inheritedJdk\" />\n    <orderEntry type=\"sourceFolder\" forTests=\"false\" />\n    <orderEntry type=\"module\" module-name=\"Pinecone\" />\n    <orderEntry type=\"library\" name=\"Maven: mysql:mysql-connector-java:8.0.23\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.google.protobuf:protobuf-java:3.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: commons-fileupload:commons-fileupload:1.3.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: commons-io:commons-io:2.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-amqp:2.3.9.RELEASE\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-autoconfigure:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-logging:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: ch.qos.logback:logback-classic:1.2.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: ch.qos.logback:logback-core:1.2.3\" 
level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.logging.log4j:log4j-to-slf4j:2.13.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.logging.log4j:log4j-api:2.13.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.slf4j:jul-to-slf4j:1.7.30\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: jakarta.annotation:jakarta.annotation-api:1.3.5\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.yaml:snakeyaml:1.27\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-messaging:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-beans:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.amqp:spring-rabbit:2.3.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.amqp:spring-amqp:2.3.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.retry:spring-retry:1.3.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.rabbitmq:amqp-client:5.10.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-context:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-tx:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-test:2.3.9.RELEASE\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-test:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-test-autoconfigure:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.jayway.jsonpath:json-path:2.4.0\" level=\"project\" />\n    <orderEntry type=\"library\" 
name=\"Maven: net.minidev:json-smart:2.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: net.minidev:accessors-smart:1.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.ow2.asm:asm:5.0.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.slf4j:slf4j-api:1.7.30\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: jakarta.xml.bind:jakarta.xml.bind-api:2.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: jakarta.activation:jakarta.activation-api:1.2.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.assertj:assertj-core:3.18.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.hamcrest:hamcrest:2.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.jupiter:junit-jupiter:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.jupiter:junit-jupiter-api:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.opentest4j:opentest4j:1.2.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.platform:junit-platform-commons:1.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.jupiter:junit-jupiter-params:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"RUNTIME\" name=\"Maven: org.junit.jupiter:junit-jupiter-engine:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.vintage:junit-vintage-engine:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apiguardian:apiguardian-api:1.1.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.platform:junit-platform-engine:1.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: junit:junit:4.13.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: 
org.mockito:mockito-core:3.6.28\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: net.bytebuddy:byte-buddy:1.10.18\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: net.bytebuddy:byte-buddy-agent:1.10.18\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.objenesis:objenesis:3.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.mockito:mockito-junit-jupiter:3.6.28\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.skyscreamer:jsonassert:1.5.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.vaadin.external.google:android-json:0.0.20131108.vaadin1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-core:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-jcl:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-test:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.xmlunit:xmlunit-core:2.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-web:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-json:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.module:jackson-module-parameter-names:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-tomcat:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: 
org.glassfish:jakarta.el:3.0.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat.embed:tomcat-embed-websocket:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-web:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-webmvc:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-aop:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-expression:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.9.10\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-core:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-annotations:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-databind:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.codehaus.woodstox:stax2-api:4.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.woodstox:woodstox-core:5.3.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.alibaba:fastjson:1.2.75\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat.embed:tomcat-embed-core:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat:tomcat-annotations-api:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.projectlombok:lombok:1.18.16\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: 
org.aspectj:aspectjweaver:1.9.6\" level=\"project\" />\n  </component>\n</module>"
  },
  {
    "path": "Messenger/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <groupId>com.Sauron</groupId>\n        <artifactId>sauron</artifactId>\n        <version>1.0-SNAPSHOT</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <artifactId>Messenger</artifactId>\n    <packaging>jar</packaging>\n\n    <name>Messenger</name>\n    <url>http://maven.apache.org</url>\n\n    <properties>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n\n        <!--<dependency>-->\n            <!--<groupId>io.netty</groupId>-->\n            <!--<artifactId>netty-all</artifactId>-->\n        <!--</dependency>-->\n\n        <dependency>\n            <groupId>com.Sauron</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>3.3.1</version>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-amqp</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-test</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>com.fasterxml.jackson.dataformat</groupId>\n            <artifactId>jackson-dataformat-xml</artifactId>\n            <version>2.9.10</version>\n        </dependency>\n\n        <dependency>\n            <groupId>com.alibaba</groupId>\n            <artifactId>fastjson</artifactId>\n            <version>1.2.75</version>\n        </dependency>\n\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/App.java",
    "content": "package com.genius;\n\n/**\n * Hello world!\n *\n */\npublic class App \n{\n    public static void main( String[] args )\n    {\n        System.out.println( \"Hello World!\" );\n    }\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/CommonMessageBuilder.java",
    "content": "package com.genius.common.UlfUMC;\n\n\nimport java.util.Map;\n\n/**\n * @author Genius\n * @date 2023/05/14 20:40\n **/\npublic class CommonMessageBuilder implements MessageBuilder{\n\n    private UlfUMCMessage message;\n\n    public CommonMessageBuilder(){\n        message = new UlfUMCMessage();\n    }\n\n    public UlfUMCMessage getMessage(){\n        return message;\n    }\n\n    @Override\n    public MessageBuilder func(String funcName) {\n        message.getUlfUMCBody().setFunction(funcName);\n        return this;\n    }\n\n    @Override\n    public MessageBuilder method(UlfUMCMessageType methodType) {\n        message.getUlfUMCBody().setMethod(methodType);\n        return this;\n    }\n\n    @Override\n    public MessageBuilder data(Map<String,Object> data) {\n        message.getUlfUMCBody().setData(data);\n        return this;\n    }\n\n    @Override\n    public UlfUMCMessage build() {\n        return this.message;\n    }\n\n    @Override\n    public byte[] toByte() {\n        return UlfUMCMessage.encode(message);\n    }\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/ErrorMessageBuilder.java",
    "content": "package com.genius.common.UlfUMC;\n\nimport com.genius.pool.FunctionNamePool;\n\nimport java.util.Map;\n\n/**\n * @author Genius\n * @date 2023/05/18 20:14\n **/\npublic class ErrorMessageBuilder implements MessageBuilder{\n\n    private UlfUMCMessage errorMessage;\n\n    public ErrorMessageBuilder(){\n        errorMessage = new UlfUMCMessage(UlfUMCMessageType.GET, FunctionNamePool.ERROR,Map.of(\"error\",\"\"));\n    }\n    @Override\n    public MessageBuilder func(String funcName) {\n        return null;\n    }\n\n    @Override\n    public MessageBuilder method(UlfUMCMessageType methodType) {\n        return null;\n    }\n\n    @Override\n    public MessageBuilder data(Map<String, Object> data) {\n        return null;\n    }\n\n    public MessageBuilder error(Object data){\n        errorMessage.getUlfUMCBody().getData().put(\"error\",data);\n        return this;\n    }\n\n    @Override\n    public UlfUMCMessage build() {\n        return errorMessage;\n    }\n\n    @Override\n    public byte[] toByte() {\n        return UlfUMCMessage.encode(errorMessage);\n    }\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/MessageBuilder.java",
    "content": "package com.genius.common.UlfUMC;\n\n\n\nimport java.util.Map;\n\npublic interface MessageBuilder {\n\n\n    MessageBuilder func(String funcName);\n\n    MessageBuilder method(UlfUMCMessageType methodType);\n\n    MessageBuilder data(Map<String,Object> data);\n\n    UlfUMCMessage build();\n\n    byte[] toByte();\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/MessageFactory.java",
    "content": "package com.genius.common.UlfUMC;\n\n/**\n * @author Genius\n * @date 2023/05/14 20:42\n **/\npublic class MessageFactory{\n\n    public enum MessageBuilderType{\n        COMMON,\n        SLAVE,\n        ERROR\n    }\n\n    public static MessageBuilder getMessageBuilder(MessageBuilderType builderType){\n        switch (builderType){\n            case SLAVE:return new SlaveMessageBuilder();\n            case ERROR: return new ErrorMessageBuilder();\n            default:return new CommonMessageBuilder();\n        }\n    }\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/SlaveMessageBuilder.java",
    "content": "package com.genius.common.UlfUMC;\n\nimport com.genius.config.SystemConfig;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\n/**\n * @author Genius\n * @date 2023/05/14 20:32\n **/\npublic class SlaveMessageBuilder extends CommonMessageBuilder{\n\n\n\n    public SlaveMessageBuilder(){\n        super();\n    }\n\n    @Override\n    public MessageBuilder data(Map<String,Object> data) {\n        HashMap<String,Object> newData = new HashMap<>(data);\n        newData.put(\"serviceId\",SystemConfig.ServiceId);\n        getMessage().getUlfUMCBody().setData(newData);\n        return this;\n    }\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCBody.java",
    "content": "package com.genius.common.UlfUMC;\n\nimport lombok.AllArgsConstructor;\nimport lombok.Data;\nimport lombok.NoArgsConstructor;\n\nimport java.io.Serializable;\nimport java.util.Map;\n\n/**\n * @author Genius\n * @date 2023/05/16 21:34\n **/\n@Data\n@AllArgsConstructor\n@NoArgsConstructor\npublic class UlfUMCBody implements Serializable {\n    private UlfUMCMessageType method;\n\n    private String function;\n\n    private Map<String,Object> data;\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCMessage.java",
    "content": "package com.genius.common.UlfUMC;\n\nimport com.alibaba.fastjson.JSONObject;\nimport com.genius.util.IntToByteUtil;\n//import io.netty.buffer.ByteBuf;\nimport lombok.AllArgsConstructor;\nimport lombok.Data;\nimport lombok.NoArgsConstructor;\n\nimport java.io.Serializable;\nimport java.nio.ByteBuffer;\nimport java.nio.charset.StandardCharsets;\nimport java.util.Arrays;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n * @author Genius\n * @date 2023/05/16 16:41\n **/\n\n@Data\npublic class UlfUMCMessage implements Serializable {\n\n    private UlfUMCProtocol ulfUMCProtocol;\n\n    private UlfUMCBody ulfUMCBody;\n\n    public UlfUMCMessage(UlfUMCMessageType methodType,String function,Map<String,Object> data){\n        ulfUMCBody = new UlfUMCBody(methodType,function,data);\n        ulfUMCProtocol = new UlfUMCProtocol();\n        ulfUMCProtocol.setLength(JSONObject.toJSONString(ulfUMCBody).length());\n    }\n\n    public UlfUMCMessage(){\n        ulfUMCBody = new UlfUMCBody();\n        ulfUMCProtocol = new UlfUMCProtocol();\n        ulfUMCProtocol.setLength(0);\n    }\n\n    public static byte[] encode(UlfUMCMessage message){\n        UlfUMCProtocol ulfUMCProtocol = message.getUlfUMCProtocol();\n        UlfUMCBody ulfUMCBody = message.getUlfUMCBody();\n        ByteBuffer buffer = ByteBuffer.allocate(message.getLength()+UlfUMCProtocol.header.length()+1028);\n\n        buffer.put(UlfUMCProtocol.header.getBytes());\n        String body = JSONObject.toJSONString(ulfUMCBody);\n        buffer.put(IntToByteUtil.intToByte(message.getLength()));\n        buffer.put(body.getBytes(StandardCharsets.UTF_8));\n\n        return buffer.array();\n    }\n\n    public static UlfUMCMessage decode(byte[] in) throws UlfUMCMessageException {\n        try {\n            int index = 0;\n            String UMCProtocolHeader = new String(Arrays.copyOfRange(in,index,UlfUMCProtocol.header.length()));\n            
if(!UMCProtocolHeader.equals(UlfUMCProtocol.header)){\n                throw new UlfUMCMessageException(\"UlfUMCMessage decode Error\");\n            }\n            index+=UMCProtocolHeader.length();\n            int length = IntToByteUtil.byteArrayToInt(Arrays.copyOfRange(in,index,index+4));\n            index+=4;\n            String body = new String(Arrays.copyOfRange(in,index,index+length));\n\n            Map<String,Object> map = JSONObject.parseObject(body, Map.class);\n            UlfUMCMessageType methodType = UlfUMCMessageType.valueOf((String) map.get(\"method\"));\n            String function = (String) map.get(\"function\");\n            Map<String,Object> data = new HashMap<>();\n            if (map.containsKey(\"data\")) {\n                data = JSONObject.parseObject(map.get(\"data\").toString(), Map.class);\n            }\n            return new UlfUMCMessage(methodType,function,data);\n        }catch (Exception e){\n            throw new UlfUMCMessageException(\"UlfUMCMessage decode Error\");\n        }\n    }\n\n    public int getLength(){\n        int length = JSONObject.toJSONString(ulfUMCBody).length();\n        ulfUMCProtocol.setLength(length);\n        return length;\n    }\n\n    public Map getData(){\n        return this.ulfUMCBody.getData();\n    }\n\n    public UlfUMCMessageType getMethod(){\n        return this.ulfUMCBody.getMethod();\n    }\n\n    public String getFunction(){\n        return this.ulfUMCBody.getFunction();\n    }\n\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCMessageException.java",
    "content": "package com.genius.common.UlfUMC;\n\n/**\n * @author Genius\n * @date 2023/05/16 22:05\n **/\npublic class UlfUMCMessageException extends Exception{\n\n\n    private String reason;\n\n    public UlfUMCMessageException(String reason) {\n        this.reason = reason;\n    }\n\n    @Override\n    public String getMessage() {\n        return reason;\n    }\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCMessageType.java",
    "content": "package com.genius.common.UlfUMC;\n\n/**\n * @author Genius\n * @date 2023/05/18 18:39\n **/\npublic enum UlfUMCMessageType {\n    GET(\"Get\"),\n    POST(\"Post\");\n\n    private final String value;\n    UlfUMCMessageType(String value){this.value = value;}\n\n    public String getName(){\n        return this.value;\n    }\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/common/UlfUMC/UlfUMCProtocol.java",
    "content": "package com.genius.common.UlfUMC;\n\nimport lombok.AllArgsConstructor;\nimport lombok.Data;\nimport lombok.NoArgsConstructor;\n\nimport javax.naming.directory.SearchResult;\nimport java.io.Serializable;\n\n/**\n * @author Genius\n * @date 2023/05/16 16:32\n **/\n\n@AllArgsConstructor\n@NoArgsConstructor\npublic class UlfUMCProtocol implements Serializable {\n\n    public static final String header = \"UMC/1.1\";\n\n    private int length;\n\n    public void setLength(int length){\n        this.length = length;\n    }\n\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/config/MessageConverterConfig.java",
    "content": "package com.genius.config;\n\nimport org.springframework.amqp.support.converter.Jackson2JsonMessageConverter;\nimport org.springframework.amqp.support.converter.MessageConverter;\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\n\n/**\n * @author Genius\n * @date 2023/05/12 15:44\n **/\n\n@Configuration\npublic class MessageConverterConfig {\n\n    @Bean\n    public MessageConverter messageConverter() { return new Jackson2JsonMessageConverter();}\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/pool/FunctionNamePool.java",
    "content": "package com.genius.pool;\n\n/**\n * @author Genius\n * @date 2023/05/12 18:14\n **/\npublic class FunctionNamePool {\n    public static final String QUERY_TASK_RANGE = \"QueryTaskRange\";\n\n    public static final String SHUTDOWN = \"shutdown\";\n\n    public static final String ERROR = \"Error\";\n}\n"
  },
  {
    "path": "Messenger/src/main/java/com/genius/pool/MqPool.java",
    "content": "package com.genius.pool;\n\n/**\n * @author Genius\n * @date 2023/05/12 18:23\n **/\npublic class MqPool {\n    public static final String EXCHANGE_TOPIC_NONJRON_TASK = \"com.pinecone.tritium.task.direct\";\n\n    public static final String MASTER_TASK_SEND_CENTER = \"task.send\";\n}\n"
  },
  {
    "path": "Messenger/src/test/java/com/genius/AppTest.java",
    "content": "package com.genius;\n\nimport junit.framework.Test;\nimport junit.framework.TestCase;\nimport junit.framework.TestSuite;\n\n/**\n * unit test for simple App.\n */\npublic class AppTest \n    extends TestCase\n{\n    /**\n     * Create the test case\n     *\n     * @param testName name of the test case\n     */\n    public AppTest( String testName )\n    {\n        super( testName );\n    }\n\n    /**\n     * @return the suite of tests being tested\n     */\n    public static Test suite()\n    {\n        return new TestSuite( AppTest.class );\n    }\n\n    /**\n     * Rigourous Test :-)\n     */\n    public void testApp()\n    {\n        assertTrue( true );\n    }\n}\n"
  },
  {
    "path": "Odin/odin-architecture/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>odin</artifactId>\n        <groupId>com.walnut.odin</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.odin</groupId>\n    <artifactId>odin-architecture</artifactId>\n    <version>2.5.1</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n         
   <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/atlas/advance/GraphAdvancer.java",
    "content": "package com.walnut.odin.atlas.advance;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\n\nimport java.util.List;\n\npublic interface GraphAdvancer extends Pinenut {\n    void traverse( VectorDAG vectorDAG );\n\n    List<QueueElement> fetchExecuteNode(long offset, long limit );\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/atlas/advance/GraphStratumAdvancer.java",
    "content": "package com.walnut.odin.atlas.advance;\n\npublic interface GraphStratumAdvancer extends GraphAdvancer {\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/atlas/advance/GraphStratumTape.java",
    "content": "package com.walnut.odin.atlas.advance;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\n\nimport java.util.List;\n\npublic interface GraphStratumTape extends Pinenut {\n\n    GraphNode queryNodeByIndex( long index );\n\n    GUID queryNodeGuidByIndex( long index );\n\n    List<GraphNode> fetchNodes( List<GUID> guids );\n\n    List<GraphNode> fetchNodes( long offset, long limit );\n\n    List<GraphNode> fetchNodes( long queuePriority, long offset, long limit );\n\n\n    List<GUID> fetchGuids( long offset, long limit );\n\n    List<GUID> fetchGuids( long queryPriority, long offset, long limit );\n\n    int countStratum();\n\n    DeflectPriorityQueue query(int stratumId, short runtimePriority );\n\n    DeflectPriorityQueue getExecutionPriorityQueue();\n\n}\n\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/atlas/graph/RuntimeAtlasInstrument.java",
    "content": "package com.walnut.odin.atlas.graph;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.unit.vgraph.AtlasInstrument;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.slime.meta.TableIndexMeta;\n\nimport com.walnut.odin.atlas.advance.GraphStratumTape;\n\n/**\n *  Pinecone Ursus For Java RuntimeAtlas\n *  Author: Ken, Harald.E (Dragon King)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Runtime Orchestration Atlas\n *  统一大规模运行矢量调度云图\n *  *****************************************************************************************\n */\npublic interface RuntimeAtlasInstrument extends Pinenut, AtlasInstrument {\n\n    TaskInstrument taskInstrument();\n\n    GraphNode queryGraphNodeByTaskGuid( GUID taskGuid );\n\n    TaskElement queryTaskElementByGuid( GUID graphNodeGuid );\n\n    GraphStratumTape tapedGraphStratumAdvancer(VectorDAG vectorDAG, KOIMappingDriver driver );\n\n    String querySegmentName( GUID vgraphGuid, short stratumId, short runtimePriority );\n\n    int countStratum( GUID vgraphGuid );\n\n    int countPriority( GUID vgraphGuid, short stratumId );\n\n    void putStratumMeta( GUID vgraphGuid, short stratumId, short runtimePriority, String segmentName );\n\n    VectorDAG getByLayerGuid( GUID layerGuid );\n\n    VectorDAG queryByPath( String path );\n\n\n    List<GUID> fetchParentIds( GUID graphNodeGuid );\n\n\n    @Unsafe( \"TestOnly\" )\n    List<GraphNode> fetchIsolatedNodesAll();\n\n    List<GraphNode> fetchIsolatedNodes( long 
offset, long limit );\n\n    List<GraphNode> fetchIsolatedNodesById( long idStart, long idEnd );\n\n    TableIndexMeta getIsolatedNodeIndexMeta();\n\n    long queryMaxIsolatedNodePage( long limit );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/CollectiveTaskLegionary.java",
    "content": "package com.walnut.odin.conduct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.walnut.odin.conduct.entity.RegimentJoinResponse;\nimport com.walnut.odin.proc.RemoteProcessServiceRPCException;\nimport com.walnut.odin.proc.client.RemoteProcessManagerClient;\n\npublic interface CollectiveTaskLegionary extends Pinenut {\n\n    String getName();\n\n    long getClientId();\n\n    ProcessManager processManager();\n\n    void startService () throws RemoteProcessServiceRPCException;\n\n    RegimentJoinResponse joinRegiment () throws RegimentException;\n\n    RemoteProcessManagerClient remoteProcessManagerClient();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/CollectiveTaskRegiment.java",
    "content": "package com.walnut.odin.conduct;\n\nimport com.pinecone.framework.system.regime.Regiment;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.walnut.odin.conduct.entity.LaunchedContext;\nimport com.walnut.odin.conduct.entity.RegimentJoinRequest;\nimport com.walnut.odin.conduct.entity.RegimentJoinResponse;\nimport com.walnut.odin.dispatch.TaskDispatchException;\nimport com.walnut.odin.dispatch.TaskDispatcher;\nimport com.walnut.odin.proc.RemoteProcessServiceRPCException;\nimport com.walnut.odin.proc.server.RemoteProcessManagerServer;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTask;\nimport com.walnut.odin.task.troll.InstanceLaunchException;\nimport com.walnut.odin.task.troll.LaunchFeature;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\n\npublic interface CollectiveTaskRegiment extends Regiment, Slf4jTraceable {\n\n    RavenTask createTask( TaskElement taskElement, Identification serviceId );\n\n    void purgeTask( GUID guid );\n\n    RavenTask affirmTask( String path, Identification serviceId, TaskElement metaInfos );\n\n    RavenTask queryTaskByPath( String path ) ;\n\n    RavenTask getTaskByGuid( GUID taskGuid ) ;\n\n\n\n    RemoteProcessManagerServer remoteProcessManagerServer();\n\n    ProcessManager processManager();\n\n    CentralizedTaskInstrument taskInstrument();\n\n    TaskExecutionLauncher taskExecutionLauncher();\n\n    TaskDispatcher taskDispatcher();\n\n    void startRemoteProcessServer() throws RemoteProcessServiceRPCException;\n\n\n\n    LaunchedContext create( GUID taskGuid, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException;\n\n    LaunchedContext launch( GUID taskGuid, LaunchFeature feature ) throws InstanceLaunchException, 
TaskDispatchException;\n\n\n    LaunchedContext create( String path, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException;\n\n    LaunchedContext launch( String path, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException;\n\n\n    RegimentJoinResponse invokeJoinRegiment( RegimentJoinRequest request );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/ProcessorDeployManager.java",
    "content": "package com.walnut.odin.conduct;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\n\npublic interface ProcessorDeployManager extends Manager {\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/RegimentException.java",
    "content": "package com.walnut.odin.conduct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class RegimentException extends Exception implements Pinenut {\n\n    public RegimentException() {\n        super();\n    }\n\n    public RegimentException( String message ) {\n        super(message);\n    }\n\n    public RegimentException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RegimentException( Throwable cause ) {\n        super(cause);\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/InstanceAtlasAdjacent.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface InstanceAtlasAdjacent extends Pinenut {\n    GUID getGuid();\n    void setGuid( GUID guid );\n\n    GUID getParentGuid();\n    void setParentGuid( GUID parentGuid );\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/InstanceAtlasNode.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface InstanceAtlasNode extends Pinenut {\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    GUID getInstanceGuid();\n    void setInstanceGuid(GUID instanceGuid);\n\n    String getNodeName();\n    void setNodeName(String nodeName);\n\n    boolean isIsolated();\n    void setIsIsolated(boolean isIsolated);\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/InstanceEvent.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.time.LocalDateTime;\n\npublic interface InstanceEvent extends Pinenut {\n    GUID getGuid();\n    void setGuid(GUID guid);\n\n    GUID getTaskGuid();\n    void setTaskGuid(GUID taskGuid);\n\n    GUID getInstanceGuid();\n    void setInstanceGuid(GUID instanceGuid);\n\n    String getInstanceName();\n    void setInstanceName(String instanceName);\n\n    int getRetryTimes();\n    void setRetryTimes(int retryTimes);\n\n    int getCurrentRetryNumber();\n    void setCurrentRetryNumber(int currentRetryNumber);\n\n    String getEventType();\n    void setEventType(String eventType);\n\n    String getState();\n    void setState(String state);\n\n    String getEventContext();\n    void setEventContext(String eventContext);\n\n    LocalDateTime getExecTime();\n    void setExecTime(LocalDateTime execTime);\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/InstanceExec.java",
    "content": "package com.walnut.odin.conduct.entity;\n\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport java.time.LocalDateTime;\n\npublic interface InstanceExec extends Pinenut {\n    long getId();\n    void setId(long id);\n\n    GUID getTaskGuid();\n    void setTaskGuid(GUID taskGuid);\n\n    GUID getInstanceGuid();\n    void setInstanceGuid(GUID instanceGuid);\n\n    String getTaskName();\n    void setTaskName(String taskName);\n\n    String getInstanceName();\n    void setInstanceName(String instanceName);\n\n    String getProcessorQueue();\n    void setProcessorQueue(String processorQueue);\n\n    String getClusterName();\n    void setClusterName(String clusterName);\n\n    String getExecState();\n    void setExecState(String execState);\n\n    int getCurrentRetryNumber();\n    void setCurrentRetryNumber(int currentRetryNumber);\n\n    int getRetryTimes();\n    void setRetryTimes(int retryTimes);\n\n    LocalDateTime getStartTime();\n    void setStartTime(LocalDateTime startTime);\n\n    LocalDateTime getRunTime();\n    void setRunTime(LocalDateTime runTime);\n\n    LocalDateTime getFinishTime();\n    void setFinishTime(LocalDateTime finishTime);\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/LaunchedContext.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.walnut.odin.task.RavenTaskInstance;\n\npublic class LaunchedContext implements Pinenut {\n\n    protected UProcess process;\n\n    protected RavenTaskInstance taskInstance;\n\n    public LaunchedContext( UProcess process, RavenTaskInstance taskInstance ) {\n        this.process = process;\n        this.taskInstance = taskInstance;\n    }\n\n    public RavenTaskInstance getTaskInstance() {\n        return this.taskInstance;\n    }\n    public void setTaskInstance( RavenTaskInstance taskInstance ) {\n        this.taskInstance = taskInstance;\n    }\n\n    public UProcess getProcess() {\n        return this.process;\n    }\n\n    public void setProcess( UProcess process ) {\n        this.process = process;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/RegimentJoinRequest.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class RegimentJoinRequest implements Pinenut {\n\n    protected String mszNodeName;\n    protected Long   mnClientId;\n\n    public RegimentJoinRequest() {\n    }\n\n    public String getNodeName() {\n        return this.mszNodeName;\n    }\n\n    public void setNodeName( String szNodeName ) {\n        this.mszNodeName = szNodeName;\n    }\n\n    public Long getClientId() {\n        return this.mnClientId;\n    }\n\n    public void setClientId( Long nClientId ) {\n        this.mnClientId = nClientId;\n    }\n\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/entity/RegimentJoinResponse.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class RegimentJoinResponse implements Pinenut {\n\n    protected String mszGuid;\n    protected String mszName;\n    protected String mszClusterPath;\n    protected String mszClusterName;\n    protected long   mnControlClientId;\n    protected int    mnPriority;\n\n    protected String mszQueueName;\n    protected int    mnQueueMaxCapacity;\n    protected int    mnQueueMinCapacity;\n    protected int    mnQueueRuntimeInstanceCapacity;\n\n    protected String mszErrorMsg;\n\n    public RegimentJoinResponse() {\n    }\n\n\n    public String getGuid() {\n        return this.mszGuid;\n    }\n\n    public void setGuid( String szGuid ) {\n        this.mszGuid = szGuid;\n    }\n\n    public String getName() {\n        return this.mszName;\n    }\n\n    public void setName( String szName ) {\n        this.mszName = szName;\n    }\n\n    public String getClusterPath() {\n        return this.mszClusterPath;\n    }\n\n    public void setClusterPath( String szClusterPath ) {\n        this.mszClusterPath = szClusterPath;\n    }\n\n    public String getClusterName() {\n        return this.mszClusterName;\n    }\n\n    public void setClusterName( String szClusterName ) {\n        this.mszClusterName = szClusterName;\n    }\n\n    public long getControlClientId() {\n        return this.mnControlClientId;\n    }\n\n    public void setControlClientId( long nControlClientId ) {\n        this.mnControlClientId = nControlClientId;\n    }\n\n    public int getPriority() {\n        return this.mnPriority;\n    }\n\n    public void setPriority( int nPriority ) {\n        this.mnPriority = nPriority;\n    }\n\n    public String getQueueName() {\n        return this.mszQueueName;\n    }\n\n    public void setQueueName( String szQueueName ) {\n        this.mszQueueName = szQueueName;\n    }\n\n    public int getQueueMaxCapacity() {\n        return 
this.mnQueueMaxCapacity;\n    }\n\n    public void setQueueMaxCapacity( int nQueueMaxCapacity ) {\n        this.mnQueueMaxCapacity = nQueueMaxCapacity;\n    }\n\n    public int getQueueMinCapacity() {\n        return this.mnQueueMinCapacity;\n    }\n\n    public void setQueueMinCapacity( int nQueueMinCapacity ) {\n        this.mnQueueMinCapacity = nQueueMinCapacity;\n    }\n\n    public int getQueueRuntimeInstanceCapacity() {\n        return this.mnQueueRuntimeInstanceCapacity;\n    }\n\n    public void setQueueRuntimeInstanceCapacity( int nQueueRuntimeInstanceCapacity ) {\n        this.mnQueueRuntimeInstanceCapacity = nQueueRuntimeInstanceCapacity;\n    }\n\n    public String getErrorMsg() {\n        return this.mszErrorMsg;\n    }\n\n    public void setErrorMsg( String errorMsg ) {\n        this.mszErrorMsg = errorMsg;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/InstanceScheduleAllocator.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.walnut.odin.conduct.schedule.entity.ScheduleFittingContext;\n\npublic interface InstanceScheduleAllocator extends Pinenut {\n\n    String getPartitionName();\n\n    ScheduleFittingContext pipeFitting( Collection<InstanceEntry> instances );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/InstanceScheduleImpetus.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport java.time.LocalDateTime;\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\n\npublic interface InstanceScheduleImpetus extends Pinenut {\n\n    UniformTaskScheduler taskScheduler();\n\n    void impelSchedulableInstances( Collection<TaskInstanceStatus> statuses, LocalDateTime targetTime );\n\n    void impelPrelaunchInstances( LocalDateTime targetTime );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/TaskSchedulePreparator.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport java.time.LocalDateTime;\nimport java.util.Collection;\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\n\npublic interface TaskSchedulePreparator extends Pinenut {\n\n    UniformTaskScheduler taskScheduler();\n\n\n    void prepareSchedulableTasks( Collection<TaskScheduleCycle> cycles, LocalDateTime targetTime );\n\n    void prepareSchedulableTasksDaily( LocalDateTime targetTime );\n\n    List<TaskElement> fetchSchedulableTasksInRange( long idMin, long idMax, Collection<TaskScheduleCycle> cycles, LocalDateTime targetTime );\n\n    List<TaskElement> fetchSchedulableTasksDaily( long idMin, long idMax, LocalDateTime targetTime );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/UniformTaskScheduler.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\nimport com.walnut.odin.dispatch.TaskDispatcher;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTaskConfig;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\n\npublic interface UniformTaskScheduler extends Pinenut {\n\n    RavenTaskConfig ravenTaskConfig();\n\n    CentralizedTaskInstrument taskInstrument();\n\n    InstanceInstrument instanceInstrument();\n\n    RuntimeAtlasInstrument atlasInstrument();\n\n    TaskExecutionLauncher taskExecutionLauncher();\n\n    TaskDispatcher taskDispatcher();\n\n    String getPartitionName();\n\n\n    TaskSchedulePreparator taskSchedulePreparator();\n\n    InstanceScheduleImpetus instanceScheduleImpetus();\n\n    InstanceScheduleAllocator instanceScheduleAllocator();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/entity/ConcurrentQuota.java",
    "content": "package com.walnut.odin.conduct.schedule.entity;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic class ConcurrentQuota implements Pinenut {\n\n    private final short      mnPriority;\n\n    // 最大水位，如果高于该水位，对应优先级的任务不允许再并行启动\n    // The maximum level, if it is higher than this level,\n    // tasks of corresponding priority are not allowed to start in parallel again\n    private volatile double  mnMaximumRatio;\n    private volatile Long    mnMaximumCnt;\n\n\n    // 最低保障水位，如果低于该水位，会主动启动对应优先级的任务\n    // The minimum guaranteed level,\n    // if it is lower than this level, the corresponding priority task will be initiated actively\n    private volatile double  mnMinimumRatio;\n    private volatile Long    mnMinimumCnt;\n\n    public ConcurrentQuota( short priority ) {\n        this.mnPriority = priority;\n    }\n\n    public short getPriority() {\n        return this.mnPriority;\n    }\n\n    public double getMaximumRatio() {\n        return this.mnMaximumRatio;\n    }\n\n    public void setMaximumRatio( double nMaximumRatio ) {\n        this.mnMaximumRatio = nMaximumRatio;\n    }\n\n    public Long getMaximumCnt() {\n        return this.mnMaximumCnt;\n    }\n\n    public void setMaximumCnt( Long nMaximumCnt ) {\n        this.mnMaximumCnt = nMaximumCnt;\n    }\n\n    public double getMinimumRatio() {\n        return this.mnMinimumRatio;\n    }\n\n    public void setMinimumRatio( double nMinimumRatio ) {\n        this.mnMinimumRatio = nMinimumRatio;\n    }\n\n    public Long getMinimumCnt() {\n        return this.mnMinimumCnt;\n    }\n\n    public void setMinimumCnt( Long nMinimumCnt ) {\n        this.mnMinimumCnt = nMinimumCnt;\n    }\n\n\n    public boolean isMaximumRatioMode() {\n        return this.mnMaximumRatio >= 0D;\n    
}\n\n    public boolean isMinimumRatioMode() {\n        return this.mnMinimumRatio >= 0D;\n    }\n\n    public boolean isMaximumUnlimited() {\n        return this.mnMaximumCnt != null && this.mnMaximumCnt < 0L;\n    }\n\n    public boolean isMinimumUnlimited() {\n        return this.mnMinimumCnt != null && this.mnMinimumCnt < 0L;\n    }\n\n\n\n    public static ConcurrentQuota from( JSONObject map ) {\n        short nPriority = (short) map.optLong( \"priority\" );\n        ConcurrentQuota quota = new ConcurrentQuota( nPriority );\n\n        quota.setMaximumRatio( map.optDouble( \"maximumRatio\", 0D ) );\n        quota.setMinimumRatio( map.optDouble( \"minimumRatio\", 0D ) );\n\n        if ( map.hasOwnProperty( \"maximumCnt\" ) && !map.isNull( \"maximumCnt\" ) ) {\n            long nMaximumCnt = map.optLong( \"maximumCnt\" );\n            if ( nMaximumCnt < 0 ) {\n                quota.setMaximumCnt( Long.MAX_VALUE );\n            }\n            else {\n                quota.setMaximumCnt( nMaximumCnt );\n            }\n        }\n\n        if ( map.hasOwnProperty( \"minimumCnt\" ) && !map.isNull( \"minimumCnt\" ) ) {\n            long nMinimumCnt = map.optLong( \"minimumCnt\" );\n            if ( nMinimumCnt < 0 ) {\n                quota.setMinimumCnt( Long.MAX_VALUE );\n            }\n            else {\n                quota.setMinimumCnt( nMinimumCnt );\n            }\n        }\n\n        return quota;\n    }\n\n    public static Map<String, ConcurrentQuota> fromThose( JSONObject map ) {\n        Map<String, ConcurrentQuota> quotas = new HashMap<>();\n\n        if ( map == null ) {\n            return quotas;\n        }\n\n        for ( Map.Entry<String, Object> entry : map.entrySet() ) {\n            String szKey = entry.getKey();\n            JSONObject joQuota = (JSONObject) entry.getValue();\n            ConcurrentQuota quota = ConcurrentQuota.from( joQuota );\n            quotas.put( szKey, quota );\n        }\n\n        return quotas;\n    }\n\n\n    
public ConcurrentQuota reproduce( short nPriority ) {\n        ConcurrentQuota quota = new ConcurrentQuota( nPriority );\n        quota.setMaximumRatio( this.getMaximumRatio() );\n        quota.setMinimumRatio( this.getMinimumRatio() );\n        quota.setMaximumCnt( this.getMaximumCnt() );\n        quota.setMinimumCnt( this.getMinimumCnt() );\n        return quota;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"priority\"      , this.getPriority() ),\n                new KeyValue<>( \"maximumRatio\"  , this.getMaximumRatio() ),\n                new KeyValue<>( \"maximumCnt\"    , this.getMaximumCnt() ),\n                new KeyValue<>( \"minimumRatio\"  , this.getMinimumRatio() ),\n                new KeyValue<>( \"minimumCnt\"    , this.getMinimumCnt() )\n        } );\n    }\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/entity/DepartureChecklist.java",
    "content": "package com.walnut.odin.conduct.schedule.entity;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\n\npublic class DepartureChecklist implements Pinenut {\n\n    private boolean             mbTraceDependencyDetails; // 是否追踪依赖明细，开启会增加内存消耗，会记录依赖实例.\n    private Collection<GUID>    mDependentInstanceIds;\n\n    private LocalDateTime       mCheckTime;\n    private InstanceEntry       mTargetInstance;\n    private TaskInstanceStatus  mInterceptedStatus;       // 拦截状态原因.\n    private TaskInstanceStatus  mPreDepartureLastStatus;  // 启动前最后状态，DepartureStandby状态才能出港，其他状态送入下一批流水线.\n\n\n    public DepartureChecklist( InstanceEntry targetInstance, boolean bTraceDependencyDetails ) {\n        this.mTargetInstance = targetInstance;\n        this.mbTraceDependencyDetails = bTraceDependencyDetails;\n        this.mCheckTime = LocalDateTime.now();\n    }\n\n    public DepartureChecklist( InstanceEntry targetInstance ) {\n        this( targetInstance, true );\n    }\n\n    public boolean isTraceDependencyDetails() {\n        return this.mbTraceDependencyDetails;\n    }\n\n    public void setTraceDependencyDetails( boolean bTraceDependencyDetails ) {\n        this.mbTraceDependencyDetails = bTraceDependencyDetails;\n    }\n\n    public Collection<GUID> getDependentInstanceIds() {\n        return this.mDependentInstanceIds;\n    }\n\n    public void setDependentInstanceIds( Collection<GUID> dependentInstanceIds ) {\n        this.mDependentInstanceIds = dependentInstanceIds;\n    }\n\n    public InstanceEntry getTargetInstance() {\n        return this.mTargetInstance;\n    }\n\n    public void setTargetInstance( InstanceEntry targetInstance ) {\n        this.mTargetInstance = targetInstance;\n    }\n\n    public 
TaskInstanceStatus getInterceptedStatus() {\n        return this.mInterceptedStatus;\n    }\n\n    public void setInterceptedStatus( TaskInstanceStatus interceptedStatus ) {\n        this.mInterceptedStatus = interceptedStatus;\n    }\n\n    public TaskInstanceStatus getPreDepartureLastStatus() {\n        return this.mPreDepartureLastStatus;\n    }\n\n    public void setPreDepartureLastStatus( TaskInstanceStatus preDepartureLastStatus ) {\n        this.mPreDepartureLastStatus = preDepartureLastStatus;\n    }\n\n    public boolean isDepartureCheckPassed() {\n        return this.mPreDepartureLastStatus == TaskInstanceStatus.DepartureStandby;\n    }\n\n    public boolean isIntercepted() {\n        return this.mInterceptedStatus != null;\n    }\n\n    public void addDependentInstanceId( GUID dependentInstanceId ) {\n        if ( !this.mbTraceDependencyDetails ) {\n            return;\n        }\n\n        if ( this.mDependentInstanceIds == null ) {\n            this.mDependentInstanceIds = new ArrayList<>();\n        }\n\n        this.mDependentInstanceIds.add( dependentInstanceId );\n    }\n\n    public LocalDateTime getCheckTime() {\n        return this.mCheckTime;\n    }\n\n    public void setCheckTime( LocalDateTime checkTime ) {\n        this.mCheckTime = checkTime;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/conduct/schedule/entity/ScheduleFittingContext.java",
    "content": "package com.walnut.odin.conduct.schedule.entity;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\n\npublic class ScheduleFittingContext implements Pinenut {\n\n    private Collection<InstanceEntry> mFittedInstances;\n    private Collection<InstanceEntry> mDiscardedInstances;\n\n    public ScheduleFittingContext() {\n        this.mFittedInstances    = new ArrayList<>();\n        this.mDiscardedInstances = new ArrayList<>();\n    }\n\n    public Collection<InstanceEntry> getFittedInstances() {\n        return this.mFittedInstances;\n    }\n\n    public void setFittedInstances( Collection<InstanceEntry> launchedInstances ) {\n        this.mFittedInstances = launchedInstances;\n    }\n\n    public Collection<InstanceEntry> getDiscardedInstances() {\n        return this.mDiscardedInstances;\n    }\n\n    public void setDiscardedInstances( Collection<InstanceEntry> discardedInstances ) {\n        this.mDiscardedInstances = discardedInstances;\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/ArchTaskExecutionI32Queue.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.ArrayDeque;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Deque;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.locks.ReentrantLock;\n\nimport com.pinecone.framework.util.id.Identification;\n\npublic abstract class ArchTaskExecutionI32Queue implements TaskExecutionQueue {\n\n    protected String mszName;\n\n    protected volatile int mnMaxCapacity;\n    protected volatile int mnMinCapacity;\n    protected volatile int mnUsedCapacity;\n    protected volatile int mnRuntimeInstanceCapacity;\n\n    protected final Deque<TaskLaunchContext>               mWaitingQueue;\n    protected final Map<Identification, TaskLaunchContext> mRunningInstances;\n\n    protected final ReentrantLock mLock;\n\n    protected ArchTaskExecutionI32Queue() {\n        this.mWaitingQueue     = new ArrayDeque<>();\n        this.mRunningInstances = new HashMap<>();\n        this.mLock             = new ReentrantLock();\n    }\n\n\n    protected void assertOfferCapacityLocked( int nIncoming ) throws QueueBadAllocatedException {\n        if ( nIncoming <= 0 ) {\n            return;\n        }\n\n        int nFuture = this.mnUsedCapacity + this.mWaitingQueue.size() + nIncoming;\n\n        if ( nFuture > this.mnMaxCapacity ) {\n            throw new QueueBadAllocatedException(\n                    \"Queue capacity exceeded. 
incoming=\" + nIncoming +\n                            \", used=\" + this.mnUsedCapacity +\n                            \", waiting=\" + this.mWaitingQueue.size() +\n                            \", max=\" + this.mnMaxCapacity\n            );\n        }\n    }\n\n\n    @Override\n    public void offer( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException {\n        this.mLock.lock();\n        try {\n            this.assertOfferCapacityLocked( contexts.size() );\n            for ( TaskLaunchContext context : contexts ) {\n                this.mWaitingQueue.addLast( context );\n            }\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public void offer( TaskLaunchContext context ) throws TaskDispatchException {\n        this.mLock.lock();\n        try {\n            this.assertOfferCapacityLocked( 1 );\n            this.mWaitingQueue.addLast( context );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> consume( int n, boolean bForce, TaskInstanceConsumer consumer ) throws TaskConsumeException {\n        this.mLock.lock();\n        try {\n            if ( n <= 0 ) {\n                return Collections.emptyList();\n            }\n\n            int nFreeCapacity = this.pendingCapacity();\n            if ( nFreeCapacity <= 0 ) {\n                return Collections.emptyList();\n            }\n\n            int nAllowedByRuntime;\n            if ( bForce ) {\n                nAllowedByRuntime = n;\n            }\n            else {\n                nAllowedByRuntime = this.mnRuntimeInstanceCapacity > 0 ? 
this.mnRuntimeInstanceCapacity : n;\n            }\n\n            int nConsume = Math.min(\n                    Math.min( n, nAllowedByRuntime ),\n                    Math.min( nFreeCapacity, this.mWaitingQueue.size() )\n            );\n\n            if ( nConsume <= 0 ) {\n                return Collections.emptyList();\n            }\n\n            List<TaskLaunchContext> result = new ArrayList<>( nConsume );\n            while ( nConsume > 0 ) {\n                TaskLaunchContext context = this.mWaitingQueue.pollFirst();\n                if ( context == null ) {\n                    break;\n                }\n\n                try {\n                    consumer.tryConsume( context );\n                }\n                catch ( TaskConsumeException e ) {\n                    ConsumeCompromisedPolice police = consumer.compromisedPolice();\n\n                    switch ( police ) {\n                        case EvictionIgnore: {\n                            // 丢弃任务，不重新入队\n                            --nConsume;\n                            continue;\n                        }\n                        case EvictionException: {\n                            e.setEvictionTask( context );\n                            throw e;\n                        }\n                        case BreakException:\n                        default: {\n                            // 恢复任务到原队列位置（队头）\n                            this.mWaitingQueue.addFirst( context );\n                            throw e;\n                        }\n                    }\n                }\n\n                Identification id = context.getTaskInstance().getId();\n\n                this.mRunningInstances.put( id, context );\n                ++this.mnUsedCapacity;\n\n                result.add( context );\n                --nConsume;\n            }\n\n            return result;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public 
Collection<TaskLaunchContext> consume( int n, TaskInstanceConsumer consumer ) throws TaskConsumeException {\n        return this.consume( n, false, consumer );\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> consume( TaskInstanceConsumer consumer ) throws TaskConsumeException {\n        return this.consume( this.mnRuntimeInstanceCapacity, false, consumer );\n    }\n\n    protected void addRemain( Collection<TaskLaunchContext> products, TaskLaunchContext context ) {\n        boolean bSkipCurrent = true;\n        for ( TaskLaunchContext remain : products ) {\n            if ( bSkipCurrent ) {\n                if ( remain == context ) {\n                    bSkipCurrent = false;\n                }\n                continue;\n            }\n            this.mWaitingQueue.addLast( remain );\n        }\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> pipeConsume( Collection<TaskLaunchContext> products, TaskInstanceConsumer consumer ) throws TaskDispatchException, TaskConsumeException {\n        this.mLock.lock();\n        try {\n            if ( products == null || products.isEmpty() ) {\n                return Collections.emptyList();\n            }\n\n            this.assertOfferCapacityLocked( products.size() );\n\n            int nFreeCapacity = this.pendingCapacity();\n            if ( nFreeCapacity <= 0 ) {\n                for ( TaskLaunchContext context : products ) {\n                    this.mWaitingQueue.addLast( context );\n                }\n                return Collections.emptyList();\n            }\n\n            int nAllowedByRuntime = this.mnRuntimeInstanceCapacity > 0\n                    ? 
this.mnRuntimeInstanceCapacity : products.size();\n\n            int nConsume = Math.min(\n                    Math.min( products.size(), nAllowedByRuntime ),\n                    nFreeCapacity\n            );\n\n            if ( nConsume <= 0 ) {\n                for ( TaskLaunchContext context : products ) {\n                    this.mWaitingQueue.addLast( context );\n                }\n                return Collections.emptyList();\n            }\n\n            List<TaskLaunchContext> consumed = new ArrayList<>( nConsume );\n            int nIndex = 0;\n            for ( TaskLaunchContext context : products ) {\n                if ( nIndex < nConsume ) {\n                    try {\n                        consumer.tryConsume( context );\n                    }\n                    catch ( TaskConsumeException e ) {\n                        ConsumeCompromisedPolice police = consumer.compromisedPolice();\n                        switch ( police ) {\n                            case EvictionIgnore: {\n                                --nConsume;\n                                continue;\n                            }\n                            case EvictionException: {\n                                this.addRemain( products, context );\n                                e.setEvictionTask( context );\n                                throw e;\n                            }\n                            case BreakException:\n                            default: {\n                                // 当前任务未消费，重新入 waiting 队尾\n                                this.mWaitingQueue.addLast( context );\n\n                                // 剩余未遍历的 products 全部入队\n                                this.addRemain( products, context );\n                                throw e;\n                            }\n                        }\n                    }\n\n                    Identification id = context.getTaskInstance().getId();\n\n                    this.mRunningInstances.put( 
id, context );\n                    ++this.mnUsedCapacity;\n\n                    consumed.add( context );\n                    ++nIndex;\n                }\n                else {\n                    this.mWaitingQueue.addLast( context );\n                }\n            }\n\n            return consumed;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> runningInstances() {\n        this.mLock.lock();\n        try {\n            return Collections.unmodifiableCollection( this.mRunningInstances.values() );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public void markTerminated( Identification id ) {\n        this.mLock.lock();\n        try {\n            TaskLaunchContext context = this.mRunningInstances.remove( id );\n            if ( context != null ) {\n                this.mnUsedCapacity--;\n            }\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> recycleTerminated( Collection<Identification> terminatedIds ) {\n        this.mLock.lock();\n        try {\n            if ( terminatedIds == null || terminatedIds.isEmpty() ) {\n                return Collections.emptyList();\n            }\n\n            List<TaskLaunchContext> recycled = new ArrayList<>( terminatedIds.size() );\n            for ( Identification id : terminatedIds ) {\n                TaskLaunchContext context = this.mRunningInstances.remove( id );\n                if ( context != null ) {\n                    --this.mnUsedCapacity;\n                    recycled.add( context );\n                }\n            }\n\n            if ( recycled.isEmpty() ) {\n                return Collections.emptyList();\n            }\n            return recycled;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public 
Collection<TaskLaunchContext> consumePending( TaskInstanceConsumer consumer ) throws TaskConsumeException {\n        return this.consume( this.mnRuntimeInstanceCapacity, false, consumer );\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> shiftPipeline( Collection<Identification> terminatedIds, TaskInstanceConsumer consumer ) throws TaskConsumeException {\n        // Recycle terminated instances first to release capacity.\n        // The returned collection only represents newly consumed contexts.\n        // Recycled instances are intentionally not part of the return value,\n        // since this method models a \"release-then-refill\" pipeline step.\n        // Callers must not rely on the return value to infer recycle results.\n        this.recycleTerminated( terminatedIds );\n\n        return this.consumePending( consumer );\n    }\n\n    @Override\n    public int pendingCapacity() {\n        return this.mnMaxCapacity - this.mnUsedCapacity;\n    }\n\n    @Override\n    public TaskLaunchContext getRunningContextById( Identification id ) {\n        this.mLock.lock();\n        try {\n            return this.mRunningInstances.get( id );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public int waitingSize() {\n        this.mLock.lock();\n        try {\n            return this.mWaitingQueue.size();\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public int runningSize() {\n        this.mLock.lock();\n        try {\n            return this.mRunningInstances.size();\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public boolean isFull() {\n        this.mLock.lock();\n        try {\n            return this.mnUsedCapacity >= this.mnMaxCapacity;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public boolean isIdle() {\n        
this.mLock.lock();\n        try {\n            return this.mnUsedCapacity == 0 && this.mWaitingQueue.isEmpty();\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n\n    @Override\n    public Collection<TaskLaunchContext> drainAllWaiting() {\n        this.mLock.lock();\n        try {\n            if ( this.mWaitingQueue.isEmpty() ) {\n                return Collections.emptyList();\n            }\n            List<TaskLaunchContext> drained = new ArrayList<>( this.mWaitingQueue.size() );\n\n            while ( !this.mWaitingQueue.isEmpty() ) {\n                TaskLaunchContext context = this.mWaitingQueue.pollFirst();\n                if ( context == null ) {\n                    break;\n                }\n                drained.add( context );\n            }\n            return drained;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n\n    @Override\n    public boolean isUsageCriticalCapacity() {\n        this.mLock.lock();\n        try {\n            return this.mnUsedCapacity >= this.mnMinCapacity;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public void applyMaxCapacity( int nMaxCapacity ) {\n        this.mLock.lock();\n        try {\n            this.mnMaxCapacity = (int) nMaxCapacity;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public void applyMinCapacity( int nMinCapacity ) {\n        this.mLock.lock();\n        try {\n            this.mnMinCapacity = (int) nMinCapacity;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public void applyRuntimeInstanceCapacity( int nCapacity ) {\n        this.mLock.lock();\n        try {\n            this.mnRuntimeInstanceCapacity = nCapacity;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n\n\n\n    @Override\n    public String getName() {\n      
  return this.mszName;\n    }\n\n    @Override\n    public int getMaxCapacity() {\n        return this.mnMaxCapacity;\n    }\n\n    @Override\n    public int getMinCapacity() {\n        return this.mnMinCapacity;\n    }\n\n    @Override\n    public int getUsedCapacity() {\n        return this.mnUsedCapacity;\n    }\n\n    @Override\n    public int getRuntimeInstanceCapacity() {\n        return this.mnRuntimeInstanceCapacity;\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/ConsumeCompromisedPolice.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic enum ConsumeCompromisedPolice implements Pinenut {\n\n    EvictionIgnore(\"EvictionIgnore\"),\n    EvictionException(\"EvictionException\"),\n    BreakException(\"BreakException\"),\n\n    ;\n\n    private final String value;\n\n    ConsumeCompromisedPolice( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/DispatchStrategy.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface DispatchStrategy extends Pinenut {\n\n    Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> dispatch(\n            Collection<TaskExecutionProcessor> processors, Collection<TaskLaunchContext> contexts, TaskDispatcher dispatcher\n    ) throws TaskDispatchException;\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/PipelineLaunchReport.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.UProcess;\n\npublic interface PipelineLaunchReport extends Pinenut {\n\n    Collection<UProcess> launchedProcesses();\n\n    Collection<TaskLaunchContext> launchedContext();\n\n    Collection<TaskLaunchContext> waitingContext();\n\n    boolean isPreparing();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/QueueBadAllocatedException.java",
    "content": "package com.walnut.odin.dispatch;\n\npublic class QueueBadAllocatedException extends TaskDispatchException {\n\n    public QueueBadAllocatedException() {\n        super();\n    }\n\n    public QueueBadAllocatedException( String message ) {\n        super(message);\n    }\n\n    public QueueBadAllocatedException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public QueueBadAllocatedException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskConsumeException.java",
    "content": "package com.walnut.odin.dispatch;\n\npublic class TaskConsumeException extends TaskDispatchException {\n\n    protected TaskLaunchContext evictionTask;\n\n    public TaskConsumeException() {\n        super();\n    }\n\n    public TaskConsumeException( String message ) {\n        super(message);\n    }\n\n    public TaskConsumeException( String message, Throwable cause, TaskLaunchContext context ) {\n        super(message, cause);\n        this.evictionTask = context;\n    }\n\n    public TaskConsumeException( Throwable cause, TaskLaunchContext context ) {\n        super(cause);\n        this.evictionTask = context;\n    }\n\n    public TaskConsumeException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public TaskConsumeException( Throwable cause ) {\n        super(cause);\n    }\n\n    public TaskConsumeException( String message, TaskLaunchContext context ) {\n        super(message);\n        this.evictionTask = context;\n    }\n\n    public TaskLaunchContext getEvictionTask() {\n        return this.evictionTask;\n    }\n\n    public void setEvictionTask( TaskLaunchContext evictionTask ) {\n        this.evictionTask = evictionTask;\n    }\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskDispatchException.java",
    "content": "package com.walnut.odin.dispatch;\n\npublic class TaskDispatchException extends Exception {\n\n    public TaskDispatchException() {\n        super();\n    }\n\n    public TaskDispatchException(String message ) {\n        super(message);\n    }\n\n    public TaskDispatchException(String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public TaskDispatchException(Throwable cause ) {\n        super(cause);\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskDispatcher.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.walnut.odin.dispatch.entity.TaskProcessorEntity;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.troll.InstanceLaunchException;\nimport com.walnut.odin.task.troll.LaunchFeature;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\n\npublic interface TaskDispatcher extends Pinenut {\n\n    TaskExecutionLauncher taskExecutionLauncher();\n\n    void registerProcessor( TaskExecutionProcessor processor );\n\n    TaskProcessorEntity registerProcessor( String szProcessorName, long nClientId ) throws IllegalArgumentException;\n\n    void unregisterProcessor( String szProcessorName );\n\n    void unregisterProcessor( long nClientId );\n\n    Collection<TaskExecutionProcessor> fetchProcessors();\n\n\n    void setProcessorAffinity( String szProcessorName, TaskLaunchContext launchContext );\n\n    TaskExecutionProcessor getAffinityTasks( Identification taskId );\n\n    Collection<TaskLaunchContext> queryAffinityTasks( String szProcessorName );\n\n\n    PipelineLaunchReport pipeCreate( Collection<TaskLaunchContext> contexts ) throws InstanceLaunchException, TaskDispatchException;\n\n    PipelineLaunchReport pipeLaunch( Collection<TaskLaunchContext> contexts ) throws InstanceLaunchException, TaskDispatchException;\n\n\n    UProcess create( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException;\n\n    UProcess launch( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException;\n\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskExecutionProcessor.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.deploy.Server;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.troll.InstanceLaunchException;\nimport com.walnut.odin.task.troll.LaunchFeature;\n\npublic interface TaskExecutionProcessor extends Pinenut {\n\n    String getName();\n\n    Server getDeployClusterServer();\n\n    String getClusterPath();\n\n    String getClusterName();\n\n    long getControlClientId();\n\n    TaskExecutionQueue getTaskExecutionQueue();\n\n    boolean isLocal();\n\n    int getPriority();\n\n    boolean isExclusive();\n\n    TaskLaunchContext getTaskLaunchContextByPID( GUID pid );\n\n\n    int getRunningSize();\n\n\n    int getWaitingSize();\n\n\n    UProcess directlyCreate( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException;\n\n    UProcess directlyLaunch( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException;\n\n\n\n    PipelineLaunchReport recycleTerminated(Collection<Identification> terminatedIds );\n\n    PipelineLaunchReport launchsPending() throws TaskDispatchException;\n\n    PipelineLaunchReport shiftLaunchsPipeline(Collection<Identification> terminatedIds ) throws TaskDispatchException;\n\n\n    PipelineLaunchReport prepare(Collection<TaskLaunchContext> contexts ) throws TaskDispatchException;\n\n    PipelineLaunchReport pipeCreate(Collection<TaskLaunchContext> contexts ) throws TaskDispatchException;\n\n    PipelineLaunchReport pipeLaunch(Collection<TaskLaunchContext> contexts ) throws TaskDispatchException;\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskExecutionQueue.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.Collection;\n\nimport com.pinecone.framework.util.id.Identification;\n\npublic interface TaskExecutionQueue extends TaskQueueMeta {\n\n    void applyMaxCapacity( int maxCapacity );\n\n    void applyMinCapacity( int minCapacity );\n\n    void applyRuntimeInstanceCapacity( int capacity );\n\n    boolean isUsageCriticalCapacity();\n\n\n    void offer( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException;\n\n    void offer( TaskLaunchContext context ) throws TaskDispatchException;\n\n    Collection<TaskLaunchContext> consume( int n, boolean bForce, TaskInstanceConsumer consumer ) throws TaskConsumeException;\n\n    Collection<TaskLaunchContext> consume( int n, TaskInstanceConsumer consumer ) throws TaskConsumeException;\n\n    Collection<TaskLaunchContext> consume( TaskInstanceConsumer consumer ) throws TaskConsumeException;\n\n    Collection<TaskLaunchContext> pipeConsume( Collection<TaskLaunchContext> products, TaskInstanceConsumer consumer ) throws TaskDispatchException, TaskConsumeException;\n\n    Collection<TaskLaunchContext> runningInstances();\n\n    void markTerminated( Identification id );\n\n    Collection<TaskLaunchContext> recycleTerminated( Collection<Identification> terminatedIds );\n\n    Collection<TaskLaunchContext> consumePending( TaskInstanceConsumer consumer ) throws TaskConsumeException;\n\n    Collection<TaskLaunchContext> shiftPipeline( Collection<Identification> terminatedIds, TaskInstanceConsumer consumer ) throws TaskConsumeException;\n\n    int pendingCapacity();\n\n\n    int waitingSize();\n\n    int runningSize();\n\n    boolean isFull();\n\n    boolean isIdle();\n\n\n    Collection<TaskLaunchContext> drainAllWaiting();\n\n    TaskLaunchContext getRunningContextById( Identification id );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskInstanceConsumer.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TaskInstanceConsumer extends Pinenut {\n\n    void tryConsume( TaskLaunchContext context ) throws TaskConsumeException;\n\n    ConsumeCompromisedPolice compromisedPolice();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskLaunchContext.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.troll.LaunchFeature;\n\npublic interface TaskLaunchContext extends Pinenut {\n\n    LaunchFeature getLaunchFeature();\n\n    RavenTaskInstance getTaskInstance();\n\n    default Identification getTaskId() {\n        return this.getTaskInstance().getOwnedTask().getId();\n    }\n\n    default Identification getTaskInstanceId() {\n        return this.getTaskInstance().getId();\n    }\n\n\n    String getAffinityProcessorName();\n\n    void setAffinityProcessorName( String affinityProcessorName );\n\n    UProcess getLaunchedProcess();\n\n    void afterProcessLaunched( UProcess launchedProcess );\n\n\n    static TaskLaunchContext of( RavenTaskInstance taskInstance, LaunchFeature launchFeature ) {\n        return new TaskLaunchContext() {\n            private String affinityProcessorName;\n            private UProcess launchedProcess;\n\n            @Override\n            public LaunchFeature getLaunchFeature() {\n                return launchFeature;\n            }\n\n            @Override\n            public RavenTaskInstance getTaskInstance() {\n                return taskInstance;\n            }\n\n            @Override\n            public String getAffinityProcessorName() {\n                return this.affinityProcessorName;\n            }\n\n            @Override\n            public void setAffinityProcessorName( String affinityProcessorName ) {\n                this.affinityProcessorName = affinityProcessorName;\n            }\n\n            @Override\n            public UProcess getLaunchedProcess() {\n                return this.launchedProcess;\n            }\n\n            @Override\n            public void afterProcessLaunched( UProcess launchedProcess ) {\n                
this.launchedProcess = launchedProcess;\n            }\n        };\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/TaskQueueMeta.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TaskQueueMeta extends Pinenut {\n\n    String getName();\n\n    int getMaxCapacity();\n\n    int getMinCapacity();\n\n    int getUsedCapacity();\n\n    int getRuntimeInstanceCapacity();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/entity/ArchTaskQueueMeta.java",
    "content": "package com.walnut.odin.dispatch.entity;\n\nimport java.util.Map;\n\nimport com.walnut.odin.dispatch.TaskQueueMeta;\n\npublic abstract class ArchTaskQueueMeta implements TaskQueueMeta {\n\n    protected String mszName;\n\n    protected int mnMaxCapacity;\n\n    protected int mnMinCapacity;\n\n    protected int mnUsedCapacity;\n\n    protected int mnRuntimeInstanceCapacity;\n\n    protected ArchTaskQueueMeta() {\n\n    }\n\n    public ArchTaskQueueMeta( Map<String, Object> jo ) {\n        if ( jo == null ) {\n            return;\n        }\n\n        Object name = jo.get( \"name\" );\n        if ( name instanceof String ) {\n            this.mszName = (String) name;\n        }\n\n        Object maxCapacity = jo.get( \"maxCapacity\" );\n        if ( maxCapacity instanceof Number ) {\n            this.mnMaxCapacity = ( (Number) maxCapacity ).intValue();\n        }\n\n        Object minCapacity = jo.get( \"minCapacity\" );\n        if ( minCapacity instanceof Number ) {\n            this.mnMinCapacity = ( (Number) minCapacity ).intValue();\n        }\n\n        Object runtimeCapacity = jo.get( \"runtimeInstanceCapacity\" );\n        if ( runtimeCapacity instanceof Number ) {\n            this.mnRuntimeInstanceCapacity = ( (Number) runtimeCapacity ).intValue();\n        }\n    }\n\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public int getMaxCapacity() {\n        return this.mnMaxCapacity;\n    }\n\n    @Override\n    public int getMinCapacity() {\n        return this.mnMinCapacity;\n    }\n\n    @Override\n    public int getUsedCapacity() {\n        return this.mnUsedCapacity;\n    }\n\n    @Override\n    public int getRuntimeInstanceCapacity() {\n        return this.mnRuntimeInstanceCapacity;\n    }\n\n    protected void setName( String szName ) {\n        this.mszName = szName;\n    }\n\n    protected void setMaxCapacity( int nMaxCapacity ) {\n        this.mnMaxCapacity = nMaxCapacity;\n    
}\n\n    protected void setMinCapacity( int nMinCapacity ) {\n        this.mnMinCapacity = nMinCapacity;\n    }\n\n    protected void setUsedCapacity( int nUsedCapacity ) {\n        this.mnUsedCapacity = nUsedCapacity;\n    }\n\n    protected void setRuntimeInstanceCapacity( int nRuntimeInstanceCapacity ) {\n        this.mnRuntimeInstanceCapacity = nRuntimeInstanceCapacity;\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/entity/GenericTaskProcessorEntity.java",
    "content": "package com.walnut.odin.dispatch.entity;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.Server;\nimport com.walnut.odin.dispatch.TaskQueueMeta;\n\npublic class GenericTaskProcessorEntity implements TaskProcessorEntity {\n\n    protected GUID           mGuid;\n    protected String         mszName;\n    protected Server         mDeployClusterServer;\n    protected String         mszClusterPath;\n    protected String         mszClusterName;\n    protected long           mnControlClientId;\n    protected boolean        mbLocal;\n    protected boolean        mbExclusive;\n    protected int            mnPriority;\n    protected TaskQueueMeta  mTaskQueueMeta;\n    protected boolean        mbEnable;\n\n    public GenericTaskProcessorEntity() {\n        this.mTaskQueueMeta = new GenericTaskQueueEntity();\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public GenericTaskProcessorEntity( Map<String, Object> jo ) {\n        this();\n\n        if ( jo == null ) {\n            return;\n        }\n\n        Object name = jo.get( \"name\" );\n        if ( name instanceof String ) {\n            this.mszName = (String) name;\n        }\n\n        Object clusterPath = jo.get( \"clusterPath\" );\n        if ( clusterPath instanceof String ) {\n            this.mszClusterPath = (String) clusterPath;\n        }\n\n        Object clusterName = jo.get( \"clusterName\" );\n        if ( clusterName instanceof String ) {\n            this.mszClusterName = (String) clusterName;\n        }\n\n        Object controlClientId = jo.get( \"controlClientId\" );\n        if ( controlClientId instanceof Number ) {\n            this.mnControlClientId = ( (Number) controlClientId ).longValue();\n        }\n\n        Object local = jo.get( \"local\" );\n        if ( local instanceof Boolean ) {\n            this.mbLocal = (Boolean) local;\n        }\n\n        Object priority = jo.get( \"priority\" );\n        if ( priority 
instanceof Number ) {\n            this.mnPriority = ( (Number) priority ).intValue();\n        }\n\n        Object queueMeta = jo.get( \"queueMeta\" );\n        if ( queueMeta instanceof Map ) {\n            this.mTaskQueueMeta = new GenericTaskQueueEntity( (Map<String, Object>)queueMeta );\n        }\n    }\n\n\n\n    @Override\n    public GUID getGuid() {\n        return this.mGuid;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public Server getDeployClusterServer() {\n        return this.mDeployClusterServer;\n    }\n\n    @Override\n    public String getClusterPath() {\n        return this.mszClusterPath;\n    }\n\n    @Override\n    public String getClusterName() {\n        return this.mszClusterName;\n    }\n\n    @Override\n    public long getControlClientId() {\n        return this.mnControlClientId;\n    }\n\n    @Override\n    public boolean isLocal() {\n        return this.mbLocal;\n    }\n\n    @Override\n    public boolean isExclusive() {\n        return this.mbExclusive;\n    }\n\n    @Override\n    public int getPriority() {\n        return this.mnPriority;\n    }\n\n    @Override\n    public TaskQueueMeta getTaskQueueMeta() {\n        return this.mTaskQueueMeta;\n    }\n\n    @Override\n    public boolean isEnable() {\n        return this.mbEnable;\n    }\n\n    public void setEnable( boolean enable ) {\n        this.mbEnable = enable;\n    }\n\n    public void setExclusive( boolean exclusive ) {\n        this.mbExclusive = exclusive;\n    }\n\n    public void setGuid( GUID guid ) {\n        this.mGuid = guid;\n    }\n\n    public void setName(String name ) {\n        this.mszName = name;\n    }\n\n    public String getQueueName() {\n        return this.mTaskQueueMeta != null ? this.asTaskQueueMeta().getName() : null;\n    }\n\n    public Integer getQueueMaxCapacity() {\n        return this.mTaskQueueMeta != null ? 
this.asTaskQueueMeta().getMaxCapacity() : null;\n    }\n\n    public Integer getQueueMinCapacity() {\n        return this.mTaskQueueMeta != null ? this.asTaskQueueMeta().getMinCapacity() : null;\n    }\n\n    public Integer getQueueRuntimeInstanceCapacity() {\n        return this.mTaskQueueMeta != null ? this.asTaskQueueMeta().getRuntimeInstanceCapacity() : null;\n    }\n\n    public void setDeployClusterServer( Server server ) {\n        this.mDeployClusterServer = server;\n    }\n\n    public void setClusterPath( String clusterPath ) {\n        this.mszClusterPath = clusterPath;\n    }\n\n    public void setClusterName( String clusterName ) {\n        this.mszClusterName = clusterName;\n    }\n\n    @Override\n    public void setControlClientId( long controlClientId ) {\n        this.mnControlClientId = controlClientId;\n    }\n\n    public void setLocal( boolean bLocal ) {\n        this.mbLocal = bLocal;\n    }\n\n    public void setPriority( int priority ) {\n        this.mnPriority = priority;\n    }\n\n    public void setTaskQueueMeta( TaskQueueMeta queueMeta ) {\n        this.mTaskQueueMeta = queueMeta;\n    }\n\n    protected ArchTaskQueueMeta asTaskQueueMeta() {\n        return (ArchTaskQueueMeta) this.mTaskQueueMeta;\n    }\n\n    public void setQueueName( String queueName ) {\n        this.asTaskQueueMeta().setName( queueName );\n    }\n\n    public void setQueueMaxCapacity( int nMaxCapacity ) {\n        this.asTaskQueueMeta().setMaxCapacity( nMaxCapacity );\n    }\n\n    public void setQueueMinCapacity( int nMinCapacity ) {\n        this.asTaskQueueMeta().setMinCapacity( nMinCapacity );\n    }\n\n    public void setQueueUsedCapacity( int nUsedCapacity ) {\n        this.asTaskQueueMeta().setUsedCapacity( nUsedCapacity );\n    }\n\n    public void setQueueRuntimeInstanceCapacity( int nRuntimeInstanceCapacity ) {\n        this.asTaskQueueMeta().setRuntimeInstanceCapacity( nRuntimeInstanceCapacity );\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/entity/GenericTaskQueueEntity.java",
    "content": "package com.walnut.odin.dispatch.entity;\n\nimport java.util.Map;\n\npublic class GenericTaskQueueEntity extends ArchTaskQueueMeta {\n\n    public GenericTaskQueueEntity() {\n\n    }\n\n    public GenericTaskQueueEntity( Map<String, Object> jo ) {\n        super( jo );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/dispatch/entity/TaskProcessorEntity.java",
    "content": "package com.walnut.odin.dispatch.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.Server;\nimport com.walnut.odin.dispatch.TaskQueueMeta;\n\npublic interface TaskProcessorEntity extends Pinenut {\n\n    GUID getGuid();\n\n    String getName();\n\n    Server getDeployClusterServer();\n\n    String getClusterPath();\n\n    String getClusterName();\n\n    long getControlClientId();\n\n    void setControlClientId( long controlClientId );\n\n    boolean isLocal();\n\n    boolean isExclusive();\n\n    int getPriority();\n\n    boolean isEnable();\n\n    TaskQueueMeta getTaskQueueMeta();\n\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/ProcessLifecycleExaminer.java",
    "content": "package com.walnut.odin.proc;\n\nimport com.pinecone.framework.system.regime.Examiner;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.image.ImageModifier;\n\n/**\n * ProcessLifecycleExaminer\n * Process Lifecycle Manager and Runtime Examiner\n * Manages process lifecycles, action auditing, and runtime inspection.\n * 进程生命周期管理与运行检察器，管理进程生命周期与行为审计、检查\n */\npublic interface ProcessLifecycleExaminer extends Examiner {\n\n    void startProcess( UProcess process );\n\n    ImageModifier imageModifier();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/ProcessRemoteEventHandler.java",
    "content": "package com.walnut.odin.proc;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\n\npublic interface ProcessRemoteEventHandler extends Pinenut {\n\n    void fired( long pmClientId, ProcessEvent event, Object caused );\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcess.java",
    "content": "package com.walnut.odin.proc;\n\nimport com.pinecone.hydra.proc.RemoteUProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\n\nimport java.time.LocalDateTime;\n\n\npublic interface RemoteProcess extends RemoteUProcess {\n\n    long getControlClientId();\n\n    LocalDateTime remoteGetEndTime();\n\n    LocalDateTime remoteGetLastUpdateTime();\n\n    UProcessRuntimeMeta retrieveRemoteRuntimeMeta() throws RemoteProcessLifecycleException;\n\n    void addRemoteEventHandler( ProcessRemoteEventHandler handler ) ;\n\n    void removeRemoteEventHandler( ProcessRemoteEventHandler handler ) ;\n\n    int remoteEventHandlerSize(  ) ;\n\n    void notifyRemoteEvent( long pmClientId, ProcessEvent event, Object caused );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcessLifecycleException.java",
    "content": "package com.walnut.odin.proc;\n\npublic class RemoteProcessLifecycleException extends RemoteProcessServiceException {\n\n    public RemoteProcessLifecycleException() {\n        super();\n    }\n\n    public RemoteProcessLifecycleException( String message ) {\n        super(message);\n    }\n\n    public RemoteProcessLifecycleException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RemoteProcessLifecycleException( Throwable cause ) {\n        super(cause);\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcessManagerNode.java",
    "content": "package com.walnut.odin.proc;\n\nimport java.net.URI;\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessLifecycleHandler;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.URLImageLoader;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\n\npublic interface RemoteProcessManagerNode extends Slf4jTraceable {\n\n    void startService () throws RemoteProcessServiceRPCException;\n\n    void terminateService () throws IllegalStateException;\n\n    GuidAllocator getGuidAllocator();\n\n    ProcessManager localProcessManager();\n\n    URLImageLoader imageLoader();\n\n    ExecutionImage queryExecutionImage( String path );\n\n    ExecutionImage queryExecutionImage( URI uri );\n\n    RuntimeSystem superiorSystem();\n\n    void registerLocalScopeExecutionImage ( String dirPath, ExecutionImage image );\n\n    void register( UProcess that );\n\n    void erase( UProcess that );\n\n    UProcess getProcess( GUID pid );\n\n    /**\n     * Checks only whether the current node directly owns the specified process, without involving any child nodes or proxy mirrors.\n     * 仅检查当前节点自身是否直接持有该进程，不涉及任何下级节点或代理镜像。\n     */\n    boolean hasOwnProcess( GUID pid );\n\n    /**\n     * Determines whether the specified process exists in the current node or any of its child nodes.\n     * 判断当前节点或其下级节点中是否存在指定进程。\n     */\n    boolean containProcess( GUID pid );\n\n    UProcessRuntimeMeta queryProcessRuntimeMeta( GUID pid ) throws RemoteProcessLifecycleException 
;\n\n    Collection<UProcess> searchProcessesByName( String procName ) ;\n\n    Collection<UProcess> searchProcessesByNameNoCase( String procName );\n\n\n\n\n\n    RemoteProcessManagerNode addProcessLifecycleHandler( ProcessLifecycleHandler handler );\n\n    RemoteProcessManagerNode removeProcessLifecycleHandler( ProcessLifecycleHandler handler );\n\n    int getProcessLifecycleHandlersSize();\n\n    @Unsafe\n    void notifyProcessLifecycleHandlers( String imageAddress, EntryPointRunnable runnable, ProcessEvent event );\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcessServiceException.java",
    "content": "package com.walnut.odin.proc;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class RemoteProcessServiceException extends Exception implements Pinenut {\n\n    public RemoteProcessServiceException() {\n        super();\n    }\n\n    public RemoteProcessServiceException( String message ) {\n        super(message);\n    }\n\n    public RemoteProcessServiceException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RemoteProcessServiceException( Throwable cause ) {\n        super(cause);\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteProcessServiceRPCException.java",
    "content": "package com.walnut.odin.proc;\n\npublic class RemoteProcessServiceRPCException extends RemoteProcessServiceException {\n\n    public RemoteProcessServiceRPCException() {\n        super();\n    }\n\n    public RemoteProcessServiceRPCException( String message ) {\n        super(message);\n    }\n\n    public RemoteProcessServiceRPCException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RemoteProcessServiceRPCException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteTerminationStatus.java",
    "content": "package com.walnut.odin.proc;\n\npublic enum RemoteTerminationStatus {\n    // === Normal ===\n    Expected              (0x00),    // Expected termination, 计划内正常结束\n    Error                 (0x01),    // Unexpected termination, 程序内部异常、错误\n    HostKilled            (0x02),    // Affiliated otter-host-process termination, 由Daemon执行宿主进程死刑（宿主JVM进程终止）\n    InitFailure           (0x03),    // Initialization Failure, 初始化构造失败（尚未真正开始执行）\n\n\n    // === Signal ===\n    SignalInterrupted     (0x20),    // Voluntary Interrupted (SIGINT), 收到信号程序主动中断 （走线程信号中断）\n    SignalApoptosis       (0x21),    // Voluntary Apoptosis (SIGAPOP), 收到信号程序主动凋亡 （走程序性死亡协议）\n    SignalElimination     (0x22),    // Destination killed by kernel (SIGELIM), 中央内核终末强制终止 （内部进程强制杀死）\n\n\n    // === Procedure ===\n    Restarted             (0x50),    // Expected restart, 计划内重启（如热更新、配置变更）\n    Transferred           (0x51),    // Expected transfer, 计划内进程置换（如负载均衡、资源调度）\n    Preempted             (0x52),    // Expected preempt, 计划内被调度器抢占式终止（后期自动恢复）\n\n\n    // === Internal Failure ===\n    AuthFailure           (0x60),    // Authority failure, 鉴权失败，安全策略拒绝\n    HeathyCheckFailed     (0x61),    // Heathy check failed, 健康检查失败被终止\n    ResourceExhausted     (0x62),    // Resource exhausted, 资源耗尽\n    MigrationAborted      (0x63),    // Migration aborted, 进程迁移过程中失败或终止\n\n\n    // === Network Error ===\n    NetUnreachableTimeout (0x80),    // Net RPC unreachable timeout, 失联超时状态\n\n\n    // === Checkpoint / Tombstone ===\n    Suspended             (0xC001),  // Suspended, 可恢复的挂起状态\n    SuspendedAborted      (0xC002),  // Suspended Aborted, 挂起状态被取消\n    FrozenSeal            (0xC003),  // Frozen and seal, 封印状态（进程终止并持久化挂起到磁盘）\n    CheckpointCorrupted   (0xC004);  // Checkpoint corrupted, 检查点不可用，恢复失败\n\n    ;\n\n    private final int code;\n\n    RemoteTerminationStatus( int code ) {\n        this.code = code;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public static 
RemoteTerminationStatus getByCode( int code ) {\n        for ( RemoteTerminationStatus type : RemoteTerminationStatus.values() ) {\n            if ( type.code == code ) {\n                return type;\n            }\n        }\n\n        return null;\n    }\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/RemoteVitalizationStatus.java",
    "content": "package com.walnut.odin.proc;\n\npublic enum RemoteVitalizationStatus {\n    New              ( 0x00 ),\n    Vitalized        ( 0x01 ),\n    NoImage          ( 0x02 ),\n    Error            ( 0x03 ),\n    AuthorityDenial  ( 0x04 ),\n    ;\n\n    private final int code;\n\n    RemoteVitalizationStatus( int code ) {\n        this.code = code;\n    }\n\n    public int getCode() {\n        return this.code;\n    }\n\n    public static RemoteVitalizationStatus getByCode( int code ) {\n        for ( RemoteVitalizationStatus type : RemoteVitalizationStatus.values() ) {\n            if ( type.code == code ) {\n                return type;\n            }\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/client/RemoteProcessManagerClient.java",
    "content": "package com.walnut.odin.proc.client;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.walnut.odin.proc.RemoteProcessLifecycleException;\nimport com.walnut.odin.proc.RemoteProcessManagerNode;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessMirrorDTO;\n\nimport java.util.Map;\n\npublic interface RemoteProcessManagerClient extends RemoteProcessManagerNode {\n\n    /**\n     *  createLocalUProcess\n     *  Proactively creating local-UProcess.\n     */\n    UProcess createLocalUProcess( ExecutionImage image, UProcess parent, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars );\n\n    void startLocalUProcess( GUID pid );\n\n    long getClientId();\n\n    RemoteVitalizationResponse createLocalUProcess( UProcessMirrorDTO handlerDTO, UProcess[] lpProcess ) throws RemoteProcessLifecycleException;\n\n    RemoteVitalizationResponse vitalizeLocalUProcess( UProcessMirrorDTO handlerDTO ) throws RemoteProcessLifecycleException;\n\n    DuplexAppointClient duplexAppointClient();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/entity/RemoteTerminationReport.java",
    "content": "package com.walnut.odin.proc.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.walnut.odin.proc.RemoteTerminationStatus;\n\npublic class RemoteTerminationReport implements Pinenut {\n\n    protected long mnLocalPID;\n\n    protected GUID mPID;\n\n    protected String mszPID;\n\n    protected int mnTerminationStatus;\n\n    protected int mnExitCode;\n\n    protected String mszErrorMsg;\n\n    public RemoteTerminationReport() {\n        this.mnTerminationStatus = RemoteTerminationStatus.Expected.getCode();\n    }\n\n    public long getLocalPID() {\n        return this.mnLocalPID;\n    }\n\n    public void setLocalPID( long nLocalPID ) {\n        this.mnLocalPID = nLocalPID;\n    }\n\n    public String getPID() {\n        return this.mszPID;\n    }\n\n    public void setPID( String szPID ) {\n        this.mszPID = szPID;\n    }\n\n    public void setTerminationStatus( int nStatus ) {\n        this.mnTerminationStatus = nStatus;\n    }\n\n    public int getTerminationStatus() {\n        return this.mnTerminationStatus;\n    }\n\n    public String getErrorMsg() {\n        return this.mszErrorMsg;\n    }\n\n    public void setErrorMsg( String szErrorMsg ) {\n        this.mszErrorMsg = szErrorMsg;\n    }\n\n    public void setProcessID( GUID pid ) {\n        this.setPID( pid.toString() );\n        this.mPID = pid;\n    }\n\n    public GUID optProcessID() {\n        return this.mPID;\n    }\n\n    public int getExitCode() {\n        return this.mnExitCode;\n    }\n\n    public void setExitCode( int nExitCode ) {\n        this.mnExitCode = nExitCode;\n    }\n\n    public void setRemoteTerminationStatus( RemoteTerminationStatus status ) {\n        this.setTerminationStatus( status.getCode() );\n    }\n\n    public RemoteTerminationStatus optStatus() {\n        return RemoteTerminationStatus.getByCode( this.getTerminationStatus() );\n    }\n\n}\n\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/entity/RemoteVitalizationResponse.java",
    "content": "package com.walnut.odin.proc.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.walnut.odin.proc.RemoteVitalizationStatus;\n\npublic class RemoteVitalizationResponse implements Pinenut {\n\n    protected long mnLocalPID;\n\n    protected String mszName;\n\n    protected GUID mPID;\n\n    protected String mszPID;\n\n    protected int mnStatus;\n\n    protected String mszErrorMsg;\n\n    private String mStartupArguments;\n\n    private String mEnvironmentVariables;\n\n    private String mszImageAddress;\n    private boolean mbImageAddressURI;\n\n    public RemoteVitalizationResponse() {\n        this.mnStatus = RemoteVitalizationStatus.Vitalized.getCode();\n    }\n\n    public String getImageAddress() {\n        return this.mszImageAddress;\n    }\n\n    public void setImageAddress( String szImageAddress ) {\n        this.mszImageAddress = szImageAddress;\n    }\n\n    public void setImageAddressURI( boolean bImageAddressURI ) {\n        this.mbImageAddressURI = bImageAddressURI;\n    }\n\n    public boolean isImageAddressURI() {\n        return this.mbImageAddressURI;\n    }\n\n    public String getName() {\n        return this.mszName;\n    }\n\n    public void setName( String szName ) {\n        this.mszName = szName;\n    }\n\n    public long getLocalPID() {\n        return this.mnLocalPID;\n    }\n\n    public void setLocalPID( long nLocalPID ) {\n        this.mnLocalPID = nLocalPID;\n    }\n\n    public String getPID() {\n        return this.mszPID;\n    }\n\n    public void setPID( String szPID ) {\n        this.mszPID = szPID;\n    }\n\n    public void setStatus( int nStatus ) {\n        this.mnStatus = nStatus;\n    }\n\n    public int getStatus() {\n        return this.mnStatus;\n    }\n\n    public String getErrorMsg() {\n        return this.mszErrorMsg;\n    }\n\n    public void setErrorMsg( String szErrorMsg ) {\n        this.mszErrorMsg = szErrorMsg;\n    }\n\n    
public void setProcessID( GUID pid ) {\n        this.setPID( pid.toString() );\n        this.mPID = pid;\n    }\n\n    public GUID optProcessID() {\n        return this.mPID;\n    }\n\n    public void setRemoteVitalizationStatus( RemoteVitalizationStatus status ) {\n        this.setStatus( status.getCode() );\n    }\n\n    public RemoteVitalizationStatus optStatus() {\n        return RemoteVitalizationStatus.getByCode( this.getStatus() );\n    }\n\n    public String getStartupArguments() {\n        return mStartupArguments;\n    }\n\n    public void setStartupArguments( String startupArguments ) {\n        this.mStartupArguments = startupArguments;\n    }\n\n    public String getEnvironmentVariables() {\n        return mEnvironmentVariables;\n    }\n\n    public void setEnvironmentVariables( String environmentVariables ) {\n        this.mEnvironmentVariables = environmentVariables;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/entity/UProcessMirrorDTO.java",
    "content": "package com.walnut.odin.proc.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UProcessMirrorDTO implements Pinenut {\n\n    private String      mszName;\n\n    private long        mnLocalPID;\n\n    private String      mszParentPID;\n\n    private String      mszProcessId;\n\n    private String      mStartupArguments;\n\n    private String      mEnvironmentVariables;\n\n    private String      mszImageAddress;\n    private boolean     mbImageAddressURI;\n\n    public UProcessMirrorDTO( String name, long localPID, String processId, String startupArguments, String environmentVariables ) {\n        this.mszName               = name;\n        this.mnLocalPID            = localPID;\n        this.mszProcessId          = processId;\n        this.mStartupArguments     = startupArguments;\n        this.mEnvironmentVariables = environmentVariables;\n    }\n\n    public UProcessMirrorDTO( String name, long localPID, String processId ) {\n        this( name, localPID, processId, null, null );\n    }\n\n    public UProcessMirrorDTO(){}\n\n\n\n    public String getImageAddress() {\n        return this.mszImageAddress;\n    }\n\n    public void setImageAddress( String szImageAddress ) {\n        this.mszImageAddress = szImageAddress;\n    }\n\n    public void setImageAddressURI( boolean bImageAddressURI ) {\n        this.mbImageAddressURI = bImageAddressURI;\n    }\n\n    public boolean isImageAddressURI() {\n        return this.mbImageAddressURI;\n    }\n\n    public String getName() {\n        return mszName;\n    }\n\n    public void setName( String name ) {\n        this.mszName = name;\n    }\n\n    public String getParentPID() {\n        return this.mszParentPID;\n    }\n\n    public void setParentPID( String szParentPID ) {\n        this.mszParentPID = szParentPID;\n    }\n\n    public long getLocalPID() {\n        return mnLocalPID;\n    }\n\n    public void setLocalPID( long pid ) {\n        this.mnLocalPID = pid;\n    
}\n\n    public String getPID() {\n        return mszProcessId;\n    }\n\n    public void setPID( String pid ) {\n        this.mszProcessId = pid;\n    }\n\n    public String getStartupArguments() {\n        return mStartupArguments;\n    }\n\n    public void setStartupArguments( String startupArguments ) {\n        this.mStartupArguments = startupArguments;\n    }\n\n    public String getEnvironmentVariables() {\n        return mEnvironmentVariables;\n    }\n\n    public void setEnvironmentVariables( String environmentVariables ) {\n        this.mEnvironmentVariables = environmentVariables;\n    }\n\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/entity/UProcessRuntimeMeta.java",
    "content": "package com.walnut.odin.proc.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UProcessRuntimeMeta implements Pinenut {\n    private String      mszName;\n\n    private long        mnLocalPID;\n\n    private String      mszParentPID;\n\n    private String      mszProcessId;\n\n    private String      mszCreateTime;\n    private String      mszStartTime;\n    private String      mszEndTime;\n    private String      mszLastUpdateTime;\n    private String      mszMainThreadStatus;\n    private boolean     mbTerminated;\n\n\n    public UProcessRuntimeMeta() {\n\n    }\n\n    public String getCreateTime() {\n        return this.mszCreateTime;\n    }\n\n    public void setCreateTime( String createTime ) {\n        this.mszCreateTime = createTime;\n    }\n\n    public String getStartTime() {\n        return this.mszStartTime;\n    }\n\n    public void setStartTime( String startTime ) {\n        this.mszStartTime = startTime;\n    }\n\n    public String getEndTime() {\n        return this.mszEndTime;\n    }\n\n    public void setEndTime( String endTime ) {\n        this.mszEndTime = endTime;\n    }\n\n    public String getLastUpdateTime() {\n        return this.mszLastUpdateTime;\n    }\n\n    public void setLastUpdateTime( String lastUpdateTime ) {\n        this.mszLastUpdateTime = lastUpdateTime;\n    }\n\n\n\n    public String getMainThreadStatus() {\n        return this.mszMainThreadStatus;\n    }\n\n    public void setMainThreadStatus( String mainThreadStatus ) {\n        this.mszMainThreadStatus = mainThreadStatus;\n    }\n\n    public boolean isTerminated() {\n        return this.mbTerminated;\n    }\n\n    public void setTerminated( boolean terminated ) {\n        this.mbTerminated = terminated;\n    }\n\n\n\n\n    public String getName() {\n        return this.mszName;\n    }\n\n    public void setName( String name ) {\n        this.mszName = name;\n    }\n\n    public String getParentPID() {\n        return 
this.mszParentPID;\n    }\n\n    public void setParentPID( String szParentPID ) {\n        this.mszParentPID = szParentPID;\n    }\n\n    public long getLocalPID() {\n        return mnLocalPID;\n    }\n\n    public void setLocalPID( long pid ) {\n        this.mnLocalPID = pid;\n    }\n\n    public String getPID() {\n        return mszProcessId;\n    }\n\n    public void setPID( String pid ) {\n        this.mszProcessId = pid;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/proc/server/RemoteProcessManagerServer.java",
    "content": "package com.walnut.odin.proc.server;\n\nimport java.net.URI;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.walnut.odin.proc.RemoteProcess;\nimport com.walnut.odin.proc.RemoteProcessLifecycleException;\nimport com.walnut.odin.proc.RemoteProcessManagerNode;\nimport com.walnut.odin.proc.RemoteProcessServiceRPCException;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessMirrorDTO;\n\npublic interface RemoteProcessManagerServer extends RemoteProcessManagerNode {\n\n    DuplexAppointServer duplexAppointServer();\n\n    void registerProcess( long clientId, UProcessMirrorDTO processDTO );\n\n    void startRemoteUProcess( GUID pid ) throws RemoteProcessServiceRPCException;\n\n    RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException;\n\n    RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, String imagePath, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException;\n\n    RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, URI imageURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException;\n\n\n    RemoteCreationResult createRemoteUProcess( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException;\n\n    RemoteCreationResult createRemoteUProcess( long clientId, String imagePath, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) 
throws RemoteProcessLifecycleException;\n\n    RemoteCreationResult createRemoteUProcess( long clientId, URI imageURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException;\n\n\n    @Override\n    void register( UProcess that );\n\n    @Override\n    void erase( UProcess that );\n\n    Long queryClientIdByPID( GUID pid );\n\n    RemoteProcess createMediatedRemoteProcess( long clientId, RemoteVitalizationResponse response );\n\n    RemoteProcess createMediatedRemoteProcess( long clientId, UProcessMirrorDTO processDTO );\n\n\n\n    class RemoteCreationResult {\n        RemoteVitalizationResponse response;\n        RemoteProcess process;\n\n        public RemoteProcess getProcess() {\n            return this.process;\n        }\n\n        public RemoteVitalizationResponse getResponse() {\n            return this.response;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/system/RavenException.java",
    "content": "package com.walnut.odin.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class RavenException extends Exception implements Pinenut {\n\n    public RavenException() {\n        super();\n    }\n\n    public RavenException(String message ) {\n        super(message);\n    }\n\n    public RavenException(String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RavenException(Throwable cause ) {\n        super(cause);\n    }\n\n    protected RavenException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/system/RavenRuntimeException.java",
    "content": "package com.walnut.odin.system;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class RavenRuntimeException extends PineRuntimeException {\n\n    public RavenRuntimeException() {\n        super();\n    }\n\n    public RavenRuntimeException( String message ) {\n        super(message);\n    }\n\n    public RavenRuntimeException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RavenRuntimeException( Throwable cause ) {\n        super(cause);\n    }\n\n    protected RavenRuntimeException( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/system/TaskCentralControl.java",
    "content": "package com.walnut.odin.system;\n\nimport com.pinecone.framework.system.SynergicSystem;\nimport com.pinecone.hydra.system.centrum.CentralControlSubsystem;\n\npublic interface TaskCentralControl extends SynergicSystem, CentralControlSubsystem {\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/CentralizedTaskInstrument.java",
    "content": "package com.walnut.odin.task;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.UniformTaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.walnut.odin.task.service.CategoryService;\nimport com.walnut.odin.task.source.RavenTaskMasterManipulator;\nimport com.walnut.odin.task.system.TaskPathInvalidException;\n\npublic interface CentralizedTaskInstrument extends TaskInstrument {\n\n    RavenTaskConfig RAVEN_TASK_CONFIG = new GenericRavenTaskConfig();\n\n    UniformTaskInstrument getUniformTaskInstrument();\n\n    RavenTaskMasterManipulator getRavenTaskMasterManipulator();\n\n    CategoryService getCategoryService();\n\n    GUID assertGUIDByPath ( String taskTreePath ) throws TaskPathInvalidException;\n\n    GUID assertTaskGUIDByPath ( String taskTreePath ) throws TaskPathInvalidException, IllegalArgumentException;\n\n\n\n\n\n\n\n    RavenTask constructTask( TaskElement taskElement );\n\n    RavenTask constructTask( TaskElement taskElement, @Nullable Identification serviceId );\n\n    RavenTask createTask( TaskElement taskElement, @Nullable Identification serviceId );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/GenericRavenTaskConfig.java",
    "content": "package com.walnut.odin.task;\n\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.system.ko.ArchKernelObjectConfig;\n\npublic class GenericRavenTaskConfig extends ArchKernelObjectConfig implements RavenTaskConfig {\n\n    protected String mszInstanceTitleTimeFormat = RavenTaskConstants.InstanceTitleTimeFormat;\n    protected String mszDefaultDateTimeFormat   = RavenTaskConstants.DefaultDateTimeFormat;\n\n    protected int    mnScheduleScanThreadCount  = RavenTaskConstants.ScheduleScanThreadCount;\n    protected long   mnScheduleScanIdWindow     = RavenTaskConstants.ScheduleScanIdWindow;\n\n    protected String mszSchedulePartitionName   = \"__DEFAULT__\";\n    protected JSONObject mScheduleGlobalAllocatorConfig;\n\n    public GenericRavenTaskConfig() {\n        super();\n    }\n\n    public GenericRavenTaskConfig( JSONObject main ) {\n        super( main.optJSONObject( \"kernelConfig\" ) );\n        JSONObject config = main.optJSONObject( \"kernelConfig\" );\n        this.mszInstanceTitleTimeFormat = (String) config.getOrDefault(\"instanceTitleTimeFormat\", RavenTaskConstants.InstanceTitleTimeFormat);\n        this.mszDefaultDateTimeFormat   = (String) config.getOrDefault(\"defaultDateTimeFormat\", RavenTaskConstants.DefaultDateTimeFormat);\n\n        this.mnScheduleScanThreadCount  = ( (Number) config.getOrDefault(\"scheduleScanThreadCount\", RavenTaskConstants.ScheduleScanThreadCount) ).intValue();\n        this.mnScheduleScanIdWindow     = ( (Number) config.getOrDefault(\"scheduleScanIdWindow\", RavenTaskConstants.ScheduleScanIdWindow) ).longValue();\n\n        this.mszSchedulePartitionName   = main.optJSONObject( \"scheduler\" ).optString( \"partitionName\", \"__DEFAULT__\" );\n        this.mScheduleGlobalAllocatorConfig = main.optJSONObject( \"scheduler\" ).optJSONObject( 
\"globalAllocator\" );\n    }\n\n    @Override\n    public String getInstanceTitleTimeFormat() {\n        return this.mszInstanceTitleTimeFormat;\n    }\n\n    @Override\n    public String getDefaultDateTimeFormat() {\n        return this.mszDefaultDateTimeFormat;\n    }\n\n    @Override\n    public int getScheduleScanThreadCount() {\n        return this.mnScheduleScanThreadCount;\n    }\n\n    @Override\n    public long getScheduleScanIdWindow() {\n        return this.mnScheduleScanIdWindow;\n    }\n\n    @Override\n    public JSONObject getScheduleGlobalAllocatorConfig() {\n        return this.mScheduleGlobalAllocatorConfig;\n    }\n\n    @Override\n    public String getSchedulePartitionName() {\n        return this.mszSchedulePartitionName;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/RavenTask.java",
    "content": "package com.walnut.odin.task;\n\nimport com.pinecone.hydra.system.ups.UniformPyramidTask;\nimport com.pinecone.hydra.task.Task;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\n\n\npublic interface RavenTask extends Task, UniformPyramidTask {\n\n    RavenTaskInstance createInstance();\n\n    TaskElement getTaskElement();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/RavenTaskConfig.java",
    "content": "package com.walnut.odin.task;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\n\npublic interface RavenTaskConfig extends KernelObjectConfig {\n\n    String getInstanceTitleTimeFormat();\n\n    String getDefaultDateTimeFormat();\n\n    int getScheduleScanThreadCount();\n\n    long getScheduleScanIdWindow();\n\n    JSONObject getScheduleGlobalAllocatorConfig();\n\n    String getSchedulePartitionName();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/RavenTaskConstants.java",
    "content": "package com.walnut.odin.task;\n\nimport com.pinecone.framework.util.datetime.DatePattern;\n\npublic final class RavenTaskConstants {\n\n    public static final String InstanceTitleTimeFormat = \"yyyy_MM_dd_HH_mm_ss\";\n    public static final String DefaultDateTimeFormat = DatePattern.NORM_DATETIME_PATTERN;\n\n    public static final int  ScheduleScanThreadCount = 8;\n    public static final long ScheduleScanIdWindow    = 1000L;\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/RavenTaskInstance.java",
    "content": "package com.walnut.odin.task;\n\nimport java.net.URI;\n\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.system.ko.MetaPersistenceException;\nimport com.pinecone.hydra.task.TaskInstance;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\n\npublic interface RavenTaskInstance extends TaskInstance {\n\n    URI getProcessImageURI();\n\n    UProcess affinityProcess();\n\n    void startLocalProcess();\n\n    void startRemoteProcess();\n\n    void startRemoteProcess( boolean bDirectlyVitalize );\n\n    void startRemoteProcess( boolean bDirectlyVitalize, long processClientId );\n\n    void update() throws MetaPersistenceException;\n\n    void persist() throws MetaPersistenceException;\n\n    InstanceInstrument instanceInstrument();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/dto/CategoryTag.java",
    "content": "package com.walnut.odin.task.dto;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.slime.entity.EnumIndexableEntity;\n\npublic interface CategoryTag extends EnumIndexableEntity {\n\n    void setEnumId( long id );\n\n    void setTaskGuid( GUID taskGuid );\n\n    GUID getTaskGuid();\n\n    void setCategoryName( String categoryName );\n\n    String getCategoryName();\n\n    void setCategoryType( String categoryType );\n\n    String getCategoryType();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/entity/pyramid/Category.java",
    "content": "package com.walnut.odin.task.entity.pyramid;\n\nimport com.pinecone.slime.entity.EnumIndexableEntity;\n\npublic interface Category extends EnumIndexableEntity {\n\n    void setEnumId( long id );\n\n    void setName( String name ) ;\n\n    String getName() ;\n\n    void setAlias( String alias ) ;\n\n    String getAlias() ;\n\n    void setDescription( String description ) ;\n\n    String getDescription() ;\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/entity/pyramid/CategoryType.java",
    "content": "package com.walnut.odin.task.entity.pyramid;\n\npublic interface CategoryType extends Category {\n\n}\n\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/entity/pyramid/TaskCategory.java",
    "content": "package com.walnut.odin.task.entity.pyramid;\n\npublic interface TaskCategory extends Category {\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/InstanceAtlasAdjacentMapper.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.walnut.odin.conduct.entity.InstanceAtlasAdjacent;\n\n@IbatisDataAccessObject\npublic interface InstanceAtlasAdjacentMapper {\n\n    void insert( InstanceAtlasAdjacent instanceAtlasAdjacent );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/InstanceAtlasNodeMapper.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport com.pinecone.hydra.task.kom.instance.source.InstanceNodeManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.walnut.odin.conduct.entity.InstanceAtlasNode;\n\n@IbatisDataAccessObject\npublic interface InstanceAtlasNodeMapper extends InstanceNodeManipulator {\n\n    void insert( InstanceAtlasNode instanceAtlasNode);\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/InstanceEventMapper.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.walnut.odin.conduct.entity.InstanceEvent;\n\n@IbatisDataAccessObject\npublic interface InstanceEventMapper {\n\n    void insert( InstanceEvent instanceEvent );\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/InstanceExecMapper.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.walnut.odin.conduct.entity.InstanceExec;\n\n@IbatisDataAccessObject\npublic interface InstanceExecMapper {\n\n    void insert( InstanceExec instanceExec );\n\n    void updateStateByInstanceGuid( InstanceExec execUpdate );\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/mapper/OdinTaskMappingDriver.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\n\npublic interface OdinTaskMappingDriver extends KOIMappingDriver {\n\n    KOIMappingDriver getParentDriver();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/service/CategoryService.java",
    "content": "package com.walnut.odin.task.service;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.NonNull;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.walnut.odin.task.dto.CategoryTag;\nimport com.walnut.odin.task.system.TaskPathInvalidException;\n\npublic interface CategoryService extends Pinenut {\n\n    void addCategoryTag ( CategoryTag categoryTag );\n\n    void addCategoryTag ( String taskTreePath, CategoryTag categoryTag ) throws TaskPathInvalidException, IllegalArgumentException;\n\n    CategoryTag setCategoryTag ( String taskTreePath, CategoryTag categoryTag ) throws TaskPathInvalidException, IllegalArgumentException;\n\n    void updateCategoryTag ( CategoryTag categoryTag );\n\n    CategoryTag queryOwnedTag( GUID taskGuid, String type, String name );\n\n    List<CategoryTag> queryCategoryTag ( GUID taskGuid );\n\n    List<CategoryTag> queryCategoryTag ( String taskTreePath );\n\n    long countCategoryTag( String type, String name );\n\n    List<CategoryTag> queryCategoryTag ( String type, String name, long offset, long pageSize );\n\n    long countCategoryTagsByName( String name );\n\n    List<CategoryTag> fetchCategoryTagByName ( String name, long offset, long pageSize );\n\n    void purgeCategoryTag( @Nullable GUID taskGuid, @Nullable String type, @Nullable String name );\n\n    void purgeCategoryTag( @NonNull String name );\n\n    void purgeCategoryTag( @NonNull GUID taskGuid );\n\n    void removeCategoryTag( @NonNull GUID taskGuid, @NonNull String type, @NonNull String name );\n\n    void eraseCategoryTag( @NonNull String taskTreePath, @Nullable String type, @Nullable String name ) throws TaskPathInvalidException, IllegalArgumentException;\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/CategoryMappingManipulator.java",
    "content": "package com.walnut.odin.task.source;\n\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.walnut.odin.task.dto.CategoryTag;\n\npublic interface CategoryMappingManipulator extends Pinenut {\n\n    void insert( CategoryTag categoryTag );\n\n    List<CategoryTag> queryByTaskGuid ( GUID taskGuid );\n\n    CategoryTag queryOwnedTag( GUID taskGuid, String type, String name );\n\n    long countTag( String type, String name );\n\n    List<CategoryTag> queryTag ( String type, String name, long offset, long pageSize );\n\n    long countTagsByName( String name );\n\n    List<CategoryTag> fetchByName ( String name, long offset, long pageSize );\n\n    void update( CategoryTag categoryTag );\n\n    void purge( GUID taskGuid, String type, String name );\n\n    default void remove( GUID taskGuid, String type, String name ) {\n        this.purge( taskGuid, type, name );\n    }\n\n    default void purgeByName( String name ) {\n        this.purge( null, null, name );\n    }\n\n    default void purgeByTaskGuid( GUID taskGuid ) {\n        this.purge( taskGuid, null, null );\n    }\n\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/CategoryTypeManipulator.java",
    "content": "package com.walnut.odin.task.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.walnut.odin.task.entity.pyramid.CategoryType;\n\n\npublic interface CategoryTypeManipulator extends Pinenut {\n\n    void insert( CategoryType categoryType );\n\n    CategoryType queryType( String name );\n\n    long countTypes();\n\n    List<CategoryType> fetchType( long offset, long pageSize );\n\n    default List<CategoryType> fetchType() {\n        return this.fetchType( 0, this.countTypes() );\n    }\n\n    void remove( String name );\n\n    void update( CategoryType categoryType );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/RavenTaskMasterManipulator.java",
    "content": "package com.walnut.odin.task.source;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\n\npublic interface RavenTaskMasterManipulator extends KOIMasterManipulator {\n\n    TaskMasterManipulator getTaskMasterManipulator();\n\n    KOIMappingDriver getTaskMappingDriver();\n\n    CategoryTypeManipulator getCategoryTypeManipulator();\n\n    TaskCategoryManipulator getTaskCategoryManipulator();\n\n    CategoryMappingManipulator getCategoryMappingManipulator();\n\n    TaskProcessorManipulator getTaskProcessorManipulator();\n\n    ScheduleManipulator getScheduleManipulator();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/ScheduleManipulator.java",
    "content": "package com.walnut.odin.task.source;\n\nimport com.walnut.odin.task.mapper.InstanceAtlasAdjacentMapper;\nimport com.walnut.odin.task.mapper.InstanceAtlasNodeMapper;\nimport com.walnut.odin.task.mapper.InstanceEventMapper;\nimport com.walnut.odin.task.mapper.InstanceExecMapper;\n\npublic interface ScheduleManipulator {\n\n    InstanceEventMapper getInstanceEventMapper();\n\n    InstanceAtlasAdjacentMapper getInstanceAtlasAdjacentMapper();\n\n    InstanceAtlasNodeMapper getInstanceAtlasNodeMapper();\n\n    InstanceExecMapper getInstanceExecMapper();\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/TaskCategoryManipulator.java",
    "content": "package com.walnut.odin.task.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.walnut.odin.task.entity.pyramid.TaskCategory;\n\npublic interface TaskCategoryManipulator extends Pinenut {\n\n    void insert( TaskCategory taskCategory );\n\n    TaskCategory queryTaskCategory( String name );\n\n    long countCategories();\n\n    List<TaskCategory> fetchCategory( long offset, long pageSize );\n\n    default List<TaskCategory> fetchCategory() {\n        return this.fetchCategory( 0, this.countCategories() );\n    }\n\n    void remove( String name );\n\n    void update( TaskCategory kernelCategory );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/source/TaskProcessorManipulator.java",
    "content": "package com.walnut.odin.task.source;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.walnut.odin.dispatch.entity.TaskProcessorEntity;\n\npublic interface TaskProcessorManipulator extends Pinenut {\n\n    TaskProcessorEntity selectByProcessorName( String szProcessorName );\n\n    TaskProcessorEntity selectByGuid( GUID guid );\n\n    List<TaskProcessorEntity> selectByClusterName( String clusterName );\n\n    List<TaskProcessorEntity> selectAll();\n\n    int insert( TaskProcessorEntity entity );\n\n    int updateByGuid( TaskProcessorEntity entity );\n\n    int updateQueueCapacity(\n            GUID guid,\n            int maxCapacity,\n            int minCapacity,\n            int runtimeCapacity\n    );\n\n    int deleteByGuid( GUID guid );\n\n    int disable(  GUID guid );\n\n    int enable(  GUID guid );\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/system/TaskPathInvalidException.java",
    "content": "package com.walnut.odin.task.system;\n\nimport com.walnut.odin.system.RavenRuntimeException;\n\npublic class TaskPathInvalidException extends RavenRuntimeException {\n\n    public TaskPathInvalidException() {\n        super();\n    }\n\n    public TaskPathInvalidException( String path ) {\n        super( \"Path `\" + path + \"` is invalid.\" );\n    }\n\n    public TaskPathInvalidException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public TaskPathInvalidException( Throwable cause ) {\n        super(cause);\n    }\n\n    protected TaskPathInvalidException( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/troll/InstanceLaunchException.java",
    "content": "package com.walnut.odin.task.troll;\n\npublic class InstanceLaunchException extends LaunchException {\n\n    public InstanceLaunchException() {\n        super();\n    }\n\n    public InstanceLaunchException( String message ) {\n        super(message);\n    }\n\n    public InstanceLaunchException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public InstanceLaunchException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/troll/LaunchException.java",
    "content": "package com.walnut.odin.task.troll;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class LaunchException extends Exception implements Pinenut {\n\n    public LaunchException() {\n        super();\n    }\n\n    public LaunchException(String message ) {\n        super(message);\n    }\n\n    public LaunchException(String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public LaunchException(Throwable cause ) {\n        super(cause);\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/troll/LaunchFeature.java",
    "content": "package com.walnut.odin.task.troll;\n\nimport java.net.URI;\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\n\npublic class LaunchFeature implements Pinenut {\n\n    private boolean retry;\n\n    private URI designatedImageURI;\n\n    private UProcess parentProcess;\n\n    private String processorDesignated;\n\n    private GUID parentPid;\n\n    private Map<String, String[]> startupArgs;\n\n    private Map<String, String[]> contextEnvironmentVars;\n\n    private LocalDateTime bizTimeEpoch;\n\n    private List<ProcessEventHandler> sysProcEventHandlers;\n\n    public LaunchFeature() {\n        this.bizTimeEpoch = LocalDateTime.now().minusDays( 1 ); // dtm\n    }\n\n    public boolean isRetry() {\n        return this.retry;\n    }\n\n    public URI getDesignatedImageURI() {\n        return this.designatedImageURI;\n    }\n\n    public UProcess getParentProcess() {\n        return this.parentProcess;\n    }\n\n    public Map<String, String[]> getStartupArgs() {\n        return this.startupArgs;\n    }\n\n    public Map<String, String[]> getContextEnvironmentVars() {\n        return this.contextEnvironmentVars;\n    }\n\n    public GUID getParentPid() {\n        return this.parentPid;\n    }\n\n    public List<ProcessEventHandler> getSysProcEventHandlers() {\n        return this.sysProcEventHandlers;\n    }\n\n    public String getProcessorDesignated() {\n        return this.processorDesignated;\n    }\n\n    public LaunchFeature withProcessorDesignated( String processorName ) {\n        this.processorDesignated = processorName;\n        return this;\n    }\n\n    public LaunchFeature withParentPid( GUID pid ) {\n        this.parentPid = pid;\n        return this;\n    }\n\n    public 
LocalDateTime getBizTimeEpoch() {\n        return this.bizTimeEpoch;\n    }\n\n    public void setBizTimeEpoch( LocalDateTime bizTimeEpoch ) {\n        this.bizTimeEpoch = bizTimeEpoch;\n    }\n\n    public LaunchFeature withRetry(boolean retry ) {\n        this.retry = retry;\n        return this;\n    }\n\n    public LaunchFeature withDesignatedImageURI( URI designatedImageURI ) {\n        this.designatedImageURI = designatedImageURI;\n        return this;\n    }\n\n    public LaunchFeature withParentProcess( UProcess parent ) {\n        this.parentProcess = parent;\n        this.parentPid = parent.getPID();\n        return this;\n    }\n\n    public LaunchFeature withStartupArgs( Map<String, String[]> startupArgs ) {\n        this.startupArgs = startupArgs;\n        return this;\n    }\n\n    public LaunchFeature withContextEnvironmentVars( Map<String, String[]> contextEnvironmentVars ) {\n        this.contextEnvironmentVars = contextEnvironmentVars;\n        return this;\n    }\n\n    public LaunchFeature withSysProcEventHandlers( List<ProcessEventHandler> sysProcEventHandlers ) {\n        this.sysProcEventHandlers = sysProcEventHandlers;\n        return this;\n    }\n\n    public LaunchFeature withSysProcEventHandlers( ProcessEventHandler handler ) {\n        if ( this.sysProcEventHandlers == null ) {\n            this.sysProcEventHandlers = new ArrayList<>();\n        }\n        this.sysProcEventHandlers.add( handler );\n        return this;\n    }\n\n}"
  },
  {
    "path": "Odin/odin-architecture/src/main/java/com/walnut/odin/task/troll/TaskExecutionLauncher.java",
    "content": "package com.walnut.odin.task.troll;\n\nimport java.time.LocalDateTime;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.walnut.odin.task.RavenTaskInstance;\n\npublic interface TaskExecutionLauncher extends Manager {\n\n    ProcessManager processManager();\n\n    LocalDateTime evalBusinessTime( RavenTaskInstance instance, LocalDateTime biz ) ;\n\n    LocalDateTime evalBusinessTime( RavenTaskInstance instance ) ;\n\n    String evalBusinessTimeLabel( RavenTaskInstance instance, LocalDateTime biz ) ;\n\n    String evalBusinessTimeLabel( RavenTaskInstance instance ) ;\n\n    String evalInstanceName( RavenTaskInstance instance, LocalDateTime now, LocalDateTime bizTimeEpoch ) ;\n\n    String evalInstanceName( RavenTaskInstance instance, LocalDateTime bizTimeEpoch ) ;\n\n    void initializeInstance( RavenTaskInstance instance, LaunchFeature feature );\n\n\n\n    UProcess createLocally( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException;\n\n    UProcess createRemotely( RavenTaskInstance instance, long pmClientId, LaunchFeature feature ) throws InstanceLaunchException;\n\n\n\n    UProcess launchLocally( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException;\n\n    UProcess launchRemotely( RavenTaskInstance instance, long pmClientId, LaunchFeature feature ) throws InstanceLaunchException;\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>odin</artifactId>\n        <groupId>com.walnut.odin</groupId>\n        <version>2.5.1</version>\n    </parent>\n\n    <artifactId>odin-framework-atlas</artifactId>\n    <version>2.5.1</version>\n    <modelVersion>4.0.0</modelVersion>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-architecture</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n</project>"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/GenericGraphStratumTape.java",
    "content": "package com.walnut.odin.atlas.advance;\n\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue;\nimport com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue;\nimport com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\n\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\n\n\npublic class GenericGraphStratumTape implements GraphStratumTape {\n    protected RuntimeAtlasInstrument                          mRuntimeAtlasInstrument;\n\n    // StratumId => RuntimePriority => MegaDeflectPriorityQueue\n    protected List<Map<Short, DeflectPriorityQueue>>      mMegaDeflectPriorityQueues;\n\n    protected DeflectPriorityQueue mExecutionPriorityQueue;\n\n    protected VectorDAG                                       mVectorDAG;\n\n    public GenericGraphStratumTape( RuntimeAtlasInstrument runtimeAtlasInstrument, VectorDAG vectorDAG, KOIMappingDriver queueDrive ) {\n        this.mRuntimeAtlasInstrument = runtimeAtlasInstrument;\n        this.mVectorDAG = vectorDAG;\n        ArrayList<Map<Short, DeflectPriorityQueue>> list = new ArrayList<>();\n        int stratumNum = this.mRuntimeAtlasInstrument.countStratum(vectorDAG.getAffiliateLayerGuid());\n\n        for( int i = 0; i < stratumNum; i++ ) {\n            HashMap<Short, DeflectPriorityQueue> map = new HashMap<>();\n            int priorityNum = this.mRuntimeAtlasInstrument.countPriority(vectorDAG.getAffiliateLayerGuid(), (short) i);\n            for( int j = 0; j < priorityNum; j++ ) {\n                String segmentName = 
this.mRuntimeAtlasInstrument.querySegmentName(vectorDAG.getAffiliateLayerGuid(), (short) i, (short) j);\n                MegaDeflectPriorityQueueMeta meta = new ConfigurableMegaDeflectPriorityQueueMeta();\n                meta.setQueueTableName(\"hydra_queue_nodes\");\n                MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(queueDrive, 0, \"segment_name\", segmentName, meta );\n                map.put((short) j, magnitudeDPQueue );\n            }\n            list.add( map );\n        }\n        this.mMegaDeflectPriorityQueues = list;\n    }\n\n\n    @Override\n    public GraphNode queryNodeByIndex( long index ) {\n        long currentNum = 0;\n        for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); i++ ) {\n            for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) {\n                currentNum += queue.size();\n                if( currentNum >= index ) {\n                    QueueElement queueElement = queue.getByIndex(index);\n                    return this.mVectorDAG.get( queueElement.getObjectGuid() );\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public GUID queryNodeGuidByIndex( long index ) {\n        long currentNum = 0;\n        for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); i++ ) {\n            for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) {\n                currentNum += queue.size();\n                if( currentNum >= index ) {\n                    return queue.getByIndex( index ).getObjectGuid();\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public List<GraphNode> fetchNodes( List<GUID> guids ) {\n        ArrayList<GraphNode> nodes = new ArrayList<>();\n        for( GUID guid : guids ) {\n            nodes.add( this.mVectorDAG.get( guid ) );\n        }\n        return nodes;\n    }\n\n    @Override\n    public List<GraphNode> fetchNodes( 
long offset, long limit ) {\n        long currentNum = 0;\n        long maxIndex = offset + limit;\n\n        ArrayList<GraphNode> graphNodes = new ArrayList<>();\n        for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); ++i ) {\n            for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) {\n                currentNum += queue.size();\n                if( currentNum >= offset ) {\n                    List<QueueElement> queueElements = queue.fetchElements(offset, limit);\n                    ArrayList<GraphNode> nodes = new ArrayList<>();\n                    for( QueueElement element : queueElements ) {\n                        nodes.add( this.mVectorDAG.get( element.getObjectGuid() ) );\n                    }\n                    graphNodes.addAll( nodes );\n                }\n\n                if( currentNum > maxIndex ) {\n                    return graphNodes;\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public List<GraphNode> fetchNodes( long queuePriority, long offset, long limit ) {\n        long currentNum = 0;\n        long maxIndex = offset + limit;\n\n        ArrayList<GraphNode> graphNodes = new ArrayList<>();\n        for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); ++i ) {\n            for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) {\n                currentNum += queue.size();\n                if( currentNum >= offset ) {\n                    List<QueueElement> queueElements = queue.fetchElementByPriority( queuePriority, offset, limit );\n                    ArrayList<GraphNode> nodes = new ArrayList<>();\n                    for( QueueElement element : queueElements ) {\n                        nodes.add( this.mVectorDAG.get( element.getObjectGuid() ) );\n                    }\n                    graphNodes.addAll( nodes );\n                }\n\n                if( currentNum > maxIndex ) {\n              
      return graphNodes;\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public List<GUID> fetchGuids( long offset, long limit ) {\n        long currentNum = 0;\n        long maxIndex = offset + limit;\n\n        ArrayList<GUID> graphNodes = new ArrayList<>();\n        for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); ++i ) {\n            for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) {\n                currentNum += queue.size();\n                if( currentNum >= offset ) {\n                    List<QueueElement> queueElements = queue.fetchElements(offset, limit);\n                    ArrayList<GUID> nodes = new ArrayList<>();\n                    for( QueueElement element : queueElements ) {\n                        nodes.add( element.getObjectGuid() );\n                    }\n                    graphNodes.addAll( nodes );\n                }\n\n                if( currentNum > maxIndex ) {\n                    return graphNodes;\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public List<GUID> fetchGuids( long queuePriority, long offset, long limit ) {\n        long currentNum = 0;\n        long maxIndex = offset + limit;\n\n        ArrayList<GUID> graphNodes = new ArrayList<>();\n        for( int i = 0; i < this.mMegaDeflectPriorityQueues.size(); ++i ) {\n            for( DeflectPriorityQueue queue : this.mMegaDeflectPriorityQueues.get(i).values() ) {\n                currentNum += queue.size();\n                if( currentNum >= offset ) {\n                    List<QueueElement> queueElements = queue.fetchElementByPriority( queuePriority, offset, limit );\n                    ArrayList<GUID> nodes = new ArrayList<>();\n                    for( QueueElement element : queueElements ) {\n                        nodes.add( element.getObjectGuid() );\n                    }\n                    graphNodes.addAll( nodes );\n  
              }\n\n                if( currentNum > maxIndex ) {\n                    return graphNodes;\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public int countStratum() {\n        return this.mRuntimeAtlasInstrument.countStratum( this.mVectorDAG.getAffiliateLayerGuid() );\n    }\n\n    @Override\n    public DeflectPriorityQueue query(int stratumId, short runtimePriority ) {\n        Map<Short, DeflectPriorityQueue> queueMap = this.mMegaDeflectPriorityQueues.get( stratumId );\n        if ( queueMap != null ) {\n            return queueMap.get( runtimePriority );\n        }\n\n        return null;\n    }\n\n    @Override\n    public DeflectPriorityQueue getExecutionPriorityQueue() {\n        return this.mExecutionPriorityQueue;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/GenericQueueEntity.java",
    "content": "package com.walnut.odin.atlas.advance;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class GenericQueueEntity implements QueueEntity{\n    private GUID mGuid;\n\n    private int mnStratum;\n\n\n\n    public GenericQueueEntity(){}\n\n    public GenericQueueEntity( GUID guid, int stratum ) {\n        this.mGuid = guid;\n        this.mnStratum = stratum;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.mGuid = guid;\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mGuid;\n    }\n\n    @Override\n    public void setStratum(int stratum) {\n        this.mnStratum = stratum;\n    }\n\n    @Override\n    public int getStratum() {\n        return this.mnStratum;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/GenericTapedBFSGraphAdvancer.java",
    "content": "package com.walnut.odin.atlas.advance;\n\nimport com.walnut.odin.atlas.advance.strategy.PriorityProcessStrategy;\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\nimport com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue;\nimport com.pinecone.hydra.unit.iqueue.QueueExistManipulator;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\n\nimport java.util.List;\n\npublic class GenericTapedBFSGraphAdvancer implements TapedBFSGraphStratumAdvancer {\n    private RuntimeAtlasInstrument          mRuntimeAtlasInstrument;\n\n    private QueueExistManipulator           mQueueExistManipulator;\n\n    private DeflectPriorityQueue            mDeflectPriorityQueue;\n\n    private PriorityProcessStrategy         mStrategy;\n\n    public GenericTapedBFSGraphAdvancer( RuntimeAtlasInstrument runtimeAtlasInstrument, DeflectPriorityQueue deflectPriorityQueue, PriorityProcessStrategy strategy ) {\n        this.mRuntimeAtlasInstrument    = runtimeAtlasInstrument;\n        this.mDeflectPriorityQueue      = deflectPriorityQueue;\n        this.mQueueExistManipulator     = deflectPriorityQueue.getMasterManipulator().getQueueExistManipulator();\n        this.mStrategy = strategy;\n    }\n\n    public void traverse( VectorDAG vectorDAG ) {\n        if( !this.mQueueExistManipulator.isExist( vectorDAG.getAffiliateLayerGuid() ) ) {\n            this.mQueueExistManipulator.setQueueExist( vectorDAG.getAffiliateLayerGuid() );\n            this.mStrategy.process( vectorDAG );\n        }\n\n    }\n\n    @Override\n    public List<QueueElement> fetchExecuteNode( long offset, long limit ) {\n        return this.mDeflectPriorityQueue.fetchElements( offset, limit );\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/QueueEntity.java",
    "content": "package com.walnut.odin.atlas.advance;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface QueueEntity extends Pinenut {\n    void setGuid( GUID guid );\n\n    GUID getGuid();\n\n    void setStratum( int stratum );\n\n    int getStratum();\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/TapedBFSGraphStratumAdvancer.java",
    "content": "package com.walnut.odin.atlas.advance;\n\npublic interface TapedBFSGraphStratumAdvancer extends GraphStratumAdvancer {\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/strategy/AtlasPriorityProcessStrategy.java",
    "content": "package com.walnut.odin.atlas.advance.strategy;\n\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class AtlasPriorityProcessStrategy implements PriorityProcessStrategy {\n    protected List<GraphPriorityProcessStrategy > mStrategyPipeline;\n\n\n    public AtlasPriorityProcessStrategy() {\n        this.mStrategyPipeline                  = new ArrayList<>();\n    }\n\n    @Override\n    public void process( VectorDAG vectorDAG ) {\n        for ( GraphPriorityProcessStrategy strategy : this.mStrategyPipeline ) {\n            strategy.process( vectorDAG );\n        }\n    }\n\n    @Override\n    public void addStrategy( GraphPriorityProcessStrategy strategy ) {\n        this.mStrategyPipeline.add( strategy );\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/strategy/GraphPriorityProcessStrategy.java",
    "content": "package com.walnut.odin.atlas.advance.strategy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\n\npublic interface GraphPriorityProcessStrategy extends Pinenut {\n    void process( VectorDAG vectorDAG );\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/strategy/MegaInDegreeFirstStrategy.java",
    "content": "package com.walnut.odin.atlas.advance.strategy;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue;\nimport com.pinecone.hydra.unit.iqueue.MegaStratumQueue;\nimport com.pinecone.hydra.unit.iqueue.entity.GenericQueueElement;\nimport com.pinecone.hydra.unit.iqueue.entity.GenericStratumQueueElement;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueStratumElement;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\n\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\n\npublic class MegaInDegreeFirstStrategy implements GraphPriorityProcessStrategy {\n    private RuntimeAtlasInstrument      mRuntimeAtlasInstrument;\n\n    private DeflectPriorityQueue        mDeflectPriorityQueue;\n\n    private MegaStratumQueue            mTempMegaStratumQueue;\n\n    private LayerInstrument             mLayerInstrument;\n\n    private int mnPriority = 0;\n\n    public MegaInDegreeFirstStrategy(\n            RuntimeAtlasInstrument runtimeAtlasInstrument, DeflectPriorityQueue deflectPriorityQueue,\n            MegaStratumQueue megaStratumQueue, LayerInstrument layerInstrument\n    ) {\n        this.mRuntimeAtlasInstrument    = runtimeAtlasInstrument;\n        this.mDeflectPriorityQueue      = deflectPriorityQueue;\n        this.mTempMegaStratumQueue      = megaStratumQueue;\n        this.mLayerInstrument           = layerInstrument;\n    }\n\n\n    @Override\n    public void process( VectorDAG vectorDAG ) {\n        Layer layer = (Layer)this.mLayerInstrument.get(vectorDAG.getAffiliateLayerGuid());\n        long handNodeNums = this.mLayerInstrument.countSourceNode( vectorDAG.getAffiliateLayerGuid() );\n        long offset = 0;\n\n        //todo 后面记得将这个每次遍历的节点数量改成配置\n        for ( long i = 0; i < handNodeNums; i += 
1000 ) {\n            List<GUID> handleGuids = this.mLayerInstrument.fetchSourceGuidsByTaskPriority(vectorDAG.getAffiliateLayerGuid(),offset, 1000);\n            for (GUID handleGuid : handleGuids) {\n                TaskElement taskElement = this.mRuntimeAtlasInstrument.queryTaskElementByGuid(handleGuid);\n                if (taskElement.getPriority() > this.mnPriority) {\n                    this.bfsGraph(vectorDAG, this.mnPriority, layer.getSinkGuids());\n                    ++this.mnPriority;\n                }\n                GenericStratumQueueElement element = new GenericStratumQueueElement();\n                element.setObjectGuid(handleGuid);\n                element.setStratum((short) 0);\n                this.mTempMegaStratumQueue.pushBack(element);\n                ++offset;\n            }\n        }\n        // 跳出循环后要将所有节点入队，直接降低成最低优先级\n        this.bfsGraph( vectorDAG, 10,layer.getSinkGuids() );\n    }\n\n    protected void bfsGraph( VectorDAG vectorDAG, int priority, List<GUID> sinkNodes ) {\n        while ( !this.mTempMegaStratumQueue.isEmpty() ) {\n            QueueStratumElement pop = this.mTempMegaStratumQueue.popFront();\n            GUID currentNodeGuid = pop.getObjectGuid();\n\n            TaskElement taskElement = this.mRuntimeAtlasInstrument.queryTaskElementByGuid(currentNodeGuid);\n            GenericQueueElement element = new GenericQueueElement();\n            element.setObjectGuid(currentNodeGuid);\n            element.setPriority(taskElement.getPriority());\n            this.mDeflectPriorityQueue.pushBack(element);\n\n            this.mRuntimeAtlasInstrument.putStratumMeta(\n                    vectorDAG.getAffiliateLayerGuid(),\n                    (short) pop.getStratum(),\n                    (short) element.getPriority(),\n                    this.mDeflectPriorityQueue.getSegmentName()\n            );\n\n            if (sinkNodes != null && sinkNodes.contains(currentNodeGuid)) {\n                continue;\n            }\n\n         
   long childNodeNum = vectorDAG.countChildNodeNum(currentNodeGuid);\n            long childOffset = 0;\n\n            for ( int i = 0; i < childNodeNum; i += 1000 ) {\n                List<GUID> guids = vectorDAG.fetchChildNodeGuids(childOffset, 1000, currentNodeGuid);\n                for (GUID guid : guids) {\n                    TaskElement childtaskElement = this.mRuntimeAtlasInstrument.queryTaskElementByGuid(guid);\n                    if (childtaskElement.getPriority() <= priority) {\n                        GenericStratumQueueElement stratumQueueElement = new GenericStratumQueueElement();\n                        stratumQueueElement.setObjectGuid(guid);\n                        stratumQueueElement.setStratum((short) (pop.getStratum() + 1));\n                        this.mTempMegaStratumQueue.pushBack(stratumQueueElement);\n                    }\n                }\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/advance/strategy/PriorityProcessStrategy.java",
    "content": "package com.walnut.odin.atlas.advance.strategy;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\n\npublic interface PriorityProcessStrategy extends Pinenut {\n    void process( VectorDAG vectorDAG );\n\n    void addStrategy( GraphPriorityProcessStrategy strategy );\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/AnalyzeStage.java",
    "content": "package com.walnut.odin.atlas.flow;\n\npublic interface AnalyzeStage extends ConductFlow {\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/ConductFlow.java",
    "content": "package com.walnut.odin.atlas.flow;\n\n\nimport com.pinecone.hydra.system.flow.Flow;\n\npublic interface ConductFlow extends Flow {\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/ConductStage.java",
    "content": "package com.walnut.odin.atlas.flow;\n\n\nimport com.pinecone.hydra.system.flow.Stage;\n\npublic interface ConductStage extends Stage {\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/MarshallingStage.java",
    "content": "package com.walnut.odin.atlas.flow;\n\npublic interface MarshallingStage extends ConductFlow {\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/flow/OptimizationStage.java",
    "content": "package com.walnut.odin.atlas.flow;\n\npublic interface OptimizationStage extends ConductStage {\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/graph/UniformRuntimeAtlas.java",
    "content": "package com.walnut.odin.atlas.graph;\n\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.entity.TaskTreeNode;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.hydra.unit.vgraph.ArchAtlasInstrument;\nimport com.pinecone.hydra.unit.vgraph.MagnitudeVectorDAG;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\nimport com.pinecone.hydra.unit.vgraph.VectorGraphConfig;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator;\nimport com.pinecone.slime.meta.TableIndexMeta;\n\nimport com.walnut.odin.atlas.advance.GenericGraphStratumTape;\nimport com.walnut.odin.atlas.advance.GraphStratumTape;\nimport com.walnut.odin.atlas.graph.entity.TaskGraphNode;\nimport com.walnut.odin.atlas.mapper.QueueStratumManipulator;\nimport com.walnut.odin.atlas.mapper.RunAtlasMasterManipulator;\nimport com.walnut.odin.atlas.mapper.TaskGraphManipulator;\n\nimport java.util.List;\n\npublic class UniformRuntimeAtlas extends ArchAtlasInstrument implements RuntimeAtlasInstrument {\n\n    private TaskInstrument                       mTaskInstrument;\n\n    private RunAtlasMasterManipulator            mRuntimeMasterManipulator;\n\n    private VectorGraphMasterManipulator         mVectorGraphMasterManipulator;\n\n    private TaskGraphManipulator                 mTaskGraphManipulator;\n\n    private 
QueueStratumManipulator              mQueueStratumManipulator;\n\n    protected void init( TaskInstrument taskInstrument ) {\n        this.mTaskInstrument                   = taskInstrument;\n        this.mRuntimeMasterManipulator         = (RunAtlasMasterManipulator) this.mAtlasMasterManipulator;\n        this.mQueueStratumManipulator          = this.mRuntimeMasterManipulator.getQueueStratumManipulator();\n        this.mVectorGraphMasterManipulator     = this.mRuntimeMasterManipulator.getVectorGraphMasterManipulator();\n        this.mTaskGraphManipulator             = (TaskGraphManipulator) this.mVectorGraphMasterManipulator.getVectorGraphManipulator();\n    }\n\n    public UniformRuntimeAtlas(\n            TaskInstrument taskInstrument, LayerInstrument layerInstrument, AtlasMappingDriver driver, VectorGraphConfig config\n    ) {\n        super( driver, config, layerInstrument );\n        this.init( taskInstrument );\n    }\n\n    public UniformRuntimeAtlas( AtlasMappingDriver driver, TaskInstrument taskInstrument, LayerInstrument layerInstrument ) {\n        super( driver, layerInstrument );\n        this.init( taskInstrument );\n    }\n\n    @Override\n    public TaskInstrument taskInstrument() {\n        return this.mTaskInstrument;\n    }\n\n\n\n    @Override\n    public GUID put( GraphNode graphNode ) {\n        return super.put(graphNode);\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        super.remove(guid);\n    }\n\n    public TaskGraphNode query( GUID guid ) {\n        return (TaskGraphNode) super.get(guid);\n    }\n\n    @Override\n    public GraphNode queryGraphNodeByTaskGuid( GUID taskGuid ) {\n        TaskGraphNode taskGraphNode = this.mTaskGraphManipulator.getNodeByTaskGuid( taskGuid );\n        GUID guid = taskGraphNode.getId();\n        return this.query(guid);\n    }\n\n    @Override\n    public TaskElement queryTaskElementByGuid( GUID graphNodeGuid ) {\n        GUID guid = this.mTaskGraphManipulator.queryTaskGuidByNodeId( 
graphNodeGuid );\n        TaskTreeNode taskTreeNode = (TaskTreeNode) this.mTaskInstrument.get( guid );\n        ElementNode elementNode = taskTreeNode.evinceElementNode();\n        if ( elementNode != null ) {\n            return elementNode.evinceTaskElement();\n        }\n        return null;\n    }\n\n    @Override\n    public GraphStratumTape tapedGraphStratumAdvancer( VectorDAG vectorDAG, KOIMappingDriver driver ) {\n        return new GenericGraphStratumTape( this, vectorDAG, driver );\n    }\n\n    @Override\n    public String querySegmentName( GUID vgraphGuid, short stratumId, short runtimePriority ) {\n        return this.mQueueStratumManipulator.querySegmentName( vgraphGuid, stratumId, runtimePriority );\n    }\n\n    @Override\n    public int countStratum( GUID vgraphGuid ) {\n        Integer i = this.mQueueStratumManipulator.countStratum( vgraphGuid );\n        Assert.notNull( i );\n        return i;\n    }\n\n    @Override\n    public int countPriority( GUID vgraphGuid, short stratumId ) {\n        Integer i = this.mQueueStratumManipulator.countPriority( vgraphGuid, stratumId );\n        Assert.notNull( i );\n        return i;\n    }\n\n    @Override\n    public void putStratumMeta( GUID vgraphGuid, short stratumId, short runtimePriority, String segmentName ) {\n        this.mQueueStratumManipulator.put( vgraphGuid, stratumId, runtimePriority, segmentName );\n    }\n\n    @Override\n    public VectorDAG toVectorDAG( Layer layer ) {\n        return new MagnitudeVectorDAG(\n                layer,\n                this.mVectorGraphMasterManipulator,\n                this.mVectorGraphConfig\n        );\n    }\n\n    @Override\n    public VectorDAG getByLayerGuid( GUID layerGuid ) {\n        TreeNode treeNode = this.mLayerInstrument.get( layerGuid );\n        if ( !( treeNode instanceof Layer ) ) {\n            return null;\n        }\n        Layer layer = (Layer) treeNode;\n        return this.toVectorDAG( layer );\n    }\n\n    @Override\n    public 
VectorDAG queryByPath( String path ) {\n        EntityNode entityNode = this.mLayerInstrument.queryNode( path );\n        if ( !( entityNode instanceof Layer ) ) {\n            return null;\n        }\n        Layer layer = (Layer) entityNode;\n        return this.toVectorDAG( layer );\n    }\n\n    @Override\n    public List<GUID> fetchParentIds(GUID graphNodeGuid) {\n        return  this.mTaskGraphManipulator.fetchParentIds( graphNodeGuid );\n    }\n\n    @Override\n    public void addChild( GUID parentGuid, GUID childGuid ) {\n        this.mVectorGraphManipulator.addChild( parentGuid,childGuid );\n    }\n\n\n\n\n\n\n    @Unsafe( \"TestOnly\" )\n    @Override\n    public List<GraphNode> fetchIsolatedNodesAll() {\n        TableIndexMeta meta = this.getIsolatedNodeIndexMeta();\n        return this.fetchIsolatedNodesById( meta.getMinId(), meta.getMaxId() );\n    }\n\n    @Override\n    public List<GraphNode> fetchIsolatedNodes( long offset, long limit ) {\n        return this.mTaskGraphManipulator.fetchIsolatedNodes( offset, limit );\n    }\n\n    @Override\n    public List<GraphNode> fetchIsolatedNodesById( long idStart, long idEnd ) {\n        return this.mTaskGraphManipulator.fetchIsolatedNodesById( idStart, idEnd );\n    }\n\n    @Override\n    public TableIndexMeta getIsolatedNodeIndexMeta() {\n        return this.mTaskGraphManipulator.selectIsolatedNodeIndexMeta();\n    }\n\n    @Override\n    public long queryMaxIsolatedNodePage( long limit ) {\n        if ( limit <= 0 ) {\n            throw new IllegalArgumentException( \"Limit must be greater than zero.\" );\n        }\n\n        long nTotal = this.mTaskGraphManipulator.countIsolatedNodes();\n        if ( nTotal == 0 ) {\n            return 0;\n        }\n\n        long nPage = nTotal / limit;\n        if ( nTotal % limit != 0 ) {\n            nPage++;\n        }\n\n        return nPage;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/graph/entity/TaskAtlasNode.java",
    "content": "package com.walnut.odin.atlas.graph.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\nimport java.util.List;\n\npublic class TaskAtlasNode implements TaskGraphNode {\n    private long            enumId;\n\n    private GUID            guid;\n\n    private GUID            taskGuid;\n\n    private String          name;\n\n    private List<GUID>      parentIds;\n\n    private String          description;\n\n    private boolean         isolated;\n\n    public TaskAtlasNode(){\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n    @Override\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    @Override\n    public String getName() {\n        return this.name;\n    }\n\n    @Override\n    public GUID getId() {\n        return this.guid;\n    }\n\n    @Override\n    public void setId(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public GUID getTaskGuid() {\n        return this.taskGuid;\n    }\n\n    @Override\n    public void setTaskGuid( GUID taskGuid ) {\n        this.taskGuid = taskGuid;\n    }\n\n    @Override\n    public List<GUID> getParentIds() {\n        return this.parentIds;\n    }\n\n    @Override\n    public void setParentIds(List<GUID> parentIds) {\n        this.parentIds = parentIds;\n    }\n\n    @Override\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    @Override\n    public String getDescription() {\n        return this.description;\n    }\n\n    @Override\n    public void setDescription(String description) {\n        this.description = description;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    public boolean isIsolated() {\n        return this.isolated;\n   
 }\n\n    public void setIsolated(boolean isolated ) {\n        this.isolated = isolated;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/graph/entity/TaskGraphNode.java",
    "content": "package com.walnut.odin.atlas.graph.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\n\npublic interface TaskGraphNode extends GraphNode {\n    void setName( String name );\n\n    GUID getTaskGuid();\n\n    void setTaskGuid( GUID taskGuid );\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/QueueStratumManipulator.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface QueueStratumManipulator extends Pinenut {\n    String querySegmentName( GUID vgraphGuid, short stratumId, short runtimePriority );\n\n    Integer countStratum( GUID vgraphGuid );\n\n    Integer countPriority( GUID vgraphGuid, short stratumId  );\n\n    void put( GUID vgraphGuid, short stratumId, short runtimePriority, String segmentName );\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/QueueStratumMapper.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface QueueStratumMapper extends QueueStratumManipulator {\n\n    @Override\n    String querySegmentName(@Param(\"vgraphGuid\") GUID vgraphGuid, @Param(\"stratumId\") short stratumId,\n                            @Param(\"runtimePriority\") short runtimePriority);\n\n    @Override\n    Integer countStratum( GUID vgraphGuid );\n\n    @Override\n    Integer countPriority( @Param(\"vgraphGuid\") GUID vgraphGuid, @Param(\"stratumId\") short stratumId );\n\n    @Override\n    void put(GUID vgraphGuid, short stratumId, short runtimePriority, String segmentName);\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/RunAtlasMasterManipulator.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMasterManipulator;\n\npublic interface RunAtlasMasterManipulator extends AtlasMasterManipulator {\n\n    QueueStratumManipulator      getQueueStratumManipulator();\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/RuntimeVGraphMapper.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport java.util.List;\n\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.pinecone.slime.meta.TableIndex64Meta;\n\nimport com.walnut.odin.atlas.graph.entity.TaskAtlasNode;\nimport com.walnut.odin.atlas.graph.entity.TaskGraphNode;\n\n@SuppressWarnings(\"unchecked\")\n@IbatisDataAccessObject\npublic interface RuntimeVGraphMapper extends TaskGraphManipulator {\n    @Override\n    default void insertHandleNode( GraphNode graphNode ){\n        this.insertGraphNode(graphNode);\n    }\n\n    @Override\n    void insertGraphNode( @Param(\"graphNode\") GraphNode graphNode );\n\n    void insertNodeAdjacent( @Param(\"parentGuid\") GUID parentGuid, @Param(\"childGuid\") GUID childGuid );\n\n    @Override\n    default void insertNodeByEdge(GUID parentGuid, GraphNode graphNode){\n        this.insertGraphNode(graphNode);\n        this.insertNodeAdjacent(parentGuid,graphNode.getId());\n    }\n\n\n    default void addChild(GUID parentGuid, GraphNode graphNode) {\n        this.insertNodeAdjacent(parentGuid,graphNode.getId());\n    }\n\n    @Override\n    default void removeNode( GUID guid ) {\n        this.removeGraphNode(guid);\n        this.removeGraphAdjacent(guid);\n    }\n\n    void removeGraphNode(  @Param(\"guid\") GUID guid );\n\n    void removeGraphAdjacent(  @Param(\"guid\") GUID guid );\n\n    @Override\n    TaskAtlasNode queryNode( @Param(\"guid\") GUID guid );\n\n    @Override\n    TaskGraphNode getNodeByTaskGuid( @Param(\"taskGuid\") GUID taskGuid );\n\n    @Override\n    GUID queryTaskGuidByNodeId( GUID nodeId );\n\n    @Override\n    List<GUID> fetchParentIds( @Param(\"guid\") GUID guid );\n\n    List<TaskAtlasNode> fetchChildNodes0( 
@Param(\"guid\") GUID guid );\n\n    @Override\n    default List<GraphNode> fetchChildNodes( GUID guid ) {\n        return (List) this.fetchChildNodes0( guid );\n    }\n\n    @Override\n    List<GUID> fetchChildNodeGuids(GUID guid);\n\n\n    @Override\n    default List<GraphNode> fetchRootNodes() {\n        return (List) this.fetchRootNodes0();\n    }\n\n    List<TaskAtlasNode> fetchRootNodes0();\n\n    @Override\n    List<GUID> fetchChildNodeIds( @Param(\"guid\") GUID guid );\n\n    List<TaskAtlasNode> fetchNodesByName0(  @Param(\"name\") String name );\n\n\n    @Override\n    default List<GraphNode> fetchNodesByName( String name ) {\n        return (List) this.fetchNodesByName0( name );\n    }\n\n    @Override\n    @Update(\"UPDATE `hydra_atlas_vgraph_nodes` \" +\n            \"SET \" +\n            \"    `task_guid` = #{graphNode.taskGuid}, \" +\n            \"    `node_name` = #{graphNode.nodeName}, \" +\n            \"    `node_description` = #{graphNode.nodeDescription} \" +\n            \"WHERE `guid` = #{graphNode.guid}\")\n    void updateNode(  @Param(\"graphNode\") GraphNode graphNode );\n\n    @Override\n    List<GUID> fetchHandleGuids( @Param(\"offset\") long offset, @Param(\"limit\") long limit);\n\n    @Override\n    @Select(\"SELECT havn.guid \" +\n            \"FROM hydra_atlas_vgraph_nodes havn \" +\n            \"JOIN hydra_atlas_vgraph_task_mapping vatm ON havn.guid = vatm.vgraph_node_guid \" +\n            \"JOIN hydra_task_task_node httn ON vatm.task_guid = httn.guid \" +\n            \"WHERE NOT EXISTS (\" +\n            \"SELECT id FROM hydra_atlas_vgraph_adjacent hava WHERE hava.guid = havn.guid) \" +\n            \"ORDER BY httn.priority \" +\n            \"LIMIT #{limit} OFFSET #{offset}\")\n    List<GUID> fetchHandleGuidsByTaskPriority( @Param(\"offset\") long offset, @Param(\"limit\") long limit);\n\n    @Override\n    @Select(\"SELECT COUNT(havn.guid) \" +\n            \"FROM `hydra_atlas_vgraph_nodes` havn \" +\n            \"WHERE NOT EXISTS (SELECT `id` FROM
 `hydra_atlas_vgraph_adjacent` `hava` WHERE `hava`.guid = `havn`.guid)\")\n    long countSourceNodes();\n\n    @Override\n    List<GUID> fetchDownstreamNodeGuid( @Param(\"nodeGuid\") GUID nodeGuid, @Param(\"offset\") long offset, @Param(\"limit\") long limit);\n\n    @Override\n    List<GUID> fetchUpstreamNodeGuid( @Param(\"nodeGuid\") GUID nodeGuid, @Param(\"offset\") long offset, @Param(\"limit\") long limit);\n\n    @Override\n    long queryInDegree( @Param(\"nodeGuid\") GUID nodeGuid);\n\n    @Override\n    long queryOutDegree( @Param(\"nodeGuid\") GUID nodeGuid);\n\n    @Override\n    long getPriorityByInDegree(@Param(\"guid\") GUID guid);\n\n\n    @Override\n    List<GUID> limitFetchChildNodeGuids(@Param(\"offset\") long offset, @Param(\"limit\") long limit, @Param(\"guid\") GUID guid);\n\n    @Override\n    long countChildNodeNums(GUID guid);\n\n    @Override\n    void addChild( @Param(\"parentGuid\") GUID parentGuid, @Param(\"childGuid\") GUID childGuid );\n\n\n\n\n\n\n    List<TaskAtlasNode> fetchIsolatedNodes0(\n        @Param(\"offset\") long offset,\n        @Param(\"limit\") long limit\n    );\n\n    @Override\n    default List<GraphNode> fetchIsolatedNodes( long offset, long limit ) {\n        return ( List ) this.fetchIsolatedNodes0( offset, limit );\n    }\n\n\n    List<TaskAtlasNode> fetchIsolatedNodesById0(\n        @Param(\"idStart\") long idStart, @Param(\"idEnd\") long idEnd\n    );\n\n    @Override\n    default List<GraphNode> fetchIsolatedNodesById( long idStart, long idEnd ) {\n        return ( List ) this.fetchIsolatedNodesById0( idStart, idEnd );\n    }\n\n    @Override\n    long countIsolatedNodes();\n\n    @Override\n    TableIndex64Meta selectIsolatedNodeIndexMeta();\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/RuntimeVectorGraphPathCacheMapper.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphPathCacheManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\n\nimport java.util.List;\n\n@IbatisDataAccessObject\npublic interface RuntimeVectorGraphPathCacheMapper extends VectorGraphPathCacheManipulator {\n    @Override\n    @Insert(\"INSERT INTO hydra_atlas_vgraph_cache_path (`guid`, `path`) VALUES (#{guid},#{path})\")\n    void insert(@Param(\"path\") String path, @Param(\"guid\") GUID guid);\n\n    @Override\n    @Insert(\"INSERT INTO hydra_atlas_vgraph_cache_path (guid, path, long_path) VALUES (#{guid},#{path},#{longPath})\")\n    void insertLongPath( GUID guid, String path, String longPath );\n\n    @Override\n    @Delete(\"DELETE FROM `hydra_atlas_vgraph_cache_path` WHERE `guid` = #{guid}\")\n    void remove ( GUID guid );\n\n    @Override\n    @Select(\"SELECT `path` FROM `hydra_atlas_vgraph_cache_path` WHERE `guid` = #{guid}\")\n    List<String> getPath (GUID guid );\n\n    @Override\n    @Select(\"SELECT `guid` FROM `hydra_atlas_vgraph_cache_path` WHERE `path` = #{path}\")\n    GUID getNode ( String path );\n\n    @Override\n    @Select(\"SELECT `guid` FROM `hydra_atlas_vgraph_cache_path` WHERE `path` = #{path}\")\n    GUID queryGUIDByPath( String path );\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/java/com/walnut/odin/atlas/mapper/TaskGraphManipulator.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;\n\nimport com.walnut.odin.atlas.graph.entity.TaskGraphNode;\n\npublic interface TaskGraphManipulator extends VectorGraphManipulator {\n\n    @Override\n    TaskGraphNode queryNode( GUID guid );\n\n    TaskGraphNode getNodeByTaskGuid( GUID taskGuid );\n\n    GUID queryTaskGuidByNodeId( GUID nodeId );\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/InstanceAtlasAdjacentMapper.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE mapper PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\"\n        \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">\n<mapper namespace=\"com.walnut.odin.task.mapper.InstanceAtlasAdjacentMapper\">\n\n    <insert id=\"insert\" parameterType=\"com.walnut.odin.conduct.entity.GenericInstanceAtlasAdjacent\">\n        INSERT INTO odin_taks_ins_atlas_adjacent (\n            guid,\n            parent_guid\n        ) VALUES (\n                     #{guid},\n                     #{parentGuid}\n                 )\n    </insert>\n\n</mapper>"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/InstanceAtlasNodeMapper.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE mapper PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\"\n        \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">\n<mapper namespace=\"com.walnut.odin.task.mapper.InstanceAtlasNodeMapper\">\n\n    <insert id=\"insert\" parameterType=\"com.walnut.odin.conduct.entity.GenericInstanceAtlasNode\">\n        INSERT INTO odin_task_instance_atlas_nodes (\n            guid,\n            instance_guid,\n            node_name,\n            is_isolated\n        ) VALUES (\n                     #{guid},\n                     #{instanceGuid},\n                     #{nodeName},\n                     #{isIsolated}\n                 )\n    </insert>\n\n</mapper>"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/InstanceEventMapper.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE mapper PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\"\n        \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">\n<mapper namespace=\"com.walnut.odin.task.mapper.InstanceEventMapper\">\n\n    <insert id=\"insert\" parameterType=\"com.walnut.odin.conduct.entity.GenericInstanceEvent\">\n        INSERT INTO odin_task_instance_event (\n            guid,\n            task_guid,\n            instance_guid,\n            instance_name,\n            retry_times,\n            current_retry_number,\n            event_type,\n            state,\n            event_context,\n            exec_time\n        ) VALUES (\n                     #{guid},\n                     #{taskGuid},\n                     #{instanceGuid},\n                     #{instanceName},\n                     #{retryTimes},\n                     #{currentRetryNumber},\n                     #{eventType},\n                     #{state},\n                     #{eventContext},\n                     #{execTime}\n                 )\n    </insert>\n\n</mapper>"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/InstanceManipulator.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE mapper PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\"\n        \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">\n<mapper namespace=\"com.walnut.odin.task.mapper.InstanceExecMapper\">\n\n    <insert id=\"insert\" parameterType=\"com.walnut.odin.conduct.entity.GenericInstanceExec\" useGeneratedKeys=\"true\" keyProperty=\"id\">\n        INSERT INTO odin_task_instance_exec (\n            task_guid,\n            instance_guid,\n            task_name,\n            instance_name,\n            processor_queue,\n            cluster_name,\n            exec_state,\n            current_retry_number,\n            retry_times,\n            start_time,\n            run_time,\n            finish_time\n        ) VALUES (\n                     #{taskGuid},\n                     #{instanceGuid},\n                     #{taskName},\n                     #{instanceName},\n                     #{processorQueue},\n                     #{clusterName},\n                     #{execState},\n                     #{currentRetryNumber},\n                     #{retryTimes},\n                     #{startTime},\n                     #{runTime},\n                     #{finishTime}\n                 )\n    </insert>\n\n    <update id=\"updateStateByInstanceGuid\" parameterType=\"com.walnut.odin.conduct.entity.GenericInstanceExec\">\n        UPDATE odin_task_instance_exec\n        <set>\n            <if test=\"execState != null and execState != ''\">\n                exec_state = #{execState},\n            </if>\n            <if test=\"startTime != null\">\n                start_time = #{startTime},\n            </if>\n            <if test=\"runTime != null\">\n                run_time = #{runTime},\n            </if>\n            <if test=\"finishTime != null\">\n                finish_time = #{finishTime},\n            </if>\n            <if test=\"currentRetryNumber != null\">\n                current_retry_number = 
#{currentRetryNumber},\n            </if>\n        </set>\n        WHERE instance_guid = #{instanceGuid}\n    </update>\n\n</mapper>"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/QueueStratumMapper.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE mapper\n        PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\"\n        \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">\n\n<mapper namespace=\"com.walnut.odin.atlas.mapper.QueueStratumMapper\">\n\n    <select id=\"querySegmentName\" resultType=\"java.lang.String\">\n        SELECT\n            `segment_name`\n        FROM\n            `hydra_atlas_queue_stratum`\n        WHERE\n            `vgraph_guid`     = #{vgraphGuid}\n          AND `stratum_id`  = #{stratumId}\n          AND `runtime_priority` = #{runtimePriority}\n    </select>\n\n    <select id=\"countStratum\" resultType=\"java.lang.Integer\">\n        SELECT\n            `stratum_id`\n        FROM\n            `hydra_atlas_queue_stratum`\n        WHERE\n            `vgraph_guid` = #{vgraphGuid}\n        ORDER BY\n            `stratum_id` DESC\n            LIMIT 1\n    </select>\n\n    <select id=\"countPriority\" resultType=\"java.lang.Integer\">\n        SELECT\n            `runtime_priority`\n        FROM\n            `hydra_atlas_queue_stratum`\n        WHERE\n            `vgraph_guid` = #{vgraphGuid}\n          AND `stratum_id` = #{stratumId}\n        ORDER BY\n            `runtime_priority` DESC\n            LIMIT 1\n    </select>\n\n    <insert id=\"put\">\n        INSERT INTO `hydra_atlas_queue_stratum`\n        ( `runtime_priority`,\n          `stratum_id`,\n          `segment_name`,\n          `vgraph_guid` )\n        VALUES\n            ( #{runtimePriority},\n              #{stratumId},\n              #{segmentName},\n              #{vgraphGuid} )\n    </insert>\n\n</mapper>"
  },
  {
    "path": "Odin/odin-framework-atlas/src/main/resources/mapper/kernel/task/RuntimeVGraphMapper.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\" ?>\n<!DOCTYPE mapper\n        PUBLIC \"-//mybatis.org//DTD Mapper 3.0//EN\"\n        \"http://mybatis.org/dtd/mybatis-3-mapper.dtd\">\n\n<mapper namespace=\"com.walnut.odin.atlas.mapper.RuntimeVGraphMapper\">\n\n    <!-- ================= INSERT ================= -->\n\n    <insert id=\"insertGraphNode\">\n        INSERT INTO `hydra_atlas_vgraph_nodes`\n        ( `guid`,\n          `task_guid`,\n          `node_name`,\n          `node_description` )\n        VALUES\n            ( #{graphNode.guid},\n              #{graphNode.taskGuid},\n              #{graphNode.name},\n              #{graphNode.description} )\n    </insert>\n\n    <insert id=\"insertNodeAdjacent\">\n        INSERT INTO `hydra_atlas_vgraph_adjacent`\n        ( `guid`,\n          `linked_type`,\n          `parent_guid` )\n        VALUES\n            ( #{childGuid},\n              'owned',\n              #{parentGuid} )\n    </insert>\n\n    <delete id=\"removeGraphNode\">\n        DELETE FROM `hydra_atlas_vgraph_nodes`\n        WHERE `guid` = #{guid}\n    </delete>\n\n    <delete id=\"removeGraphAdjacent\">\n        DELETE FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `guid` = #{guid}\n    </delete>\n\n    <!-- ================= RESULT MAP ================= -->\n\n    <resultMap id=\"TaskAtlasNodeMap\" type=\"com.walnut.odin.atlas.graph.entity.TaskAtlasNode\">\n        <id     column=\"id\"                property=\"enumId\"/>\n        <result column=\"guid\"              property=\"guid\"/>\n        <result column=\"task_guid\"         property=\"taskGuid\"/>\n        <result column=\"node_name\"         property=\"name\"/>\n        <result column=\"node_description\"  property=\"description\"/>\n        <result column=\"is_isolated\"       property=\"isolated\"/>\n    </resultMap>\n\n    <!-- ================= BASIC QUERY ================= -->\n\n    <select id=\"queryNode\" resultMap=\"TaskAtlasNodeMap\">\n        SELECT\n            
`id`,\n            `guid`,\n            `task_guid`,\n            `node_name`,\n            `node_description`,\n            `is_isolated`,\n            `create_time`,\n            `update_time`\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE `guid` = #{guid}\n    </select>\n\n    <select id=\"getNodeByTaskGuid\" resultMap=\"TaskAtlasNodeMap\">\n        SELECT\n            `id`,\n            `guid`,\n            `task_guid`,\n            `node_name`,\n            `node_description`,\n            `is_isolated`,\n            `create_time`,\n            `update_time`\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE `task_guid` = #{taskGuid}\n    </select>\n\n    <select id=\"queryTaskGuidByNodeId\"\n            resultType=\"com.pinecone.framework.util.id.GUID\">\n        SELECT `task_guid`\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE `guid` = #{nodeId}\n    </select>\n\n    <select id=\"fetchParentIds\" resultType=\"com.pinecone.framework.util.id.GUID\">\n        SELECT `parent_guid`\n        FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `guid` = #{guid}\n    </select>\n\n    <!-- ================= CHILD / ROOT ================= -->\n\n    <select id=\"fetchChildNodes0\" resultMap=\"TaskAtlasNodeMap\">\n        SELECT\n            vnodes.`id`,\n            vnodes.`guid`,\n            vnodes.`task_guid`,\n            vnodes.`node_name`,\n            vnodes.`node_description`,\n            vnodes.`is_isolated`,\n            vnodes.`create_time`,\n            vnodes.`update_time`\n        FROM `hydra_atlas_vgraph_nodes` AS vnodes\n                 JOIN `hydra_atlas_vgraph_adjacent` AS vadj\n                      ON vnodes.`guid` = vadj.`guid`\n        WHERE vadj.`parent_guid` = #{guid}\n    </select>\n\n    <select id=\"fetchChildNodeGuids\" resultType=\"com.pinecone.framework.util.id.GUID\">\n        SELECT havn.`guid`\n        FROM `hydra_atlas_vgraph_nodes` havn\n                 JOIN `hydra_atlas_vgraph_adjacent` hava\n                   
   ON havn.`guid` = hava.`guid`\n        WHERE hava.`parent_guid` = #{guid}\n    </select>\n\n    <select id=\"fetchRootNodes0\" resultMap=\"TaskAtlasNodeMap\">\n        SELECT\n            `id`,\n            `guid`,\n            `task_guid`,\n            `node_name`,\n            `node_description`,\n            `is_isolated`,\n            `create_time`,\n            `update_time`\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE NOT EXISTS (\n            SELECT 1\n            FROM `hydra_atlas_vgraph_adjacent`\n            WHERE `guid` = `hydra_atlas_vgraph_nodes`.`guid`\n        )\n    </select>\n\n    <select id=\"fetchChildNodeIds\" resultType=\"com.pinecone.framework.util.id.GUID\">\n        SELECT `guid`\n        FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `parent_guid` = #{guid}\n    </select>\n\n    <select id=\"fetchNodesByName0\" resultMap=\"TaskAtlasNodeMap\">\n        SELECT\n            `id`,\n            `guid`,\n            `task_guid`,\n            `node_name`,\n            `node_description`,\n            `is_isolated`,\n            `create_time`,\n            `update_time`\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE `node_name` = #{name}\n    </select>\n\n    <!-- ================= PAGING ================= -->\n\n    <select id=\"fetchHandleGuids\" resultType=\"com.pinecone.framework.util.id.GUID\">\n        SELECT `guid`\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE NOT EXISTS (\n            SELECT 1\n            FROM `hydra_atlas_vgraph_adjacent`\n            WHERE `guid` = `hydra_atlas_vgraph_nodes`.`guid`\n        )\n            LIMIT #{limit} OFFSET #{offset}\n    </select>\n\n    <select id=\"fetchDownstreamNodeGuid\" resultType=\"com.pinecone.framework.util.id.GUID\">\n        SELECT `guid`\n        FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `parent_guid` = #{nodeGuid}\n            LIMIT #{limit} OFFSET #{offset}\n    </select>\n\n    <select id=\"fetchUpstreamNodeGuid\"
 resultType=\"com.pinecone.framework.util.id.GUID\">\n        SELECT `parent_guid`\n        FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `guid` = #{nodeGuid}\n            LIMIT #{limit} OFFSET #{offset}\n    </select>\n\n    <!-- ================= DEGREE ================= -->\n\n    <select id=\"queryInDegree\" resultType=\"long\">\n        SELECT COUNT(*)\n        FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `guid` = #{nodeGuid}\n    </select>\n\n    <select id=\"queryOutDegree\" resultType=\"long\">\n        SELECT COUNT(*)\n        FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `parent_guid` = #{nodeGuid}\n    </select>\n\n    <select id=\"getPriorityByInDegree\" resultType=\"long\">\n        SELECT COUNT(*) + 1\n        FROM (\n                 SELECT\n                     `guid`,\n                     COUNT(*) AS `degree`\n                 FROM `hydra_atlas_vgraph_adjacent`\n                 GROUP BY `guid`\n             ) AS degree_table\n        WHERE degree_table.`degree` > (\n            SELECT COUNT(*)\n            FROM `hydra_atlas_vgraph_adjacent`\n            WHERE `guid` = #{guid}\n        )\n    </select>\n\n\n    <!-- ================= CHILD GUID PAGING ================= -->\n\n    <select id=\"limitFetchChildNodeGuids\" resultType=\"com.pinecone.framework.util.id.GUID\">\n        SELECT `guid`\n        FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `parent_guid` = #{guid}\n            LIMIT #{limit} OFFSET #{offset}\n\n    </select>\n\n\n    <!-- ================= CHILD COUNT ================= -->\n\n    <select id=\"countChildNodeNums\" resultType=\"long\">\n        SELECT COUNT(*)\n        FROM `hydra_atlas_vgraph_adjacent`\n        WHERE `parent_guid` = #{guid}\n\n    </select>\n\n\n    <!-- ================= INSERT CHILD ================= -->\n\n    <insert id=\"addChild\">\n        INSERT INTO `hydra_atlas_vgraph_adjacent`\n        ( `guid`,\n          `linked_type`,\n          `parent_guid` )\n        VALUES\n            (
 #{childGuid},\n              'owned',\n              #{parentGuid} )\n\n    </insert>\n\n\n\n\n\n\n\n\n    <!-- ================= ISOLATED ================= -->\n\n    <select id=\"fetchIsolatedNodesById0\" resultMap=\"TaskAtlasNodeMap\">\n        SELECT\n            `id`,\n            `guid`,\n            `task_guid`,\n            `node_name`,\n            `node_description`,\n            `is_isolated`,\n            `create_time`,\n            `update_time`\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE `is_isolated` = 1\n          AND `id` BETWEEN #{idStart} AND #{idEnd}\n        ORDER BY `id` ASC\n\n    </select>\n\n    <select id=\"fetchIsolatedNodes0\" resultMap=\"TaskAtlasNodeMap\">\n        SELECT\n            `id`,\n            `guid`,\n            `task_guid`,\n            `node_name`,\n            `node_description`,\n            `is_isolated`,\n            `create_time`,\n            `update_time`\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE `is_isolated` = 1\n        ORDER BY `id` ASC\n            LIMIT #{limit} OFFSET #{offset}\n    </select>\n\n    <select id=\"countIsolatedNodes\" resultType=\"long\">\n        SELECT COUNT(*)\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE `is_isolated` = 1\n    </select>\n\n    <select id=\"selectIsolatedNodeIndexMeta\" resultType=\"com.pinecone.slime.meta.TableIndex64Meta\">\n        SELECT\n            COALESCE( MIN(`id`), 0 ) AS minId,\n            COALESCE( MAX(`id`), 0 ) AS maxId\n        FROM `hydra_atlas_vgraph_nodes`\n        WHERE `is_isolated` = 1\n    </select>\n\n</mapper>"
  },
  {
    "path": "Odin/odin-framework-conduct/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>odin</artifactId>\n        <groupId>com.walnut.odin</groupId>\n        <version>2.5.1</version>\n    </parent>\n\n    <artifactId>odin-framework-conduct</artifactId>\n    <version>2.5.1</version>\n    <modelVersion>4.0.0</modelVersion>\n\n    <dependencies>\n        <dependency>\n            <groupId>org.quartz-scheduler</groupId>\n            <artifactId>quartz</artifactId>\n            <version>2.3.2</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-architecture</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            
<groupId>com.walnut.odin</groupId>\n            <artifactId>odin-framework-runtime</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-framework-atlas</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n</project>"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/ProcessorLifecycleController.java",
    "content": "package com.walnut.odin.conduct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\nimport com.walnut.odin.conduct.entity.RegimentJoinRequest;\nimport com.walnut.odin.conduct.entity.RegimentJoinResponse;\n\n@Controller\n@AddressMapping( \"com.walnut.odin.conduct.ProcessorLifecycleIface.\" )\npublic class ProcessorLifecycleController implements Pinenut {\n\n    private CollectiveTaskRegiment collectiveTaskRegiment;\n\n    public ProcessorLifecycleController( CollectiveTaskRegiment collectiveTaskRegiment ) {\n        this.collectiveTaskRegiment = collectiveTaskRegiment;\n    }\n\n    @AddressMapping( \"joinRegiment\" )\n    public RegimentJoinResponse joinRegiment( RegimentJoinRequest request ) {\n        return this.collectiveTaskRegiment.invokeJoinRegiment( request );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/ProcessorLifecycleIface.java",
    "content": "package com.walnut.odin.conduct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.stereotype.Iface;\nimport com.walnut.odin.conduct.entity.RegimentJoinRequest;\nimport com.walnut.odin.conduct.entity.RegimentJoinResponse;\n\n@Iface\npublic interface ProcessorLifecycleIface extends Pinenut {\n\n    RegimentJoinResponse joinRegiment( RegimentJoinRequest request );\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/RavenCollectiveTaskLegionary.java",
    "content": "package com.walnut.odin.conduct;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.system.construction.Postpone;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UniformProcessManager;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.walnut.odin.conduct.entity.RegimentJoinRequest;\nimport com.walnut.odin.conduct.entity.RegimentJoinResponse;\nimport com.walnut.odin.proc.RemoteProcessServiceRPCException;\nimport com.walnut.odin.proc.client.RavenRemoteProcessManagerClient;\nimport com.walnut.odin.proc.client.RemoteProcessManagerClient;\n\npublic class RavenCollectiveTaskLegionary implements CollectiveTaskLegionary {\n\n    protected String                           mszNodeName;\n    protected RemoteProcessManagerClient       mRemoteProcessManagerClient;\n    protected ProcessManager                   mLocalProcessManager;\n    protected ProcessorLifecycleIface          mProcessLifecycleIface;\n\n    protected Logger                           mLogger;\n\n    protected RavenCollectiveTaskLegionary( ProcessManager processManager, @Postpone RemoteProcessManagerClient pmClient, String szNodeName ) {\n        this.mszNodeName                 = szNodeName;\n        this.mLocalProcessManager        = processManager;\n        this.mRemoteProcessManagerClient = pmClient;\n        this.mLogger                     = LoggerFactory.getLogger( this.getClass() );\n    }\n\n    public RavenCollectiveTaskLegionary( String szNodeName, ProcessManager processManager, RemoteProcessManagerClient pmClient ) {\n        this( processManager, pmClient, szNodeName );\n    }\n\n    public RavenCollectiveTaskLegionary( String szNodeName, Processum superiorProcess, UlfClient rpcClient ) {\n        this(\n                new 
UniformProcessManager(\n                        superiorProcess, null, ( szNodeName + \"-process-manager\" ).toLowerCase(), \"\", null\n                ),\n                null,\n                szNodeName\n        );\n\n        this.mRemoteProcessManagerClient = new RavenRemoteProcessManagerClient( this.mLocalProcessManager, rpcClient );\n    }\n\n\n    @Override\n    public String getName() {\n        return this.mszNodeName;\n    }\n\n    @Override\n    public long getClientId() {\n        return this.mRemoteProcessManagerClient.getClientId();\n    }\n\n    @Override\n    public ProcessManager processManager() {\n        return this.mLocalProcessManager;\n    }\n\n    @Override\n    public RemoteProcessManagerClient remoteProcessManagerClient() {\n        return this.mRemoteProcessManagerClient;\n    }\n\n    @Override\n    public void startService () throws RemoteProcessServiceRPCException {\n        this.mRemoteProcessManagerClient.startService();\n\n        DuplexAppointClient duplexAppointClient = this.mRemoteProcessManagerClient.duplexAppointClient();\n        duplexAppointClient.compile( ProcessorLifecycleIface.class,false );\n        this.mProcessLifecycleIface = duplexAppointClient.getIface( ProcessorLifecycleIface.class );\n    }\n\n    @Override\n    public RegimentJoinResponse joinRegiment() throws RegimentException {\n        RegimentJoinRequest request = new RegimentJoinRequest();\n        request.setClientId( this.mRemoteProcessManagerClient.getClientId() );\n        request.setNodeName( this.mszNodeName );\n        RegimentJoinResponse response = this.mProcessLifecycleIface.joinRegiment( request );\n        if ( response == null ) {\n            throw new RegimentException( \"response is null\" );\n        }\n        else if ( StringUtils.isNoneEmpty( response.getErrorMsg() ) ) {\n            throw new RegimentException( response.getErrorMsg() );\n        }\n\n        this.mLogger.info(\n                \"[NewProcessorRegister] \" +\n              
  \"( name:`{}`, clientId:`{}`, clusterPath:`{}`, priority:`{}`, queueMaxCapacity:`{}`, runtimeCapacity:`{}` ) \" +\n                \"<RegimentServerAck>\",\n\n                response.getName(), response.getControlClientId(), response.getClusterPath(), response.getPriority(),\n                response.getQueueMaxCapacity(), response.getQueueRuntimeInstanceCapacity()\n        );\n        return response;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/RavenCollectiveTaskRegiment.java",
    "content": "package com.walnut.odin.conduct;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.io.Tracer;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.ProcessManagerSystema;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.component.LogStatuses;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.pinecone.hydra.umc.wolf.server.UlfServer;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.walnut.odin.conduct.entity.LaunchedContext;\nimport com.walnut.odin.conduct.entity.RegimentJoinRequest;\nimport com.walnut.odin.conduct.entity.RegimentJoinResponse;\nimport com.walnut.odin.dispatch.RavenTaskDispatcher;\nimport com.walnut.odin.dispatch.TaskDispatchException;\nimport com.walnut.odin.dispatch.TaskDispatcher;\nimport com.walnut.odin.dispatch.TaskQueueMeta;\nimport com.walnut.odin.dispatch.entity.TaskProcessorEntity;\nimport com.walnut.odin.proc.RemoteProcessServiceRPCException;\nimport com.walnut.odin.proc.server.RavenRemoteProcessManagerServer;\nimport com.walnut.odin.proc.server.RemoteProcessManagerServer;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.troll.GenericRavenTask;\nimport com.walnut.odin.task.RavenTask;\nimport com.walnut.odin.task.troll.InstanceLaunchException;\nimport com.walnut.odin.task.troll.LaunchFeature;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\nimport com.walnut.odin.task.troll.TrollTaskExecutionLauncher;\n\n\npublic class RavenCollectiveTaskRegiment implements CollectiveTaskRegiment {\n\n    protected Hydrogen                      mSystem;\n\n    protected Logger  
                      mLogger;\n\n    protected CentralizedTaskInstrument     mTaskInstrument;\n\n    protected ProcessManager                mProcessManager;\n\n    protected RemoteProcessManagerServer    mRemoteProcessManagerServer;\n\n    protected TaskExecutionLauncher         mTaskExecutionLauncher;\n\n    protected TaskDispatcher                mTaskDispatcher;\n\n\n\n\n\n    protected void traceWelcomeInfo() {\n        Tracer console = this.mSystem.console();\n        console.getOut().print( \"---------------------------------------------------------------\\n\" );\n        console.getOut().print( \"\\u001B[34mRaven Odin Collective Task Regiment\\u001B[0m\\n\" );\n        console.getOut().print( \"\\u001B[34mCentralized task lifecycle management and deployment system.\\u001B[0m\\n\" );\n        console.getOut().print( \"---------------------------------------------------------------\\n\" );\n\n        this.infoLifecycle( \"Welcome to use Odin task orchestration system.\", LogStatuses.StatusReady );\n    }\n\n    protected void prepare_odin_collective_regiment_subsystem() {\n        this.infoLifecycle( \"Preparing Odin`s army, constructing task-regiment.\", LogStatuses.StatusStart );\n\n        this.mTaskExecutionLauncher = new TrollTaskExecutionLauncher( this );\n        this.infoLifecycle( \"TaskExecutionLauncher: `\" + this.mTaskExecutionLauncher.getClass().getName() + \"` <Constructed>.\", LogStatuses.StatusDone );\n\n        this.mTaskDispatcher = new RavenTaskDispatcher( this );\n        this.infoLifecycle( \"TaskDispatcher: `\" + this.mTaskDispatcher.getClass().getName() + \"` <Constructed>.\", LogStatuses.StatusDone );\n\n        this.traceWelcomeInfo();\n    }\n\n    public RavenCollectiveTaskRegiment( Hydrogen system, CentralizedTaskInstrument taskInstrument, ProcessManager processManager, RemoteProcessManagerServer remoteProcessManagerServer ) {\n        this.mSystem                      = system;\n        this.mTaskInstrument              = 
taskInstrument;\n        this.mProcessManager              = processManager;\n        this.mRemoteProcessManagerServer  = remoteProcessManagerServer;\n        this.mLogger                      = LoggerFactory.getLogger( \"OdinCollectiveTaskRegiment\" );\n\n        this.prepare_odin_collective_regiment_subsystem();\n    }\n\n    public RavenCollectiveTaskRegiment( ProcessManagerSystema system, CentralizedTaskInstrument taskInstrument, RemoteProcessManagerServer remoteProcessManagerServer ) {\n        this( system, taskInstrument, system.processManager(), remoteProcessManagerServer );\n    }\n\n    public RavenCollectiveTaskRegiment( ProcessManagerSystema system, CentralizedTaskInstrument taskInstrument, UlfServer rpcServer ) {\n        this( system, taskInstrument, system.processManager(), new RavenRemoteProcessManagerServer( system.processManager(), rpcServer ) );\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    @Override\n    public RemoteProcessManagerServer remoteProcessManagerServer() {\n        return this.mRemoteProcessManagerServer;\n    }\n\n    @Override\n    public void startRemoteProcessServer() throws RemoteProcessServiceRPCException {\n        this.mRemoteProcessManagerServer.startService();\n\n        ProcessorLifecycleController controller = new ProcessorLifecycleController( this );\n        DuplexAppointServer duplexAppointServer = this.mRemoteProcessManagerServer.duplexAppointServer();\n        duplexAppointServer.registerController( controller );\n        duplexAppointServer.compile( ProcessorLifecycleIface.class, false );\n    }\n\n    @Override\n    public ProcessManager processManager() {\n        return this.mProcessManager;\n    }\n\n    @Override\n    public CentralizedTaskInstrument taskInstrument() {\n        return this.mTaskInstrument;\n    }\n\n    @Override\n    public TaskExecutionLauncher taskExecutionLauncher() {\n        return this.mTaskExecutionLauncher;\n    }\n\n    
@Override\n    public TaskDispatcher taskDispatcher() {\n        return this.mTaskDispatcher;\n    }\n\n    @Override\n    public RavenTask queryTaskByPath( String path ) {\n        GUID objGuid = this.mTaskInstrument.queryGUIDByPath( path );\n        if ( objGuid == null ) {\n            return null;\n        }\n\n        return this.getTaskByGuid( objGuid );\n    }\n\n    @Override\n    public RavenTask getTaskByGuid( GUID taskGuid ) {\n        TreeNode treeNode = this.mTaskInstrument.get( taskGuid );\n        if ( !(treeNode instanceof TaskElement) ) {\n            throw new IllegalArgumentException( \"Object node `\" + taskGuid + \"` is not task.\" );\n        }\n        TaskElement taskElement = (TaskElement) treeNode;\n\n        return this.mTaskInstrument.constructTask( taskElement );\n    }\n\n    @Override\n    public RavenTask createTask( TaskElement taskElement, Identification serviceId ) {\n        RavenTask task = this.mTaskInstrument.createTask( taskElement, serviceId );\n        return task;\n    }\n\n    @Override\n    public RavenTask affirmTask( String path, Identification serviceId, TaskElement metaInfos ) {\n        TaskElement taskElement = this.mTaskInstrument.affirmTask( path ,metaInfos );\n        Debug.trace(taskElement);\n    /*    taskElement.setActuallyPriority( metaInfos.getActuallyPriority() );\n        taskElement.setDeploymentMethod( metaInfos.getDeploymentMethod() );\n        taskElement.setEnable( metaInfos.isEnable());\n        taskElement.setDryRun( metaInfos.isDryRun() );\n        taskElement.setPriority( metaInfos.getPriority() );\n        taskElement.setResourceType( metaInfos.getResourceType() );\n        taskElement.setScheduleCycle( metaInfos.getScheduleCycle() );\n        taskElement.setScheduleType( metaInfos.getScheduleType() );\n        taskElement.setScheduleTypeCode( metaInfos.getScheduleTypeCode() );\n        taskElement.setScheduleCycleCode( metaInfos.getScheduleCycleCode() );\n        taskElement.setType( 
metaInfos.getType() );\n        taskElement.setImagePath( metaInfos.getImagePath() );\n        taskElement.setName( metaInfos.getName() );\n        taskElement.setGuid( metaInfos.getGuid() );*/\n        /*this.mTaskInstrument.get(  taskElement.getGuid());*/\n        /*this.mTaskInstrument.query(  taskElement.getGuid() );*/\n\n     /*  String newPath = this.mTaskInstrument.getPath(  taskElement.getGuid());\n        Debug.trace(newPath);*/\n\n        this.updateTaskMeta( taskElement );\n        String newPath = this.mTaskInstrument.getPath( taskElement.getGuid() );\n        Debug.trace(newPath);\n        return this.mTaskInstrument.constructTask( taskElement, serviceId );\n    }\n\n    @Override\n    public void purgeTask( GUID guid ) {\n        GenericRavenTask  task = (GenericRavenTask) this.getTaskByGuid( guid );\n        task.removeInstance( guid );\n        this.mTaskInstrument.remove( guid );\n    }\n\n    public void updateTaskMeta( RavenTask task ) {\n        this.updateTaskMeta( task.getTaskElement() );\n    }\n\n    public void updateTaskMeta( TaskElement taskElement ) {\n        this.mTaskInstrument.update( taskElement );\n    }\n\n\n\n\n    protected LaunchedContext launch0( GUID taskGuid, LaunchFeature feature, boolean launch ) throws InstanceLaunchException, TaskDispatchException {\n        RavenTask task = this.getTaskByGuid( taskGuid );\n        RavenTaskInstance instance = task.createInstance();\n\n        UProcess process;\n        if ( launch ) {\n            process = this.mTaskDispatcher.launch( instance, feature );\n        }\n        else {\n            process = this.mTaskDispatcher.create( instance, feature );\n        }\n\n        LaunchedContext context = new LaunchedContext( process, instance );\n        return context;\n    }\n\n    @Override\n    public LaunchedContext create( GUID taskGuid, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException {\n        return this.launch0( taskGuid, feature, false );\n    }\n\n 
   @Override\n    public LaunchedContext launch( GUID taskGuid, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException {\n        return this.launch0( taskGuid, feature, true );\n    }\n\n\n    @Override\n    public LaunchedContext create( String path, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException {\n        GUID taskGuid = this.mTaskInstrument.queryGUIDByPath( path );\n        if ( taskGuid == null ) {\n            throw new IllegalArgumentException( \"Task `\" + path + \"` is not task.\" );\n        }\n        return this.create( taskGuid, feature );\n    }\n\n    @Override\n    public LaunchedContext launch( String path, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException {\n        GUID taskGuid = this.mTaskInstrument.queryGUIDByPath( path );\n        if ( taskGuid == null ) {\n            throw new IllegalArgumentException( \"Task `\" + path + \"` is not task.\" );\n        }\n        return this.launch( taskGuid, feature );\n    }\n\n    @Override\n    public RegimentJoinResponse invokeJoinRegiment( RegimentJoinRequest request ) {\n        RegimentJoinResponse response = new RegimentJoinResponse();\n        try {\n            TaskProcessorEntity entity = this.mTaskDispatcher.registerProcessor( request.getNodeName(), request.getClientId() );\n\n            response.setGuid( entity.getGuid().toString() );\n            response.setName( entity.getName() );\n            response.setClusterPath( entity.getClusterPath() );\n            response.setClusterName( entity.getClusterName() );\n            response.setControlClientId( entity.getControlClientId() );\n            response.setPriority( entity.getPriority() );\n\n            TaskQueueMeta queueMeta = entity.getTaskQueueMeta();\n            response.setQueueName( queueMeta.getName() );\n            response.setQueueMaxCapacity( queueMeta.getMaxCapacity() );\n            response.setQueueMinCapacity( queueMeta.getMinCapacity() );\n 
           response.setQueueRuntimeInstanceCapacity( queueMeta.getRuntimeInstanceCapacity() );\n\n            this.mLogger.info(\n                    \"[NewProcessorRegister] \" +\n                    \"( name:`{}`, clientId:`{}`, clusterPath:`{}`, priority:`{}`, queueMaxCapacity:`{}`, runtimeCapacity:`{}` ) \" +\n                    \"<Done>\",\n\n                    entity.getName(), entity.getControlClientId(), entity.getClusterPath(), entity.getPriority(),\n                    queueMeta.getMaxCapacity(), queueMeta.getRuntimeInstanceCapacity()\n            );\n        }\n        catch ( IllegalArgumentException e ) {\n            response.setErrorMsg( e.getMessage() );\n        }\n\n        return response;\n    }\n\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/RavenProcessorDeployManager.java",
    "content": "package com.walnut.odin.conduct;\n\nimport com.pinecone.hydra.uma.DuplexAppointServer;\n\npublic class RavenProcessorDeployManager implements ProcessorDeployManager {\n\n    protected CollectiveTaskRegiment    mCollectiveTaskRegiment;\n\n    protected DuplexAppointServer       mDuplexAppointServer;\n\n    public RavenProcessorDeployManager( CollectiveTaskRegiment regiment ) {\n        this.mCollectiveTaskRegiment = regiment;\n        this.mDuplexAppointServer = this.mCollectiveTaskRegiment.remoteProcessManagerServer().duplexAppointServer();\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/ConfigurableTaskGraphOrchestratorConfig.java",
    "content": "package com.walnut.odin.conduct.dag;\n\npublic class ConfigurableTaskGraphOrchestratorConfig implements TaskGraphOrchestratorConfig {\n\n    private String queueNodesTableName;\n    private String temporaryQueueNodesTableName;\n\n    public ConfigurableTaskGraphOrchestratorConfig() {\n        this.queueNodesTableName          = TaskGraphOrchestratorConstants.STANDARD_GLOBAL_QUEUE_NODES_TABLE;\n        this.temporaryQueueNodesTableName = TaskGraphOrchestratorConstants.STANDARD_GLOBAL_TEMPORARY_QUEUE_NODES_TABLE;\n    }\n\n    @Override\n    public String getQueueNodesTableName() {\n        return this.queueNodesTableName;\n    }\n\n    @Override\n    public String getTemporaryQueueNodesTableName() {\n        return this.temporaryQueueNodesTableName;\n    }\n\n    public void setQueueNodesTableName( String queueNodesTableName ) {\n        this.queueNodesTableName = queueNodesTableName;\n    }\n\n    public void setTemporaryQueueNodesTableName( String temporaryQueueNodesTableName ) {\n        this.temporaryQueueNodesTableName = temporaryQueueNodesTableName;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/ExecuteCallBack.java",
    "content": "package com.walnut.odin.conduct.dag;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\n\nimport java.util.List;\n\npublic interface ExecuteCallBack extends Pinenut {\n    List<TaskElement> introduceTask();\n\n    void nextTask();\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/RavenTaskGraphOrchestrator.java",
    "content": "package com.walnut.odin.conduct.dag;\n\nimport java.util.ArrayDeque;\nimport java.util.ArrayList;\nimport java.util.Deque;\nimport java.util.List;\n\nimport com.pinecone.hydra.orchestration.SequentialAction;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.ConfigurableMegaStratumQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue;\nimport com.pinecone.hydra.unit.iqueue.MegaDPStratumQueue;\nimport com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.MegaStratumQueueMeta;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\nimport com.pinecone.hydra.unit.vgraph.layer.Layer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\n\nimport com.walnut.odin.atlas.advance.GenericTapedBFSGraphAdvancer;\nimport com.walnut.odin.atlas.advance.strategy.AtlasPriorityProcessStrategy;\nimport com.walnut.odin.atlas.advance.strategy.MegaInDegreeFirstStrategy;\n\npublic class RavenTaskGraphOrchestrator implements TaskGraphOrchestrator {\n    protected VectorDAG                                 mVectorDAG;\n\n    protected LayerInstrument                           mLayerInstrument;\n\n    protected RuntimeAtlasInstrument mRuntimeAtlasInstrument;\n\n    protected KOIMappingDriver                          mQueueDriver;\n\n    protected long                                      mnCurrentPos;\n\n    protected int                                       mnTaskBatchSize;\n\n    protected int                                       mnExecuteBatchSize;\n\n    protected TaskGraphOrchestratorConfig               mConfig;\n\n\n    protected volatile Deque<VectorDAG>                 mExecuteGraph;\n\n    public RavenTaskGraphOrchestrator(\n            VectorDAG vectorDAG, LayerInstrument layerInstrument, int taskBatchSize,int 
executeBatchSize,\n            RuntimeAtlasInstrument runtimeAtlasInstrument,KOIMappingDriver queueDriver\n    ) {\n        this.mVectorDAG                 = vectorDAG;\n        this.mLayerInstrument           = layerInstrument;\n        this.mRuntimeAtlasInstrument    = runtimeAtlasInstrument;\n        this.mnCurrentPos               = 0;\n        this.mnTaskBatchSize            = taskBatchSize;\n        this.mnExecuteBatchSize         = executeBatchSize;\n        this.mQueueDriver               = queueDriver;\n        this.mExecuteGraph              = new ArrayDeque<>();\n\n        this.mConfig = new ConfigurableTaskGraphOrchestratorConfig();\n    }\n\n    @Override\n    public void execute() {\n        // 将图分解为可执行的子图\n        this.createExecuteGraph();\n\n        // 对每个子图生成最终执行队列\n        for( VectorDAG vectorDAG : this.mExecuteGraph ) {\n            this.createExecuteQueue( vectorDAG );\n        }\n\n        // 将队列中生成的节点转换成执行任务加入执行器\n        this.enqueueTasksForExecution();\n\n    }\n\n    private void enqueueTasksForExecution() {\n        for(int i = 0; i < this.mnExecuteBatchSize; i++ ) {\n            VectorDAG vectorDAG = this.mExecuteGraph.pop();\n            SequentialAction action = new SequentialAction();\n            MegaDeflectPriorityQueueMeta meta = new ConfigurableMegaDeflectPriorityQueueMeta();\n            meta.setQueueTableName( this.mConfig.getQueueNodesTableName() );\n            MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(this.mQueueDriver, 0, \"segment_name\", vectorDAG.getAffiliateLayerGuid().toString(), meta);\n\n            TaskExecuteCallBack callBack = new TaskExecuteCallBack( magnitudeDPQueue, this.mRuntimeAtlasInstrument,this.mConfig,this.mQueueDriver, this.mExecuteGraph,this.mnTaskBatchSize);\n            TaskExertium taskExertium = new TaskExertium( callBack );\n            action.add( taskExertium );\n            action.start();\n        }\n    }\n\n    private void createExecuteGraph() {\n        List<Layer> layers = 
this.mLayerInstrument.splitGraphLayer(this.mVectorDAG);\n        ArrayList<VectorDAG> vectorDAGS = new ArrayList<>();\n        for( Layer layer : layers ) {\n            VectorDAG vectorDAG = this.mRuntimeAtlasInstrument.toVectorDAG(layer);\n            vectorDAGS.add( vectorDAG );\n        }\n        this.mExecuteGraph.addAll( vectorDAGS );\n    }\n\n    private void createExecuteQueue( VectorDAG vectorDAG ) {\n        MegaDeflectPriorityQueueMeta meta1 = new ConfigurableMegaDeflectPriorityQueueMeta();\n        meta1.setQueueTableName( this.mConfig.getQueueNodesTableName() );\n\n        MegaStratumQueueMeta meta2 = new ConfigurableMegaStratumQueueMeta();\n        meta2.setQueueTableName( this.mConfig.getTemporaryQueueNodesTableName() );\n\n\n        MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(this.mQueueDriver, 0, \"segment_name\", vectorDAG.getAffiliateLayerGuid().toString(), meta1);\n        MegaDPStratumQueue megaDPStratumQueue = new MegaDPStratumQueue(this.mQueueDriver, \"segment_name\", vectorDAG.getAffiliateLayerGuid().toString(), meta2);\n\n        AtlasPriorityProcessStrategy strategy = new AtlasPriorityProcessStrategy();\n        strategy.addStrategy( new MegaInDegreeFirstStrategy( this.mRuntimeAtlasInstrument, magnitudeDPQueue, megaDPStratumQueue,this.mLayerInstrument ) );\n        GenericTapedBFSGraphAdvancer advancer = new GenericTapedBFSGraphAdvancer( this.mRuntimeAtlasInstrument, magnitudeDPQueue,strategy );\n        advancer.traverse( vectorDAG );\n    }\n\n\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskExecuteCallBack.java",
    "content": "package com.walnut.odin.conduct.dag;\n\nimport java.util.ArrayList;\nimport java.util.Deque;\nimport java.util.List;\n\nimport com.pinecone.hydra.orchestration.SequentialAction;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue;\nimport com.pinecone.hydra.unit.iqueue.DeflectPriorityQueue;\nimport com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.entity.QueueElement;\nimport com.pinecone.hydra.unit.vgraph.VectorDAG;\n\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\n\n\npublic class TaskExecuteCallBack implements ExecuteCallBack {\n    private RuntimeAtlasInstrument          mRuntimeAtlasInstrument;\n\n    protected KOIMappingDriver              mQueueDriver;\n\n    private TaskGraphOrchestratorConfig     mConfig;\n\n    private DeflectPriorityQueue            mDeflectPriorityQueue;\n\n    private Deque<VectorDAG>                mExecuteVectorDAG;\n\n    private int                             mnTaskBatchSize;\n\n    private int                             mnCurrentPos;\n\n    public TaskExecuteCallBack(DeflectPriorityQueue deflectPriorityQueue, RuntimeAtlasInstrument runtimeAtlasInstrument, TaskGraphOrchestratorConfig config,\n                               KOIMappingDriver driver, Deque<VectorDAG> vectorDAGDeque, int taskBatchSize ) {\n        this.mDeflectPriorityQueue = deflectPriorityQueue;\n        this.mRuntimeAtlasInstrument        = runtimeAtlasInstrument;\n        this.mQueueDriver                   = driver;\n        this.mConfig                        = config;\n        this.mExecuteVectorDAG              = vectorDAGDeque;\n        this.mnTaskBatchSize                = taskBatchSize;\n    }\n\n    @Override\n    public List<TaskElement> introduceTask() {\n        
List<QueueElement> queueElements = this.mDeflectPriorityQueue.fetchElements(mnCurrentPos, mnTaskBatchSize);\n        mnCurrentPos += queueElements.size();\n        // todo 目前不知道那边的逻辑先写成多次io的形式\n        ArrayList<TaskElement> taskElements = new ArrayList<>();\n        for( QueueElement queueElement : queueElements ) {\n            TaskElement node = this.mRuntimeAtlasInstrument.queryTaskElementByGuid(queueElement.getObjectGuid());\n            taskElements.add( node );\n        }\n        return taskElements;\n    }\n\n    @Override\n    public synchronized void nextTask() {\n        VectorDAG pop = this.mExecuteVectorDAG.pop();\n        if( pop != null ) {\n            SequentialAction action = new SequentialAction();\n            MegaDeflectPriorityQueueMeta meta = new ConfigurableMegaDeflectPriorityQueueMeta();\n            meta.setQueueTableName( this.mConfig.getQueueNodesTableName() );\n            MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(this.mQueueDriver, 0, \"segment_name\", pop.getAffiliateLayerGuid().toString(), meta);\n\n            TaskExecuteCallBack callBack = new TaskExecuteCallBack( magnitudeDPQueue, this.mRuntimeAtlasInstrument,this.mConfig,this.mQueueDriver, this.mExecuteVectorDAG,this.mnTaskBatchSize);\n            TaskExertium taskExertium = new TaskExertium( callBack );\n            action.add( taskExertium );\n            action.start();\n        }\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskExertium.java",
    "content": "package com.walnut.odin.conduct.dag;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.orchestration.Exertium;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\n\nimport java.util.ArrayDeque;\nimport java.util.Deque;\nimport java.util.List;\n\npublic class TaskExertium extends Exertium {\n    private Deque<TaskElement>  mDeque;\n\n    private ExecuteCallBack     mExecuteCallBack;\n\n    private int                 mRemainingNums;\n\n    public TaskExertium( ExecuteCallBack callBack ) {\n        this.mExecuteCallBack = callBack;\n        this.mRemainingNums = 0;\n        this.mDeque = new ArrayDeque<>();\n    }\n\n    @Override\n    protected void doStart() {\n        boolean flag = true;\n        while( flag ) {\n           while( !this.mDeque.isEmpty() ) {\n               TaskElement node = this.mDeque.pop();\n               Debug.trace( \"执行节点\" + node.getId() );\n               this.mRemainingNums--;\n           }\n\n            if( this.mRemainingNums == 0 ) {\n                List<TaskElement> taskElements = this.mExecuteCallBack.introduceTask();\n                this.mRemainingNums = taskElements.size();\n                this.mDeque.addAll( taskElements );\n            }\n            if( this.mRemainingNums == 0 ) {\n                flag = false;\n                this.mExecuteCallBack.nextTask();\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskGraphOrchestrator.java",
    "content": "package com.walnut.odin.conduct.dag;\n\nimport com.pinecone.framework.system.regime.Orchestrator;\n\npublic interface TaskGraphOrchestrator extends Orchestrator {\n    void execute();\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskGraphOrchestratorConfig.java",
    "content": "package com.walnut.odin.conduct.dag;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TaskGraphOrchestratorConfig extends Pinenut {\n\n    String getQueueNodesTableName();\n\n    String getTemporaryQueueNodesTableName();\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/dag/TaskGraphOrchestratorConstants.java",
    "content": "package com.walnut.odin.conduct.dag;\n\npublic final class TaskGraphOrchestratorConstants {\n\n    public static final String STANDARD_GLOBAL_QUEUE_NODES_TABLE            = \"hydra_global_queue_nodes\";\n\n    public static final String STANDARD_GLOBAL_TEMPORARY_QUEUE_NODES_TABLE  = \"hydra_global_temporary_queue_nodes\";\n\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/entity/GenericInstanceAtlasAdjacent.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport java.util.Map;\n\npublic class GenericInstanceAtlasAdjacent implements InstanceAtlasAdjacent {\n    protected GUID guid;\n    protected GUID parentGuid;\n\n    public GenericInstanceAtlasAdjacent() {\n    }\n\n    public GenericInstanceAtlasAdjacent(Map<String, Object> joEntity) {\n        BeanMapDecoder.BasicDecoder.decode(this, joEntity);\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public GUID getParentGuid() {\n        return this.parentGuid;\n    }\n\n    @Override\n    public void setParentGuid(GUID parentGuid) {\n        this.parentGuid = parentGuid;\n    }\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/entity/GenericInstanceAtlasNode.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport java.util.Map;\n\npublic class GenericInstanceAtlasNode implements InstanceAtlasNode {\n    protected GUID    guid;\n    protected GUID    instanceGuid;\n    protected String  nodeName;\n    protected boolean isIsolated;\n\n    public GenericInstanceAtlasNode() {\n    }\n\n    public GenericInstanceAtlasNode(Map<String, Object> joEntity) {\n        BeanMapDecoder.BasicDecoder.decode(this, joEntity);\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public GUID getInstanceGuid() {\n        return this.instanceGuid;\n    }\n\n    @Override\n    public void setInstanceGuid(GUID instanceGuid) {\n        this.instanceGuid = instanceGuid;\n    }\n\n    @Override\n    public String getNodeName() {\n        return this.nodeName;\n    }\n\n    @Override\n    public void setNodeName(String nodeName) {\n        this.nodeName = nodeName;\n    }\n\n    @Override\n    public boolean isIsolated() {\n        return this.isIsolated;\n    }\n\n    @Override\n    public void setIsIsolated(boolean isIsolated) {\n        this.isIsolated = isIsolated;\n    }\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/entity/GenericInstanceEvent.java",
    "content": "package com.walnut.odin.conduct.entity;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\npublic class GenericInstanceEvent implements InstanceEvent {\n    protected GUID          guid;\n    protected GUID          taskGuid;\n    protected GUID          instanceGuid;\n    protected String        instanceName;\n    protected int           retryTimes;\n    protected int           currentRetryNumber;\n    protected String        eventType;\n    protected String        state;\n    protected String        eventContext;\n    protected LocalDateTime execTime;\n\n    public GenericInstanceEvent() {\n    }\n\n    public GenericInstanceEvent(Map<String, Object> joEntity) {\n        BeanMapDecoder.BasicDecoder.decode(this, joEntity);\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.guid;\n    }\n\n    @Override\n    public void setGuid(GUID guid) {\n        this.guid = guid;\n    }\n\n    @Override\n    public GUID getTaskGuid() {\n        return this.taskGuid;\n    }\n\n    @Override\n    public void setTaskGuid(GUID taskGuid) {\n        this.taskGuid = taskGuid;\n    }\n\n    @Override\n    public GUID getInstanceGuid() {\n        return this.instanceGuid;\n    }\n\n    @Override\n    public void setInstanceGuid(GUID instanceGuid) {\n        this.instanceGuid = instanceGuid;\n    }\n\n    @Override\n    public String getInstanceName() {\n        return this.instanceName;\n    }\n\n    @Override\n    public void setInstanceName(String instanceName) {\n        this.instanceName = instanceName;\n    }\n\n    @Override\n    public int getRetryTimes() {\n        return this.retryTimes;\n    }\n\n    @Override\n    public void setRetryTimes(int retryTimes) {\n        this.retryTimes = retryTimes;\n    }\n\n    @Override\n    public int getCurrentRetryNumber() {\n        return this.currentRetryNumber;\n    }\n\n    
@Override\n    public void setCurrentRetryNumber(int currentRetryNumber) {\n        this.currentRetryNumber = currentRetryNumber;\n    }\n\n    @Override\n    public String getEventType() {\n        return this.eventType;\n    }\n\n    @Override\n    public void setEventType(String eventType) {\n        this.eventType = eventType;\n    }\n\n    @Override\n    public String getState() {\n        return this.state;\n    }\n\n    @Override\n    public void setState(String state) {\n        this.state = state;\n    }\n\n    @Override\n    public String getEventContext() {\n        return this.eventContext;\n    }\n\n    @Override\n    public void setEventContext(String eventContext) {\n        this.eventContext = eventContext;\n    }\n\n    @Override\n    public LocalDateTime getExecTime() {\n        return this.execTime;\n    }\n\n    @Override\n    public void setExecTime(LocalDateTime execTime) {\n        this.execTime = execTime;\n    }\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/entity/GenericInstanceExec.java",
    "content": "package com.walnut.odin.conduct.entity;\n\n\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanMapDecoder;\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\npublic class GenericInstanceExec implements InstanceExec {\n    protected long          id;\n    protected GUID          taskGuid;\n    protected GUID          instanceGuid;\n    protected String        taskName;\n    protected String        instanceName;\n    protected String        processorQueue;\n    protected String        clusterName;\n    protected String        execState;\n    protected int           currentRetryNumber;\n    protected int           retryTimes;\n    protected LocalDateTime startTime;\n    protected LocalDateTime runTime;\n    protected LocalDateTime finishTime;\n\n    public GenericInstanceExec() {\n    }\n\n    public GenericInstanceExec(Map<String, Object> joEntity) {\n        BeanMapDecoder.BasicDecoder.decode(this, joEntity);\n    }\n\n    @Override\n    public long getId() {\n        return this.id;\n    }\n\n    @Override\n    public void setId(long id) {\n        this.id = id;\n    }\n\n    @Override\n    public GUID getTaskGuid() {\n        return this.taskGuid;\n    }\n\n    @Override\n    public void setTaskGuid(GUID taskGuid) {\n        this.taskGuid = taskGuid;\n    }\n\n    @Override\n    public GUID getInstanceGuid() {\n        return this.instanceGuid;\n    }\n\n    @Override\n    public void setInstanceGuid(GUID instanceGuid) {\n        this.instanceGuid = instanceGuid;\n    }\n\n    @Override\n    public String getTaskName() {\n        return this.taskName;\n    }\n\n    @Override\n    public void setTaskName(String taskName) {\n        this.taskName = taskName;\n    }\n\n    @Override\n    public String getInstanceName() {\n        return this.instanceName;\n    }\n\n    @Override\n    public void setInstanceName(String instanceName) {\n        this.instanceName = instanceName;\n    }\n\n    
@Override\n    public String getProcessorQueue() {\n        return this.processorQueue;\n    }\n\n    @Override\n    public void setProcessorQueue(String processorQueue) {\n        this.processorQueue = processorQueue;\n    }\n\n    @Override\n    public String getClusterName() {\n        return this.clusterName;\n    }\n\n    @Override\n    public void setClusterName(String clusterName) {\n        this.clusterName = clusterName;\n    }\n\n    @Override\n    public String getExecState() {\n        return this.execState;\n    }\n\n    @Override\n    public void setExecState(String execState) {\n        this.execState = execState;\n    }\n\n    @Override\n    public int getCurrentRetryNumber() {\n        return this.currentRetryNumber;\n    }\n\n    @Override\n    public void setCurrentRetryNumber(int currentRetryNumber) {\n        this.currentRetryNumber = currentRetryNumber;\n    }\n\n    @Override\n    public int getRetryTimes() {\n        return this.retryTimes;\n    }\n\n    @Override\n    public void setRetryTimes(int retryTimes) {\n        this.retryTimes = retryTimes;\n    }\n\n    @Override\n    public LocalDateTime getStartTime() {\n        return this.startTime;\n    }\n\n    @Override\n    public void setStartTime(LocalDateTime startTime) {\n        this.startTime = startTime;\n    }\n\n    @Override\n    public LocalDateTime getRunTime() {\n        return this.runTime;\n    }\n\n    @Override\n    public void setRunTime(LocalDateTime runTime) {\n        this.runTime = runTime;\n    }\n\n    @Override\n    public LocalDateTime getFinishTime() {\n        return this.finishTime;\n    }\n\n    @Override\n    public void setFinishTime(LocalDateTime finishTime) {\n        this.finishTime = finishTime;\n    }\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/InstanceAtlasNodeManipular.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport com.pinecone.hydra.system.ko.dao.GUIDNameManipulator;\nimport com.walnut.odin.conduct.entity.InstanceAtlasNode;\n\npublic interface InstanceAtlasNodeManipular extends GUIDNameManipulator {\n\n    void insert( InstanceAtlasNode instanceAtlasNode);\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/RavenInstanceScheduleImpetus.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.hydra.system.ko.MetaPersistenceException;\nimport com.pinecone.hydra.task.InstanceEventType;\nimport com.pinecone.hydra.task.TaskInstanceExecState;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.UniformTaskInstrument;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.pinecone.hydra.task.kom.source.TaskNodeManipulator;\nimport com.pinecone.slime.meta.TableIndexMeta;\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\nimport com.walnut.odin.conduct.entity.GenericInstanceEvent;\nimport com.walnut.odin.conduct.entity.GenericInstanceExec;\nimport com.walnut.odin.conduct.entity.InstanceEvent;\nimport com.walnut.odin.conduct.entity.InstanceExec;\nimport com.walnut.odin.conduct.schedule.entity.DepartureChecklist;\nimport com.walnut.odin.conduct.schedule.entity.ScheduleFittingContext;\nimport com.walnut.odin.dispatch.PipelineLaunchReport;\nimport com.walnut.odin.dispatch.TaskLaunchContext;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTaskConfig;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.mapper.InstanceAtlasNodeMapper;\nimport com.walnut.odin.task.source.RavenTaskMasterManipulator;\nimport com.walnut.odin.task.source.ScheduleManipulator;\nimport com.walnut.odin.task.troll.GenericRavenTaskInstance;\nimport com.walnut.odin.task.troll.LaunchFeature;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\n\npublic class RavenInstanceScheduleImpetus implements 
InstanceScheduleImpetus {\n\n    private Logger log = LoggerFactory.getLogger( this.getClass() );\n\n    private RavenTaskConfig            mRavenTaskConfig;\n    private int                        mnScanThreadCount;\n    private long                       mnScanIdWindow;\n\n    private UniformTaskScheduler       mTaskScheduler;\n    private TaskExecutionLauncher      mTaskExecutionLauncher;\n    private UniformTaskInstrument      mUniformTaskInstrument;\n    private RuntimeAtlasInstrument     mRuntimeAtlasInstrument;\n    private InstanceInstrument         mInstanceInstrument;\n    private CentralizedTaskInstrument  mCentralizedTaskInstrument;\n\n    private RavenTaskMasterManipulator mRavenTaskMasterManipulator;\n    private TaskNodeManipulator        mTaskNodeManipulator;\n    private ScheduleManipulator        mScheduleManipulator;\n    private InstanceAtlasNodeMapper    mInstanceAtlasNodeMapper;\n\n    private InstanceScheduleAllocator  mInstanceScheduleAllocator;\n    private ExecutorService            mExecutorService;\n\n    public RavenInstanceScheduleImpetus( UniformTaskScheduler taskScheduler ) {\n        this.mTaskScheduler              = taskScheduler;\n        this.mRavenTaskConfig            = taskScheduler.ravenTaskConfig();\n        this.mnScanThreadCount           = this.mRavenTaskConfig.getScheduleScanThreadCount();\n        this.mnScanIdWindow              = this.mRavenTaskConfig.getScheduleScanIdWindow();\n\n        this.mRuntimeAtlasInstrument     = taskScheduler.atlasInstrument();\n        this.mTaskExecutionLauncher      = taskScheduler.taskExecutionLauncher();\n        this.mCentralizedTaskInstrument  = taskScheduler.taskInstrument();\n        this.mUniformTaskInstrument      = this.mCentralizedTaskInstrument.getUniformTaskInstrument();\n        this.mInstanceInstrument         = taskScheduler.instanceInstrument();\n\n        this.mRavenTaskMasterManipulator = this.mCentralizedTaskInstrument.getRavenTaskMasterManipulator();\n        
this.mTaskNodeManipulator        = this.mRavenTaskMasterManipulator.getTaskMasterManipulator().getTaskNodeManipulator();\n        this.mScheduleManipulator        = this.mRavenTaskMasterManipulator.getScheduleManipulator();\n        this.mInstanceAtlasNodeMapper    = this.mScheduleManipulator.getInstanceAtlasNodeMapper();\n\n        this.mInstanceScheduleAllocator  = taskScheduler.instanceScheduleAllocator();\n        this.mExecutorService            = Executors.newFixedThreadPool( this.mnScanThreadCount * 2 );\n\n        log.info( \"[Odin] [CrucialSchedulerComponentLifecycle] (RavenInstanceScheduleImpetus Construction) <Done>\" );\n    }\n\n\n// 依赖mapper记得看看\n\n\n//    protected DepartureChecklist prelaunch_check_instance( InstanceEntry that ) {\n//\n//\n//    }\n\n\n\n    // [Prelaunch-Stage2] 已完成并行调度配额分配，启动准备程序\n    protected Collection<TaskLaunchContext> initializePrelaunchSequence( Collection<InstanceEntry> fittedInstances ) {\n        Collection<TaskLaunchContext> li = new ArrayList<>();\n        for ( InstanceEntry fittedInstance : fittedInstances ) {\n            RavenTaskInstance instance      = new GenericRavenTaskInstance( fittedInstance, this.mCentralizedTaskInstrument );\n            LaunchFeature launchFeature     = new LaunchFeature();\n            String szProcessor = fittedInstance.getProcessorName();\n\n            if ( StringUtils.isNoneEmpty(szProcessor) ) {\n                // Not affinity(best-effort), but designated(compulsory).\n                // 这里不是建议分配，而是绑核\n                launchFeature.withProcessorDesignated( szProcessor );\n            }\n            TaskLaunchContext launchContext = TaskLaunchContext.of( instance, launchFeature );\n\n            li.add( launchContext );\n        }\n        return li;\n    }\n\n\n    protected Collection<TaskLaunchContext> prepareLaunchContexts( ScheduleFittingContext context ) {\n        Collection<InstanceEntry> fittedInstances    = context.getFittedInstances();\n        Collection<InstanceEntry> 
discardedInstances = context.getDiscardedInstances();\n\n        Collection<TaskLaunchContext> li = this.initializePrelaunchSequence( fittedInstances );\n\n        for ( InstanceEntry discardedInstance : discardedInstances ) {\n            log.info(\n                    \"[DiscardInstance] ( Task `{}`, Instance `{}` ) has been discarded.\",\n                    discardedInstance.getTaskName(), discardedInstance.getInstanceName()\n            );\n            // TODO, Sophisticate upgradation.\n        }\n\n        return li;\n    }\n\n\n    @Override\n    public void impelSchedulableInstances( Collection<TaskInstanceStatus> statuses, LocalDateTime targetTime ) {\n        if ( targetTime == null ) {\n            targetTime = LocalDateTime.now();\n        }\n\n        TableIndexMeta range = this.mInstanceInstrument.querySchedulableIdRange( statuses, targetTime );\n        if ( range == null ) {\n            return;\n        }\n\n        long idMin = range.getMinId();\n        long idMax = range.getMaxId();\n        if ( idMin <= 0 || idMax <= 0 || idMax < idMin ) {\n            return;\n        }\n\n        long cursor = idMin;\n        while ( cursor <= idMax ) {\n            long windowStart = cursor;\n            long windowEnd   = cursor + this.mnScanIdWindow - 1;\n\n            if ( windowEnd > idMax ) {\n                windowEnd = idMax;\n            }\n\n            final long finalStart = windowStart;\n            final long finalEnd   = windowEnd;\n\n            LocalDateTime finalTargetTime = targetTime;\n            this.mExecutorService.submit( () -> {\n                try {\n                    log.info( \"[TaskSchedulerLifecycle] Impelling schedulable instances (Start: {}, End: {}) <Start>\", finalStart, finalEnd );\n\n\n                    Collection<InstanceEntry> entries = this.mInstanceInstrument.fetchSchedulableInstances(\n                            finalStart, finalEnd, statuses, finalTargetTime\n                    );\n\n\n                    
ScheduleFittingContext context = this.mInstanceScheduleAllocator.pipeFitting( entries );\n                    Collection<TaskLaunchContext> launchContexts = this.prepareLaunchContexts( context );\n                    PipelineLaunchReport report = this.mTaskScheduler.taskDispatcher().pipeLaunch( launchContexts );\n                    //elements = this.prepareScheduleTasks( elements, finalTargetTime );\n\n                    log.info( \"[TaskSchedulerLifecycle] Impelling schedulable instances (Start: {}, End: {}, Size: {}) <Done>\", finalStart, finalEnd, entries.size() );\n                }\n                catch ( Exception e ) {\n                    log.error( \"[TaskSchedulerLifecycle] Impelling schedulable instances (Start: {}, End: {}) <Error>\", finalStart, finalEnd, e );\n                }\n            } );\n\n            cursor = windowEnd + 1;\n        }\n    }\n\n    @Override\n    public void impelPrelaunchInstances( LocalDateTime targetTime ) {\n        this.impelSchedulableInstances(\n                List.of(\n                        TaskInstanceStatus.New,          TaskInstanceStatus.DependencyWait,\n                        TaskInstanceStatus.ResourceWait, TaskInstanceStatus.DepartureStandby\n                ),\n                targetTime\n        );\n    }\n\n    /*protected void processAndFireInstances( List<InstanceEntry> instances ) throws MetaPersistenceException {\n        for ( InstanceEntry instance : instances ) {\n            try {\n                log.info( \"GUID: {}, Name: {}\", instance.getGuid(), instance.getInstanceName() );\n                instance.setInstanceStatus( TaskInstanceStatus.ResourceWait );\n                instance.setRunStatus(TaskInstanceStatus.ResourceWait.getName());\n                instance.setStartTime( LocalDateTime.now() );\n                this.mInstanceInstrument.updateInstance( instance );\n                //log.info(this.mInstanceInstrument.getInstanceEntry(instance.getGuid()).getRunStatus());\n                
InstanceExec execUpdate = new GenericInstanceExec();\n                execUpdate.setInstanceGuid( instance.getGuid() );\n                execUpdate.setExecState( TaskInstanceExecState.Submitted.getName() );\n                this.mScheduleManipulator.getInstanceExecMapper().updateStateByInstanceGuid( execUpdate );\n\n                InstanceEvent event = new GenericInstanceEvent();\n                event.setGuid( this.mCentralizedTaskInstrument.getGuidAllocator().nextGUID() );\n                event.setTaskGuid( instance.getTaskGuid() );\n                event.setInstanceGuid( instance.getGuid() );\n                event.setInstanceName( instance.getInstanceName() );\n                event.setEventType( instance.getTaskType() );\n                event.setState( InstanceEventType.CheckDependencyReady.getName() );\n                event.setExecTime( LocalDateTime.now() );\n                event.setEventContext( \"{}\" );\n                //    this.mScheduleManipulator.getInstanceEventMapper().insert( event );\n                //LaunchFeature feature = new LaunchFeature();\n                //  this.mTaskExecutionLauncher.launchLocally( instance, feature );\n\n            }\n            catch ( MetaPersistenceException e ) {\n                instance.setInstanceStatus( TaskInstanceStatus.Error );\n                this.mInstanceInstrument.updateInstance( instance );\n            }\n        }\n    }*/\n\n\n    @Override\n    public UniformTaskScheduler taskScheduler() {\n        return this.mTaskScheduler;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/RavenScheduleAllocator.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ConcurrentMap;\nimport java.util.concurrent.atomic.AtomicLong;\nimport java.util.concurrent.locks.Lock;\nimport java.util.concurrent.locks.ReentrantLock;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.util.CollectionUtils;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.marshal.TaskPriority;\nimport com.walnut.odin.conduct.schedule.entity.ConcurrentQuota;\nimport com.walnut.odin.conduct.schedule.entity.ScheduleFittingContext;\n\npublic class RavenScheduleAllocator implements InstanceScheduleAllocator {\n\n    private Logger log = LoggerFactory.getLogger( this.getClass() );\n\n    private String                                             mszPartitionName;\n    private Map<String, ConcurrentQuota>                       mQuotaConfig;\n    private ConcurrentMap<Integer, ConcurrentQuota>            mPriorityQuota;\n    private AtomicLong                                         mGlobalConcurrentInstance;\n\n    private ConcurrentMap<Integer, Lock>                       mPrioritySegLocks;\n    private ConcurrentMap<Integer, Map<GUID, InstanceEntry>>   mPriorityInstances;\n    private Lock                                               mGlobalInstanceLock;\n\n    protected void from_config( JSONObject config ) {\n        this.mszPartitionName = config.optString( \"name\" );\n\n        JSONObject joQuotaConfig = config.getJSONObject( \"quota\" );\n        this.mQuotaConfig = ConcurrentQuota.fromThose( joQuotaConfig );\n        this.mGlobalConcurrentInstance = new 
AtomicLong( config.optLong( \"globalConcurrentInstance\" ) );\n\n        for ( Map.Entry<String, ConcurrentQuota> entry : this.mQuotaConfig.entrySet() ) {\n            if ( entry == null ) {\n                continue;\n            }\n\n            String          szKey   = entry.getKey();\n            ConcurrentQuota value   = entry.getValue();\n\n            if ( szKey == null || value == null ) {\n                continue;\n            }\n\n            if ( \"default\".equalsIgnoreCase( szKey ) ) {\n                continue;\n            }\n\n            this.refreshQuotaCount( value, this.mGlobalConcurrentInstance.get() );\n            this.mPriorityQuota.put( (int) value.getPriority(), value );\n        }\n    }\n\n    protected void trace_dispatcher_config() {\n        JSONObject jo = new JSONMaptron();\n\n        jo.put( \"PartitionName\", this.mszPartitionName );\n        jo.put( \"ConcurrentInstance\", this.mGlobalConcurrentInstance.get() );\n        jo.put( \"QuotaConfig\", new JSONMaptron( CollectionUtils.genericConvert( this.mQuotaConfig ), true ) );\n\n        log.info( \"[ScheduleAllocator] Allocator configured with following configs: {}\", jo.toJSONStringI( 2 ) );\n    }\n\n    public RavenScheduleAllocator( JSONObject config ) {\n        this.mPriorityQuota            = new ConcurrentHashMap<>();\n        this.mPrioritySegLocks         = new ConcurrentHashMap<>();\n        this.mPriorityInstances        = new ConcurrentHashMap<>();\n        this.mGlobalInstanceLock       = new ReentrantLock();\n\n        this.from_config( config );\n        this.trace_dispatcher_config();\n    }\n\n    public RavenScheduleAllocator( UniformTaskScheduler taskScheduler ) {\n        this(\n                taskScheduler.ravenTaskConfig().getScheduleGlobalAllocatorConfig().optJSONObject(\n                        taskScheduler.ravenTaskConfig().getSchedulePartitionName()\n                )\n        );\n    }\n\n\n    protected ConcurrentQuota resolveQuotaTemplate( short 
nPriority ) {\n        if ( this.mQuotaConfig == null || this.mQuotaConfig.isEmpty() ) {\n            return null;\n        }\n\n        if ( isQuotaBypassedPriority( nPriority ) ) {\n            ConcurrentQuota unlimitedQuota = this.mQuotaConfig.get( \"unlimited\" );\n            if ( unlimitedQuota != null ) {\n                return unlimitedQuota.reproduce( nPriority );\n            }\n        }\n\n        ConcurrentQuota directQuota = this.mPriorityQuota.get( (int) nPriority );\n        if ( directQuota != null ) {\n            return directQuota.reproduce( nPriority );\n        }\n\n        ConcurrentQuota defaultQuota = this.mQuotaConfig.get( \"default\" );\n        if ( defaultQuota != null ) {\n            return defaultQuota.reproduce( nPriority );\n        }\n\n        return null;\n    }\n\n    protected static Map<Integer, List<InstanceEntry>> groupInstancesByPriority( Collection<InstanceEntry> instances ) {\n        Map<Integer, List<InstanceEntry>> grouped = new HashMap<>();\n\n        for ( InstanceEntry instance : instances ) {\n            if ( instance == null ) {\n                continue;\n            }\n\n            int nPriority = instance.getActuallyPriority();\n\n            List<InstanceEntry> list = grouped.computeIfAbsent(\n                    nPriority,\n                    k -> new ArrayList<>()\n            );\n            list.add( instance );\n        }\n\n        return grouped;\n    }\n\n    protected Lock affirmPrioritySegLock( Integer nPriority ) {\n        return this.mPrioritySegLocks.computeIfAbsent(\n                nPriority,\n                k -> new ReentrantLock()\n        );\n    }\n\n    protected ConcurrentQuota affirmQuota( short nPriority ) {\n        ConcurrentQuota quota = this.mPriorityQuota.computeIfAbsent(\n                (int) nPriority,\n                k -> {\n                    ConcurrentQuota template = this.resolveQuotaTemplate( nPriority );\n                    if ( template != null ) {\n              
          return template;\n                    }\n                    return new ConcurrentQuota( nPriority );\n                }\n        );\n\n        this.refreshQuotaCount( quota, this.mGlobalConcurrentInstance.get() );\n\n        return quota;\n    }\n\n    protected void refreshQuotaCount( ConcurrentQuota quota, long nGlobalConcurrentInstance ) {\n        if ( quota == null ) {\n            return;\n        }\n\n        if ( quota.isMaximumRatioMode() ) {\n            long nMaximumCnt = (long) Math.floor( nGlobalConcurrentInstance * quota.getMaximumRatio() );\n            if ( nMaximumCnt < 0 ) {\n                nMaximumCnt = 0;\n            }\n            quota.setMaximumCnt( nMaximumCnt );\n        }\n        else {\n            Long nMaximumCnt = quota.getMaximumCnt();\n            if ( nMaximumCnt == null ) {\n                quota.setMaximumCnt( 0L );\n            }\n            else if ( nMaximumCnt < 0 ) {\n                quota.setMaximumCnt( Long.MAX_VALUE );\n            }\n        }\n\n        if ( quota.isMinimumRatioMode() ) {\n            long nMinimumCnt = (long) Math.floor( nGlobalConcurrentInstance * quota.getMinimumRatio() );\n            if ( nMinimumCnt < 0 ) {\n                nMinimumCnt = 0;\n            }\n            quota.setMinimumCnt( nMinimumCnt );\n        }\n        else {\n            Long nMinimumCnt = quota.getMinimumCnt();\n            if ( nMinimumCnt == null ) {\n                quota.setMinimumCnt( 0L );\n            }\n            else if ( nMinimumCnt < 0 ) {\n                quota.setMinimumCnt( Long.MAX_VALUE );\n            }\n        }\n    }\n\n    protected Map<GUID, InstanceEntry> affirmPriorityInstances( int nPriority ) {\n        return this.mPriorityInstances.computeIfAbsent(\n                nPriority,\n                k -> new HashMap<>()\n        );\n    }\n\n    public static boolean isQuotaBypassedPriority( int nPriority ) {\n        return nPriority > TaskPriority.UNLIMITED.getValue();\n    }\n\n    
public long getGlobalConcurrentInstance() {\n        return this.mGlobalConcurrentInstance.get();\n    }\n\n    public void setGlobalConcurrentInstance( long nGlobalConcurrentInstance ) {\n        this.mGlobalInstanceLock.lock();\n        try {\n            this.mGlobalConcurrentInstance.set( nGlobalConcurrentInstance );\n\n            for ( ConcurrentQuota quota : this.mPriorityQuota.values() ) {\n                if ( quota == null ) {\n                    continue;\n                }\n                this.refreshQuotaCount( quota, nGlobalConcurrentInstance );\n            }\n        }\n        finally {\n            this.mGlobalInstanceLock.unlock();\n        }\n    }\n\n    public Collection<Integer> queryFulledPriority() {\n        Collection<Integer> fulledPriorities = new ArrayList<>();\n\n        for ( Map.Entry<Integer, ConcurrentQuota> kv : this.mPriorityQuota.entrySet() ) {\n            Integer         nPriority = kv.getKey();\n            ConcurrentQuota quota     = kv.getValue();\n\n            if ( nPriority == null || quota == null ) {\n                continue;\n            }\n\n            if ( isQuotaBypassedPriority( nPriority ) ) {\n                continue;\n            }\n\n            Lock segLock = this.affirmPrioritySegLock( nPriority );\n            segLock.lock();\n            try {\n                long nMaximumCnt = quota.getMaximumCnt();\n                if ( nMaximumCnt == Long.MAX_VALUE ) {\n                    continue;\n                }\n\n                Map<GUID, InstanceEntry> instanceMap = this.mPriorityInstances.get( nPriority );\n                long                     nCurrentCnt = 0;\n\n                if ( instanceMap != null ) {\n                    nCurrentCnt = instanceMap.size();\n                }\n\n                if ( nCurrentCnt >= nMaximumCnt ) {\n                    fulledPriorities.add( nPriority );\n                }\n            }\n            finally {\n                segLock.unlock();\n            }\n     
   }\n\n        return fulledPriorities;\n    }\n\n    public Collection<InstanceEntry> queryPriorityInstances( int nPriority ) {\n        Lock segLock = this.affirmPrioritySegLock( nPriority );\n        segLock.lock();\n        try {\n            Map<GUID, InstanceEntry> instanceMap = this.mPriorityInstances.get( nPriority );\n            if ( instanceMap == null || instanceMap.isEmpty() ) {\n                return new ArrayList<>();\n            }\n\n            return new ArrayList<>( instanceMap.values() );\n        }\n        finally {\n            segLock.unlock();\n        }\n    }\n\n    @Override\n    public String getPartitionName() {\n        return this.mszPartitionName;\n    }\n\n\n    protected void pipeFittingByPriority( int nPriority, Collection<InstanceEntry> instances, ScheduleFittingContext context ) {\n        Lock segLock = this.affirmPrioritySegLock( nPriority );\n        segLock.lock();\n\n        try {\n            if ( instances == null || instances.isEmpty() ) {\n                return;\n            }\n\n            Map<GUID, InstanceEntry>   instanceMap         = this.affirmPriorityInstances( nPriority );\n            Collection<InstanceEntry>  launchedInstances   = context.getFittedInstances();\n            Collection<InstanceEntry>  discardedInstances  = context.getDiscardedInstances();\n\n            if ( isQuotaBypassedPriority( nPriority ) ) {\n                for ( InstanceEntry instance : instances ) {\n                    if ( instance == null || instance.getGuid() == null ) {\n                        continue;\n                    }\n\n                    instanceMap.put( instance.getGuid(), instance );\n                    launchedInstances.add( instance );\n                }\n                return;\n            }\n\n            ConcurrentQuota quota = this.affirmQuota( (short) nPriority );\n            long            nMaximumCnt = quota.getMaximumCnt();\n\n            long nRemaining = nMaximumCnt - instanceMap.size();\n      
      if ( nRemaining <= 0 ) {\n                discardedInstances.addAll( instances );\n                return;\n            }\n\n            for ( InstanceEntry instance : instances ) {\n                if ( instance == null ) {\n                    continue;\n                }\n\n                GUID instanceGuid = instance.getGuid();\n                if ( instanceGuid == null ) {\n                    discardedInstances.add( instance );\n                    continue;\n                }\n\n                if ( instanceMap.containsKey( instanceGuid ) ) {\n                    continue;\n                }\n\n                if ( nRemaining <= 0 ) {\n                    discardedInstances.add( instance );\n                    continue;\n                }\n\n                instanceMap.put( instanceGuid, instance );\n                launchedInstances.add( instance );\n                --nRemaining;\n            }\n        }\n        finally {\n            segLock.unlock();\n        }\n    }\n\n    @Override\n    public ScheduleFittingContext pipeFitting( Collection<InstanceEntry> instances ) {\n        ScheduleFittingContext context = new ScheduleFittingContext();\n        if ( instances == null || instances.isEmpty() ) {\n            return context;\n        }\n\n        Map<Integer, List<InstanceEntry>> groupedInstances = groupInstancesByPriority( instances );\n        for ( Map.Entry<Integer, List<InstanceEntry>> kv : groupedInstances.entrySet() ) {\n            Integer             priority     = kv.getKey();\n            List<InstanceEntry> instanceList = kv.getValue();\n\n            if ( priority == null || instanceList == null || instanceList.isEmpty() ) {\n                continue;\n            }\n\n            this.pipeFittingByPriority( priority, instanceList, context );\n        }\n\n        return context;\n    }\n\n\n\n\n    public InstanceEntry reclaimInstance( int nPriority, GUID instanceGuid ) {\n        if ( instanceGuid == null ) {\n            return 
null;\n        }\n\n        Lock segLock = this.affirmPrioritySegLock( nPriority );\n        segLock.lock();\n        try {\n            Map<GUID, InstanceEntry> instanceMap = this.mPriorityInstances.get( nPriority );\n            if ( instanceMap == null ) {\n                return null;\n            }\n\n            return instanceMap.remove( instanceGuid );\n        }\n        finally {\n            segLock.unlock();\n        }\n    }\n\n    public InstanceEntry reclaimInstance( GUID instanceGuid ) {\n        if ( instanceGuid == null ) {\n            return null;\n        }\n\n        for ( Integer nPriority : this.mPriorityInstances.keySet() ) {\n            if ( nPriority == null ) {\n                continue;\n            }\n\n            Lock segLock = this.affirmPrioritySegLock( nPriority );\n            segLock.lock();\n            try {\n                Map<GUID, InstanceEntry> instanceMap = this.mPriorityInstances.get( nPriority );\n                if ( instanceMap == null ) {\n                    continue;\n                }\n\n                InstanceEntry removed = instanceMap.remove( instanceGuid );\n                if ( removed != null ) {\n                    return removed;\n                }\n            }\n            finally {\n                segLock.unlock();\n            }\n        }\n\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/RavenTaskSchedulePreparator.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.task.InstanceEventType;\nimport com.pinecone.hydra.task.TaskInstanceExecState;\nimport com.pinecone.hydra.task.kom.UniformTaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.kom.source.TaskNodeManipulator;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport com.pinecone.slime.meta.TableIndex64Meta;\n\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\nimport com.walnut.odin.conduct.entity.GenericInstanceAtlasAdjacent;\nimport com.walnut.odin.conduct.entity.GenericInstanceAtlasNode;\nimport com.walnut.odin.conduct.entity.GenericInstanceEvent;\nimport com.walnut.odin.conduct.entity.GenericInstanceExec;\nimport com.walnut.odin.conduct.entity.InstanceAtlasAdjacent;\nimport com.walnut.odin.conduct.entity.InstanceAtlasNode;\nimport com.walnut.odin.conduct.entity.InstanceEvent;\nimport com.walnut.odin.conduct.entity.InstanceExec;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTask;\nimport com.walnut.odin.task.RavenTaskConfig;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.mapper.InstanceAtlasAdjacentMapper;\nimport com.walnut.odin.task.mapper.InstanceAtlasNodeMapper;\nimport com.walnut.odin.task.mapper.InstanceEventMapper;\nimport com.walnut.odin.task.mapper.InstanceExecMapper;\nimport 
com.walnut.odin.task.source.RavenTaskMasterManipulator;\nimport com.walnut.odin.task.source.ScheduleManipulator;\nimport com.walnut.odin.task.troll.LaunchFeature;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\n\npublic class RavenTaskSchedulePreparator implements TaskSchedulePreparator {\n\n    // Generate daily batches in advance (24h/Cycle)\n    // 每日提前生成当日批次（24h/Cycle）\n    public static final Collection<TaskScheduleCycle> DailyTaskScheduleCycles = List.of(\n            TaskScheduleCycle.Month, TaskScheduleCycle.Week, TaskScheduleCycle.Day, TaskScheduleCycle.Hour\n    );\n\n    private Logger log = LoggerFactory.getLogger( this.getClass() );\n\n    private GuidAllocator                 mGuidAllocator;\n    private RavenTaskConfig               mRavenTaskConfig;\n    private int                           mnScanThreadCount;\n    private long                          mnScanIdWindow;\n\n    private UniformTaskScheduler          mTaskScheduler;\n    private TaskExecutionLauncher         mTaskExecutionLauncher;\n    private UniformTaskInstrument         mUniformTaskInstrument;\n    private RuntimeAtlasInstrument        mRuntimeAtlasInstrument;\n    private CentralizedTaskInstrument     mCentralizedTaskInstrument;\n\n    private RavenTaskMasterManipulator    mRavenTaskMasterManipulator;\n    private TaskNodeManipulator           mTaskNodeManipulator;\n    private ScheduleManipulator           mScheduleManipulator;\n    private InstanceAtlasNodeMapper       mInstanceAtlasNodeMapper;\n    private InstanceAtlasAdjacentMapper   mInstanceAtlasAdjacentMapper;\n    private InstanceExecMapper            mInstanceExecMapper;\n    private InstanceEventMapper           mInstanceEventMapper;\n\n\n    private ExecutorService               mExecutorService;\n\n    public RavenTaskSchedulePreparator( UniformTaskScheduler taskScheduler ) {\n        this.mTaskScheduler                = taskScheduler;\n        this.mRavenTaskConfig              = 
taskScheduler.ravenTaskConfig();\n        this.mnScanThreadCount             = this.mRavenTaskConfig.getScheduleScanThreadCount();\n        this.mnScanIdWindow                = this.mRavenTaskConfig.getScheduleScanIdWindow();\n\n        this.mRuntimeAtlasInstrument       = taskScheduler.atlasInstrument();\n        this.mTaskExecutionLauncher        = taskScheduler.taskExecutionLauncher();\n        this.mCentralizedTaskInstrument    = taskScheduler.taskInstrument();\n        this.mUniformTaskInstrument        = this.mCentralizedTaskInstrument.getUniformTaskInstrument();\n\n        this.mGuidAllocator                = this.mCentralizedTaskInstrument.getGuidAllocator();\n\n        this.mRavenTaskMasterManipulator   = this.mCentralizedTaskInstrument.getRavenTaskMasterManipulator();\n        this.mTaskNodeManipulator          = this.mRavenTaskMasterManipulator.getTaskMasterManipulator().getTaskNodeManipulator();\n        this.mScheduleManipulator          = this.mRavenTaskMasterManipulator.getScheduleManipulator();\n        this.mInstanceAtlasNodeMapper      = this.mScheduleManipulator.getInstanceAtlasNodeMapper();\n        this.mInstanceAtlasAdjacentMapper  = this.mScheduleManipulator.getInstanceAtlasAdjacentMapper();\n        this.mInstanceExecMapper           = this.mScheduleManipulator.getInstanceExecMapper();\n        this.mInstanceEventMapper          = this.mScheduleManipulator.getInstanceEventMapper();\n\n        this.mExecutorService              = Executors.newFixedThreadPool( this.mnScanThreadCount * 2 );\n\n        log.info( \"[Odin] [CrucialSchedulerComponentLifecycle] (RavenTaskSchedulePreparator Construction) <Done>\" );\n    }\n\n\n    protected TaskScheduleContext prepareTaskScheduleTimeOffset( TaskElement element, LocalDateTime targetTime ) {\n        TaskScheduleContext context = new TaskScheduleContext( element, targetTime );\n\n        TaskScheduleCycle cycle = element.getScheduleCycle();\n        String            cron  = 
element.getScheduleCron();\n        if ( cycle == null ) {\n            return context;\n        }\n\n        if ( cron == null || cron.isBlank() ) {\n            String defaultCron = ScheduleCronHelper.generateDefaultCron( cycle );\n            element.setScheduleCron( defaultCron );\n            cron = defaultCron;\n        }\n\n        LocalDateTime next = element.getNextScheduleTime();\n        context.setThisScheduleTime( next );\n        if ( next == null ) {\n            LocalDateTime firstFireTime = ScheduleCronHelper.computeNextByCron( cron, targetTime.minusSeconds( 1 ) );\n\n            if ( firstFireTime == null ) {\n                return context;\n            }\n\n            context.setThisScheduleTime( LocalDateTime.now() ); // 初始化用当前时间\n            context.setNextScheduleTime( firstFireTime ); // 已经向前推进了\n            element.setNextScheduleTime( firstFireTime );\n            this.mTaskNodeManipulator.update( element );\n            return context; // next 已经是下一次了\n        }\n\n\n        LocalDateTime advanced = ScheduleCronHelper.computeNextByCron( cron, next );\n        if ( advanced == null ) {\n            return context;\n        }\n\n        if ( !advanced.equals( next ) ) {\n            element.setNextScheduleTime( advanced );\n            context.setNextScheduleTime( advanced );\n            //this.mTaskNodeManipulator.update( element );\n        }\n\n        return context;\n    }\n\n    protected void prepareInstance( TaskScheduleContext context, RavenTaskInstance that ) {\n        LaunchFeature feature = new LaunchFeature();\n        InstanceEntry it = that.getInstanceEntry();\n        it.setExpectTime( context.getThisScheduleTime() ); // 先更新，后面会插入，妈的\n\n        this.mTaskExecutionLauncher.initializeInstance( that, feature );  // 这里会完成实例插入\n    }\n\n    protected void prepareInstanceLineage( TaskScheduleContext context, RavenTaskInstance instance ) {\n        TaskElement element = context.getElement();\n        GUID instanceGuid = 
instance.getInstanceEntry().getGuid();\n\n        GraphNode graphNode = this.mRuntimeAtlasInstrument.queryGraphNodeByTaskGuid( element.getGuid() );\n        List<GUID> parentIds = new ArrayList<>();\n\n        InstanceAtlasNode instanceNode = new GenericInstanceAtlasNode();\n\n        instanceNode.setGuid( this.mGuidAllocator.nextGUID() );\n        instanceNode.setInstanceGuid( instanceGuid );\n        instanceNode.setNodeName( instance.getOwnedTask().getName() );\n\n        if ( graphNode != null ) {\n            parentIds = this.mRuntimeAtlasInstrument.fetchParentIds( graphNode.getId() );\n            instanceNode.setIsIsolated( parentIds == null || parentIds.isEmpty() );\n        }\n        else {\n            instanceNode.setIsIsolated( true );\n        }\n\n        this.mInstanceAtlasNodeMapper.insert( instanceNode );\n\n        if ( parentIds != null && !parentIds.isEmpty() ) {\n            for ( GUID parentId : parentIds ) {\n                InstanceAtlasAdjacent adjacent = new GenericInstanceAtlasAdjacent();\n                adjacent.setGuid( this.mGuidAllocator.nextGUID() );\n                adjacent.setParentGuid( parentId );\n                this.mInstanceAtlasAdjacentMapper.insert( adjacent );\n            }\n        }\n    }\n\n    protected void persistTaskExec( TaskScheduleContext context, RavenTaskInstance instance ) {\n        TaskElement element = context.getElement();\n        GUID instanceGuid = instance.getInstanceEntry().getGuid();\n\n        InstanceExec exec = new GenericInstanceExec();\n        exec.setTaskGuid( element.getGuid() );\n        exec.setInstanceGuid( instanceGuid );\n        exec.setTaskName( instance.getOwnedTask().getName() );\n        exec.setInstanceName( instance.getInstanceEntry().getInstanceName() );\n        exec.setProcessorQueue( \"default\" );\n        exec.setClusterName( \"local_cluster\" );\n        exec.setExecState( TaskInstanceExecState.Submitted.getName() );\n        exec.setCurrentRetryNumber( 0 );\n        
exec.setRetryTimes( instance.getInstanceEntry().getRetryCnt() );\n        this.mInstanceExecMapper.insert( exec );\n    }\n\n    protected void triggerTaskEventTimeReady( TaskScheduleContext context, RavenTaskInstance instance ) {\n        TaskElement element = context.getElement();\n        GUID instanceGuid = instance.getInstanceEntry().getGuid();\n\n        InstanceEvent event = new GenericInstanceEvent();\n        event.setGuid( this.mGuidAllocator.nextGUID() );\n        event.setTaskGuid( element.getGuid() );\n        event.setInstanceGuid( instanceGuid );\n        event.setInstanceName( instance.getInstanceEntry().getInstanceName() );\n        event.setRetryTimes( instance.getInstanceEntry().getRetryCnt() );\n        event.setCurrentRetryNumber( 0 );\n        event.setEventType( instance.getTaskType() );\n        event.setState( InstanceEventType.TaskTimeReady.getName() );\n        event.setExecTime( LocalDateTime.now() );\n        event.setEventContext( \"{}\" );\n        this.mScheduleManipulator.getInstanceEventMapper().insert( event );\n    }\n\n    protected void prepareTaskInstances( Collection<TaskScheduleContext> contexts, LocalDateTime targetTime ) {\n        for ( TaskScheduleContext context : contexts ) {\n            TaskElement element = context.getElement();\n\n            RavenTask task = this.mCentralizedTaskInstrument.constructTask( element );\n            RavenTaskInstance instance = task.createInstance();\n\n            this.prepareInstance( context, instance );\n            this.prepareInstanceLineage( context, instance );\n            this.persistTaskExec( context, instance );\n            this.triggerTaskEventTimeReady( context, instance );\n        }\n    }\n\n    protected Collection<TaskElement> prepareScheduleTasks( Collection<TaskElement> elements, LocalDateTime targetTime ) {\n        if ( elements == null || elements.isEmpty() ) {\n            return elements;\n        }\n\n        Collection<TaskScheduleContext> contexts = new 
ArrayList<>();\n        for ( TaskElement element : elements ) {\n            TaskScheduleContext context = this.prepareTaskScheduleTimeOffset( element, targetTime );\n            contexts.add( context );\n        }\n\n        this.prepareTaskInstances( contexts, targetTime );\n        Debug.traceSyn( elements );\n        return elements;\n    }\n\n    @Override\n    public UniformTaskScheduler taskScheduler() {\n        return this.mTaskScheduler;\n    }\n\n\n    @Override\n    public void prepareSchedulableTasks( Collection<TaskScheduleCycle> cycles, LocalDateTime targetTime ) {\n        if ( targetTime == null ) {\n            targetTime = LocalDateTime.now();\n        }\n\n        TableIndex64Meta range = this.mTaskNodeManipulator.selectSchedulableIdRange( cycles, targetTime );\n        if ( range == null ) {\n            return;\n        }\n\n        long idMin = range.getMinId();\n        long idMax = range.getMaxId();\n        if ( idMin <= 0 || idMax <= 0 || idMax < idMin ) {\n            return;\n        }\n\n        long cursor = idMin;\n        while ( cursor <= idMax ) {\n            long windowStart = cursor;\n            long windowEnd   = cursor + this.mnScanIdWindow - 1;\n\n            if ( windowEnd > idMax ) {\n                windowEnd = idMax;\n            }\n\n            final long finalStart = windowStart;\n            final long finalEnd   = windowEnd;\n\n            LocalDateTime finalTargetTime = targetTime;\n            this.mExecutorService.submit( () -> {\n                try {\n                    log.info( \"[TaskSchedulerLifecycle] Preparing schedulable tasks (Start: {}, End: {}) <Start>\", finalStart, finalEnd );\n\n                    Collection<TaskElement> elements = this.mTaskNodeManipulator.fetchSchedulableTasksInRange(\n                            finalStart, finalEnd, cycles, finalTargetTime\n                    );\n\n                    elements = this.prepareScheduleTasks( elements, finalTargetTime );\n\n                    
log.info( \"[TaskSchedulerLifecycle] Preparing schedulable tasks (Start: {}, End: {}, Size: {}) <Done>\", finalStart, finalEnd, elements.size() );\n                }\n                catch ( Exception e ) {\n                    log.error( \"[TaskSchedulerLifecycle] Preparing schedulable tasks (Start: {}, End: {}) <Error>\", finalStart, finalEnd, e );\n                }\n            } );\n\n            cursor = windowEnd + 1;\n        }\n    }\n\n    @Override\n    public void prepareSchedulableTasksDaily( LocalDateTime targetTime ) {\n        this.prepareSchedulableTasks( DailyTaskScheduleCycles, targetTime );\n    }\n\n    @Override\n    public List<TaskElement> fetchSchedulableTasksInRange( long idMin, long idMax, Collection<TaskScheduleCycle> cycles, LocalDateTime targetTime ) {\n        return this.mTaskNodeManipulator.fetchSchedulableTasksInRange( idMin, idMax, cycles, targetTime );\n    }\n\n    @Override\n    public List<TaskElement> fetchSchedulableTasksDaily( long idMin, long idMax, LocalDateTime targetTime ) {\n        return this.mTaskNodeManipulator.fetchSchedulableTasksInRange( idMin, idMax, DailyTaskScheduleCycles, targetTime );\n    }\n\n\n\n\n\n\n    public static class TaskScheduleContext {\n        protected TaskElement element;\n        protected LocalDateTime targetTime;\n        protected LocalDateTime nextScheduleTime;\n        protected LocalDateTime thisScheduleTime;\n\n        public TaskScheduleContext( TaskElement element, LocalDateTime targetTime ) {\n            this.element = element;\n            this.targetTime = targetTime;\n        }\n\n        public TaskElement getElement() {\n            return this.element;\n        }\n\n        public void setElement( TaskElement element ) {\n            this.element = element;\n        }\n\n        public LocalDateTime getTargetTime() {\n            return this.targetTime;\n        }\n\n        public void setTargetTime( LocalDateTime targetTime ) {\n            this.targetTime = 
targetTime;\n        }\n\n        public LocalDateTime getNextScheduleTime() {\n            return this.nextScheduleTime;\n        }\n\n        public void setNextScheduleTime( LocalDateTime nextScheduleTime ) {\n            this.nextScheduleTime = nextScheduleTime;\n        }\n\n        public LocalDateTime getThisScheduleTime() {\n            return this.thisScheduleTime;\n        }\n\n        public void setThisScheduleTime( LocalDateTime thisScheduleTime ) {\n            this.thisScheduleTime = thisScheduleTime;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/RavenTaskScheduler.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport java.time.LocalDateTime;\nimport java.util.List;\n\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.UniformTaskInstrument;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\n\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\nimport com.walnut.odin.dispatch.TaskDispatcher;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTaskConfig;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\n\nimport lombok.extern.slf4j.Slf4j;\n\n@Slf4j\npublic class RavenTaskScheduler implements UniformTaskScheduler {\n\n    private RavenTaskConfig              mRavenTaskConfig;\n\n    private InstanceInstrument           mInstanceInstrument;\n    private UniformTaskInstrument        mUniformTaskInstrument;\n    private RuntimeAtlasInstrument       mRuntimeAtlasInstrument;\n    private CentralizedTaskInstrument    mCentralizedTaskInstrument;\n\n    private TaskExecutionLauncher        mTaskExecutionLauncher;\n    private TaskDispatcher               mTaskDispatcher;\n\n    private TaskSchedulePreparator       mTaskSchedulePreparator;\n    private InstanceScheduleImpetus      mInstanceScheduleImpetus;\n\n    private InstanceScheduleAllocator    mInstanceScheduleAllocator;\n    private String                       mszPartitionName;\n\n    public RavenTaskScheduler(\n            CentralizedTaskInstrument taskInstrument, RuntimeAtlasInstrument atlasInstrument,\n            TaskDispatcher dispatcher\n    ) {\n        log.info( \"[Odin] [CrucialSchedulerComponentLifecycle] (RavenTaskScheduler Construction) <Start>\" );\n\n        this.mCentralizedTaskInstrument  = taskInstrument;\n        this.mUniformTaskInstrument      = taskInstrument.getUniformTaskInstrument();\n        this.mInstanceInstrument         = this.mUniformTaskInstrument.getInstanceInstrument();\n        this.mRuntimeAtlasInstrument     = 
atlasInstrument;\n\n        this.mTaskExecutionLauncher      = dispatcher.taskExecutionLauncher();\n        this.mTaskDispatcher             = dispatcher;\n\n        this.mRavenTaskConfig            = (RavenTaskConfig) taskInstrument.getConfig();\n        this.mszPartitionName            = this.mRavenTaskConfig.getSchedulePartitionName();\n\n        this.mInstanceScheduleAllocator  = new RavenScheduleAllocator( this ); // [1]\n        this.mTaskSchedulePreparator     = new RavenTaskSchedulePreparator( this ); // [2]\n        this.mInstanceScheduleImpetus    = new RavenInstanceScheduleImpetus( this ); // [3]\n\n\n        log.info( \"[Odin] [CrucialSchedulerComponentLifecycle] (RavenTaskScheduler Construction) <Done>\" );\n    }\n\n\n    @Override\n    public TaskSchedulePreparator taskSchedulePreparator() {\n        return this.mTaskSchedulePreparator;\n    }\n\n    @Override\n    public InstanceScheduleImpetus instanceScheduleImpetus() {\n        return this.mInstanceScheduleImpetus;\n    }\n\n    @Override\n    public InstanceScheduleAllocator instanceScheduleAllocator() {\n        return this.mInstanceScheduleAllocator;\n    }\n\n    @Override\n    public RavenTaskConfig ravenTaskConfig() {\n        return this.mRavenTaskConfig;\n    }\n\n    @Override\n    public CentralizedTaskInstrument taskInstrument() {\n        return this.mCentralizedTaskInstrument;\n    }\n\n    @Override\n    public InstanceInstrument instanceInstrument() {\n        return this.mInstanceInstrument;\n    }\n\n    @Override\n    public RuntimeAtlasInstrument atlasInstrument() {\n        return this.mRuntimeAtlasInstrument;\n    }\n\n    @Override\n    public TaskExecutionLauncher taskExecutionLauncher() {\n        return this.mTaskExecutionLauncher;\n    }\n\n    @Override\n    public TaskDispatcher taskDispatcher() {\n        return this.mTaskDispatcher;\n    }\n\n    @Override\n    public String getPartitionName() {\n        return this.mszPartitionName;\n    }\n\n    public void fetch() 
{\n        //this.mTaskSchedulePreparator.prepareSchedulableTasksDaily( LocalDateTime.now() );\n        this.mInstanceScheduleImpetus.impelPrelaunchInstances( LocalDateTime.now() );\n    }\n\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/conduct/schedule/ScheduleCronHelper.java",
    "content": "package com.walnut.odin.conduct.schedule;\n\nimport java.text.ParseException;\nimport java.time.DayOfWeek;\nimport java.time.LocalDate;\nimport java.time.LocalDateTime;\nimport java.time.ZoneId;\nimport java.util.Date;\n\nimport org.quartz.CronExpression;\n\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\n\npublic final class ScheduleCronHelper {\n\n    private static final ZoneId DEFAULT_ZONE_ID = ZoneId.systemDefault();\n\n    private ScheduleCronHelper() {\n    }\n\n    public static String generateDefaultCron( TaskScheduleCycle cycle ) {\n        if ( cycle == null ) {\n            throw new IllegalArgumentException( \"TaskScheduleCycle is null.\" );\n        }\n\n        switch ( cycle ) {\n            case Minute: {\n                return \"0 * * * * ?\";\n            }\n            case Hour: {\n                return \"0 0 * * * ?\";\n            }\n            case Day: {\n                return \"0 0 0 * * ?\";\n            }\n            case Week: {\n                return \"0 0 0 ? 
* MON\";\n            }\n            case Month: {\n                return \"0 0 0 1 * ?\";\n            }\n            default: {\n                throw new IllegalStateException( \"Unsupported cycle: \" + cycle );\n            }\n        }\n    }\n\n    public static LocalDateTime alignToCycleStart( TaskScheduleCycle cycle, LocalDateTime referenceTime ) {\n        if ( cycle == null ) {\n            throw new IllegalArgumentException( \"TaskScheduleCycle is null.\" );\n        }\n\n        if ( referenceTime == null ) {\n            throw new IllegalArgumentException( \"Reference time is null.\" );\n        }\n\n        switch ( cycle ) {\n            case Minute: {\n                return referenceTime\n                        .withSecond( 0 )\n                        .withNano( 0 );\n            }\n            case Hour: {\n                return referenceTime\n                        .withMinute( 0 )\n                        .withSecond( 0 )\n                        .withNano( 0 );\n            }\n            case Day: {\n                LocalDate date = referenceTime.toLocalDate();\n                return date.atStartOfDay();\n            }\n            case Week: {\n                LocalDate date = referenceTime\n                        .toLocalDate()\n                        .with( DayOfWeek.MONDAY );\n                return date.atStartOfDay();\n            }\n            case Month: {\n                LocalDate date = referenceTime\n                        .withDayOfMonth( 1 )\n                        .toLocalDate();\n                return date.atStartOfDay();\n            }\n            default: {\n                throw new IllegalStateException( \"Unsupported cycle: \" + cycle );\n            }\n        }\n    }\n\n    public static LocalDateTime advanceByCycle( TaskScheduleCycle cycle, LocalDateTime currentTime ) {\n        if ( cycle == null ) {\n            throw new IllegalArgumentException( \"TaskScheduleCycle is null.\" );\n        }\n\n        
if ( currentTime == null ) {\n            throw new IllegalArgumentException( \"Current time is null.\" );\n        }\n\n        switch ( cycle ) {\n            case Minute: {\n                return currentTime.plusMinutes( 1 );\n            }\n            case Hour: {\n                return currentTime.plusHours( 1 );\n            }\n            case Day: {\n                return currentTime.plusDays( 1 );\n            }\n            case Week: {\n                return currentTime.plusWeeks( 1 );\n            }\n            case Month: {\n                return currentTime.plusMonths( 1 );\n            }\n            default: {\n                throw new IllegalStateException( \"Unsupported cycle: \" + cycle );\n            }\n        }\n    }\n\n\n    public static LocalDateTime computeNextScheduleTime(\n            TaskScheduleCycle cycle, LocalDateTime nextScheduleTime, LocalDateTime referenceTime\n    ) {\n        if ( cycle == null ) {\n            throw new IllegalArgumentException( \"TaskScheduleCycle is null.\" );\n        }\n\n        if ( referenceTime == null ) {\n            throw new IllegalArgumentException( \"Reference time is null.\" );\n        }\n\n        if ( nextScheduleTime == null ) {\n            return ScheduleCronHelper.alignToCycleStart( cycle, referenceTime );\n        }\n\n        if ( nextScheduleTime.isAfter( referenceTime ) ) {\n            return nextScheduleTime;\n        }\n\n        LocalDateTime advanced = nextScheduleTime;\n\n        while ( !advanced.isAfter( referenceTime ) ) {\n            advanced = ScheduleCronHelper.advanceByCycle( cycle, advanced );\n        }\n\n        return advanced;\n    }\n\n    public static LocalDateTime computeNextByCron( String cron, LocalDateTime currentFireTime ) {\n        try {\n            CronExpression expression = new CronExpression( cron );\n            Date next = expression.getNextValidTimeAfter( Date.from(\n                    currentFireTime\n                            
.atZone( DEFAULT_ZONE_ID )\n                            .toInstant()\n            ));\n            if ( next == null ) {\n                return null;\n            }\n\n            return LocalDateTime.ofInstant(\n                    next.toInstant(),\n                    DEFAULT_ZONE_ID\n            );\n\n        }\n        catch ( ParseException e ) {\n            throw new IllegalStateException(\n                    \"Invalid cron expression: \" + cron,\n                    e\n            );\n        }\n    }\n\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/AdaptiveCapacityDispatchStrategy.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.PriorityQueue;\n\nimport com.walnut.odin.task.troll.LaunchFeature;\n\npublic class AdaptiveCapacityDispatchStrategy implements DispatchStrategy {\n\n    protected static final int DEFAULT_HEAP_THRESHOLD = 16;\n\n    protected final int mnHeapThreshold;\n\n    protected static class ProcessorSlot {\n\n        protected TaskExecutionProcessor mProcessor;\n        protected int                    mnRemaining;\n\n        protected ProcessorSlot( TaskExecutionProcessor processor, int nRemaining ) {\n            this.mProcessor   = processor;\n            this.mnRemaining  = nRemaining;\n        }\n    }\n\n    public AdaptiveCapacityDispatchStrategy() {\n        this( DEFAULT_HEAP_THRESHOLD );\n    }\n\n    public AdaptiveCapacityDispatchStrategy( int nHeapThreshold ) {\n        this.mnHeapThreshold = nHeapThreshold > 0 ? 
nHeapThreshold : DEFAULT_HEAP_THRESHOLD;\n    }\n\n    protected Map<String, ProcessorSlot> buildProcessorSlots( Collection<TaskExecutionProcessor> processors ) {\n        Map<String, ProcessorSlot> slotMap = new HashMap<>();\n\n        for ( TaskExecutionProcessor processor : processors ) {\n            if ( processor.isExclusive() ) {\n                continue;\n            }\n\n            int nPending = processor.getTaskExecutionQueue().pendingCapacity();\n            if ( nPending <= 0 ) {\n                continue;\n            }\n            ProcessorSlot slot = new ProcessorSlot( processor, nPending );\n            slotMap.put( processor.getName(), slot );\n        }\n        return slotMap;\n    }\n\n    protected List<TaskLaunchContext> handleBindingContexts(\n            Collection<TaskLaunchContext> contexts,\n            Map<String, ProcessorSlot> slotMap,\n            Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> plan,\n            TaskDispatcher dispatcher\n    ) throws TaskDispatchException {\n\n        List<TaskLaunchContext> remaining = new ArrayList<>();\n\n        for ( TaskLaunchContext context : contexts ) {\n            String szTarget = null;\n            boolean bStrong = false;\n\n            LaunchFeature feature = context.getLaunchFeature();\n            if ( feature != null && feature.getProcessorDesignated() != null ) {\n                szTarget = feature.getProcessorDesignated();\n                bStrong = true;\n            }\n            else {\n                szTarget = context.getAffinityProcessorName();\n                if ( szTarget == null ) {\n                    TaskExecutionProcessor p = dispatcher.getAffinityTasks( context.getTaskId() );\n                    if ( p != null ) {\n                        szTarget = p.getName();\n                    }\n                }\n            }\n\n            if ( szTarget == null ) {\n                remaining.add( context );\n                continue;\n            }\n\n   
         ProcessorSlot slot = slotMap.get( szTarget );\n            if ( slot == null ) {\n                if ( bStrong ) {\n                    throw new TaskDispatchException(\n                            \"Designated processor `\" + szTarget + \"` not available.\"\n                    );\n                }\n                remaining.add( context );\n                continue;\n            }\n\n            if ( slot.mnRemaining <= 0 ) {\n                if ( bStrong ) {\n                    throw new TaskDispatchException(\n                            \"Designated processor `\" + szTarget + \"` capacity exceeded.\"\n                    );\n                }\n                remaining.add( context );\n                continue;\n            }\n\n            plan.computeIfAbsent( slot.mProcessor, k -> new ArrayList<>() ).add( context );\n            --slot.mnRemaining;\n        }\n\n        return remaining;\n    }\n\n    protected void dispatchNormal(\n            Map<String, ProcessorSlot> slotMap,\n            List<TaskLaunchContext> contexts,\n            Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> plan\n    ) {\n        if ( slotMap.size() <= this.mnHeapThreshold ) {\n            this.dispatchLinear( slotMap, contexts, plan );\n        }\n        else {\n            this.dispatchHeap( slotMap, contexts, plan );\n        }\n    }\n\n    @Override\n    public Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> dispatch(\n            Collection<TaskExecutionProcessor> processors,\n            Collection<TaskLaunchContext> contexts,\n            TaskDispatcher dispatcher\n    ) throws TaskDispatchException {\n        Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> plan = new HashMap<>();\n\n        if ( processors == null || processors.isEmpty() ) {\n            return plan;\n        }\n        if ( contexts == null || contexts.isEmpty() ) {\n            return plan;\n        }\n\n        Map<String, ProcessorSlot> slotMap = 
this.buildProcessorSlots( processors );\n        if ( slotMap.isEmpty() ) {\n            return plan;\n        }\n\n        List<TaskLaunchContext> remaining = this.handleBindingContexts( contexts, slotMap, plan, dispatcher );\n        if ( remaining.isEmpty() ) {\n            return plan;\n        }\n\n        this.dispatchNormal( slotMap, remaining, plan );\n        return plan;\n    }\n\n    protected void dispatchLinear(\n            Map<String, ProcessorSlot> slotMap,\n            Collection<TaskLaunchContext> contexts,\n            Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> plan\n    ) {\n        List<ProcessorSlot> slots = new ArrayList<>( slotMap.values() );\n\n        for ( TaskLaunchContext context : contexts ) {\n            ProcessorSlot best = null;\n            for ( ProcessorSlot slot : slots ) {\n                if ( slot.mnRemaining <= 0 ) {\n                    continue;\n                }\n\n                if ( best == null || this.compareSlot( slot, best ) < 0 ) {\n                    best = slot;\n                }\n            }\n\n            if ( best == null ) {\n                break;\n            }\n\n            plan.computeIfAbsent( best.mProcessor, k -> new ArrayList<>() ).add( context );\n            --best.mnRemaining;\n        }\n    }\n\n    protected void dispatchHeap(\n            Map<String, ProcessorSlot> slotMap,\n            Collection<TaskLaunchContext> contexts,\n            Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> plan\n    ) {\n        PriorityQueue<ProcessorSlot> heap = new PriorityQueue<>( this::compareSlot );\n        for ( ProcessorSlot slot : slotMap.values() ) {\n            if ( slot.mnRemaining > 0 ) {\n                heap.offer( slot );\n            }\n        }\n\n        for ( TaskLaunchContext context : contexts ) {\n            ProcessorSlot slot = heap.poll();\n            if ( slot == null ) {\n                break;\n            }\n\n            plan.computeIfAbsent( 
slot.mProcessor, k -> new ArrayList<>() ).add( context );\n\n            --slot.mnRemaining;\n\n            if ( slot.mnRemaining > 0 ) {\n                heap.offer( slot );\n            }\n        }\n    }\n\n    protected int compareSlot( ProcessorSlot a, ProcessorSlot b ) {\n        if ( a.mnRemaining != b.mnRemaining ) {\n            return Integer.compare( b.mnRemaining, a.mnRemaining );\n        }\n\n        if ( a.mProcessor.getPriority() != b.mProcessor.getPriority() ) {\n            return Integer.compare( b.mProcessor.getPriority(), a.mProcessor.getPriority() );\n        }\n\n        return a.mProcessor.getName().compareTo( b.mProcessor.getName() );\n    }\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/DefaultPipelineLaunchReport.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.Collection;\nimport java.util.Collections;\n\nimport com.pinecone.hydra.proc.UProcess;\n\npublic class DefaultPipelineLaunchReport implements PipelineLaunchReport {\n\n    protected TaskExecutionProcessor          mProcessor;\n    protected Collection<UProcess>            mLaunchedProcesses;\n    protected Collection<TaskLaunchContext>   mLaunchedContext;\n    protected Collection<TaskLaunchContext>   mWaitingContext;\n    protected boolean                         mbPreparing;\n\n    protected DefaultPipelineLaunchReport(\n            TaskExecutionProcessor processor,\n            Collection<UProcess> launchedProcesses,\n            Collection<TaskLaunchContext> launchedContext,\n            Collection<TaskLaunchContext> waitingContext,\n            boolean preparing\n    ) {\n        this.mProcessor          = processor;\n        this.mLaunchedProcesses  = launchedProcesses;\n        this.mLaunchedContext    = launchedContext;\n        this.mWaitingContext     = waitingContext;\n        this.mbPreparing         = preparing;\n    }\n\n    public static DefaultPipelineLaunchReport preparing(\n            TaskExecutionProcessor processor,\n            Collection<UProcess> launchedProcesses,\n            Collection<TaskLaunchContext> waitingContext\n    ) {\n        return new DefaultPipelineLaunchReport(\n                processor,\n                launchedProcesses,\n                Collections.emptyList(),\n                waitingContext,\n                true\n        );\n    }\n\n    public static DefaultPipelineLaunchReport executed(\n            TaskExecutionProcessor processor,\n            Collection<UProcess> launchedProcesses,\n            Collection<TaskLaunchContext> launchedContext,\n            Collection<TaskLaunchContext> waitingContext\n    ) {\n        return new DefaultPipelineLaunchReport(\n                processor,\n                launchedProcesses,\n                
launchedContext,\n                waitingContext,\n                false\n        );\n    }\n\n    public static DefaultPipelineLaunchReport recycled(\n            TaskExecutionProcessor processor,\n            Collection<TaskLaunchContext> recycled\n    ) {\n        return new DefaultPipelineLaunchReport(\n                processor,\n                Collections.emptyList(),\n                Collections.emptyList(),\n                recycled,\n                false\n        );\n    }\n\n    @Override\n    public Collection<UProcess> launchedProcesses() {\n        return this.mLaunchedProcesses;\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> launchedContext() {\n        return this.mLaunchedContext;\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> waitingContext() {\n        return this.mWaitingContext;\n    }\n\n    @Override\n    public boolean isPreparing() {\n        return this.mbPreparing;\n    }\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/GenericI32TaskQueue.java",
    "content": "package com.walnut.odin.dispatch;\n\npublic class GenericI32TaskQueue extends ArchTaskExecutionI32Queue implements TaskExecutionQueue {\n\n    public GenericI32TaskQueue( TaskQueueMeta queueMeta ) {\n        super();\n\n        if ( queueMeta == null ) {\n            throw new IllegalArgumentException( \"TaskQueueMeta cannot be null.\" );\n        }\n        this.mszName                    = queueMeta.getName();\n        this.mnMaxCapacity              = queueMeta.getMaxCapacity();\n        this.mnMinCapacity              = queueMeta.getMinCapacity();\n        this.mnRuntimeInstanceCapacity  = queueMeta.getRuntimeInstanceCapacity();\n\n        this.validateInitialMeta();\n    }\n\n    private void validateInitialMeta() {\n        if ( this.mnMaxCapacity < 0 ) {\n            throw new IllegalArgumentException( \"Max capacity cannot be negative.\" );\n        }\n\n        if ( this.mnMinCapacity < 0 ) {\n            throw new IllegalArgumentException( \"Min capacity cannot be negative.\" );\n        }\n\n        if ( this.mnRuntimeInstanceCapacity < 0 ) {\n            throw new IllegalArgumentException( \"Runtime instance capacity cannot be negative.\" );\n        }\n\n        if ( this.mnMinCapacity > this.mnMaxCapacity ) {\n            throw new IllegalArgumentException( \"Min capacity cannot exceed max capacity.\" );\n        }\n\n        if ( this.mnRuntimeInstanceCapacity > this.mnMaxCapacity ) {\n            throw new IllegalArgumentException( \"Runtime instance capacity cannot exceed max capacity.\" );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/RavenTaskDispatcher.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\n\nimport java.util.concurrent.locks.ReentrantLock;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.walnut.odin.conduct.CollectiveTaskRegiment;\nimport com.walnut.odin.dispatch.entity.TaskProcessorEntity;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.source.TaskProcessorManipulator;\nimport com.walnut.odin.task.troll.InstanceLaunchException;\nimport com.walnut.odin.task.troll.LaunchFeature;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\n\n\npublic class RavenTaskDispatcher implements TaskDispatcher {\n\n    protected final Logger log = LoggerFactory.getLogger( this.getClass() );\n\n    protected final ReentrantLock                        mLock;\n\n    protected final Map<String, TaskExecutionProcessor>  mProcessors;\n    protected final Map<Long, TaskExecutionProcessor>    mClientProcessorsIndex;\n    protected final Map<Identification, TaskProcPair>    mAffinityTable;\n\n\n    protected TaskProcessorManipulator  mTaskProcessorManipulator;\n    protected DispatchStrategy          mDispatchStrategy;\n    protected TaskExecutionLauncher     mTaskExecutionLauncher;\n    protected CollectiveTaskRegiment    mCollectiveTaskRegiment;\n\n    public RavenTaskDispatcher( CollectiveTaskRegiment regiment, DispatchStrategy strategy ) {\n        this.mLock                       = new ReentrantLock();\n        this.mProcessors                 = new LinkedHashMap<>();\n        this.mAffinityTable              = new HashMap<>();\n        this.mDispatchStrategy           = strategy;\n        this.mClientProcessorsIndex      = new HashMap<>();\n        
this.mCollectiveTaskRegiment     = regiment;\n        this.mTaskExecutionLauncher      = regiment.taskExecutionLauncher();\n        this.mTaskProcessorManipulator   = regiment.taskInstrument().getRavenTaskMasterManipulator().getTaskProcessorManipulator();\n    }\n\n    public RavenTaskDispatcher( CollectiveTaskRegiment regiment ) {\n        this( regiment, new AdaptiveCapacityDispatchStrategy() );\n    }\n\n\n    @Override\n    public TaskExecutionLauncher taskExecutionLauncher() {\n        return this.mTaskExecutionLauncher;\n    }\n\n    @Override\n    public void registerProcessor( TaskExecutionProcessor processor ) {\n        this.mLock.lock();\n        try {\n            this.mProcessors.put( processor.getName(), processor );\n            this.mClientProcessorsIndex.put( processor.getControlClientId(), processor );\n            this.log.info( \"Registered processor, name:`{}`, clientId:`{}` \", processor.getName(), processor.getControlClientId() );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public TaskProcessorEntity registerProcessor( String szProcessorName, long nClientId ) throws IllegalArgumentException {\n        TaskProcessorEntity entity = this.mTaskProcessorManipulator.selectByProcessorName( szProcessorName );\n        if ( entity == null ) {\n            throw new IllegalArgumentException( szProcessorName + \" not found\" );\n        }\n\n        entity.setControlClientId( nClientId );\n        TaskExecutionProcessor processor = new RavenTaskExecutionProcessor( entity, this.mTaskExecutionLauncher );\n        this.registerProcessor( processor );\n        return entity;\n    }\n\n    @Override\n    public void unregisterProcessor( String szProcessorName ) {\n        this.mLock.lock();\n        try {\n            this.mProcessors.remove( szProcessorName );\n            this.mAffinityTable.entrySet().removeIf( entry -> {\n                if ( entry.getValue().processor.getName().equals( 
szProcessorName ) ) {\n                    return true;\n                }\n                return false;\n            } );\n            this.log.info( \"Unregistered processor, name:`{}`\", szProcessorName );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public void unregisterProcessor( long nClientId ) {\n        TaskExecutionProcessor processor = null;\n        this.mLock.lock();\n        try {\n            processor = this.mClientProcessorsIndex.remove( nClientId );\n        }\n        finally {\n            this.mLock.unlock();\n\n            if ( processor != null ) {\n                this.unregisterProcessor( processor.getName() );\n                this.log.info( \"Unregistered processor, name:`{}`, clientId:`{}` \", processor.getName(), processor.getControlClientId() );\n            }\n        }\n    }\n\n    @Override\n    public Collection<TaskExecutionProcessor> fetchProcessors() {\n        this.mLock.lock();\n        try {\n            return Collections.unmodifiableCollection(\n                    new ArrayList<>( this.mProcessors.values() )\n            );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public void setProcessorAffinity( String szProcessorName, TaskLaunchContext launchContext ) {\n        this.mLock.lock();\n        try {\n            TaskExecutionProcessor processor = this.mProcessors.get( szProcessorName );\n            if ( processor == null ) {\n                throw new IllegalArgumentException( \"Processor not found: \" + szProcessorName );\n            }\n            this.mAffinityTable.put( launchContext.getTaskId(), new TaskProcPair( processor, launchContext ) );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public TaskExecutionProcessor getAffinityTasks( Identification taskId ) {\n        TaskProcPair pair = this.mAffinityTable.get( taskId );\n        if ( 
pair != null ) {\n            return pair.processor;\n        }\n        return null;\n    }\n\n    @Override\n    public Collection<TaskLaunchContext> queryAffinityTasks( String szProcessorName ) {\n        this.mLock.lock();\n        try {\n            Collection<TaskLaunchContext> result = new ArrayList<>();\n            for ( TaskProcPair pair : this.mAffinityTable.values() ) {\n                if ( pair.processor.getName().equals( szProcessorName ) ) {\n                    result.add( pair.launchContext );\n                }\n            }\n            return result;\n        }\n        finally {\n            this.mLock.unlock();\n        }\n    }\n\n    @Override\n    public PipelineLaunchReport pipeCreate( Collection<TaskLaunchContext> contexts ) throws InstanceLaunchException, TaskDispatchException {\n        Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> plan;\n\n        this.mLock.lock();\n        try {\n            plan = this.mDispatchStrategy.dispatch(\n                    new ArrayList<>( this.mProcessors.values() ), contexts, this\n            );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n\n        return this.executeScheme( plan, true );\n    }\n\n    @Override\n    public PipelineLaunchReport pipeLaunch( Collection<TaskLaunchContext> contexts ) throws InstanceLaunchException, TaskDispatchException {\n        Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> plan;\n\n        this.mLock.lock();\n        try {\n            plan = this.mDispatchStrategy.dispatch(\n                    new ArrayList<>( this.mProcessors.values() ), contexts, this\n            );\n        }\n        finally {\n            this.mLock.unlock();\n        }\n\n        return this.executeScheme( plan, false );\n    }\n\n    protected PipelineLaunchReport executeScheme(\n            Map<TaskExecutionProcessor, Collection<TaskLaunchContext>> scheme, boolean bCreation\n    ) throws InstanceLaunchException, TaskDispatchException 
{\n        List<UProcess> launched = new ArrayList<>();\n        List<TaskLaunchContext> consumed = new ArrayList<>();\n        List<TaskLaunchContext> waiting  = new ArrayList<>();\n\n        for ( Map.Entry<TaskExecutionProcessor, Collection<TaskLaunchContext>> entry : scheme.entrySet() ) {\n            TaskExecutionProcessor processor = entry.getKey();\n            Collection<TaskLaunchContext> assigned = entry.getValue();\n\n            PipelineLaunchReport report;\n\n            if ( bCreation ) {\n                report = processor.pipeCreate( assigned );\n            }\n            else {\n                report = processor.pipeLaunch( assigned );\n            }\n\n            launched.addAll( report.launchedProcesses() );\n            consumed.addAll( report.launchedContext() );\n            waiting.addAll( report.waitingContext() );\n        }\n\n        return DefaultPipelineLaunchReport.executed(\n                null,\n                launched,\n                consumed,\n                waiting\n        );\n    }\n\n    @Override\n    public UProcess create( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException {\n        TaskLaunchContext context = TaskLaunchContext.of( instance, feature );\n        PipelineLaunchReport _r = this.pipeCreate( List.of( context ) );\n        return context.getLaunchedProcess();\n    }\n\n    @Override\n    public UProcess launch( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException, TaskDispatchException {\n        TaskLaunchContext context = TaskLaunchContext.of( instance, feature );\n        PipelineLaunchReport _r = this.pipeLaunch( List.of( context ) );\n        return context.getLaunchedProcess();\n    }\n\n\n    protected static class TaskProcPair {\n        public TaskExecutionProcessor processor;\n        public TaskLaunchContext launchContext;\n\n        public TaskProcPair( TaskExecutionProcessor processor, TaskLaunchContext 
launchContext ) {\n            this.processor = processor;\n            this.launchContext = launchContext;\n        }\n    }\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/dispatch/RavenTaskExecutionProcessor.java",
    "content": "package com.walnut.odin.dispatch;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.deploy.Server;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.walnut.odin.dispatch.entity.TaskProcessorEntity;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.troll.InstanceLaunchException;\nimport com.walnut.odin.task.troll.LaunchFeature;\nimport com.walnut.odin.task.troll.TaskExecutionLauncher;\n\npublic class RavenTaskExecutionProcessor implements TaskExecutionProcessor {\n\n    protected String                          mszName;\n    protected Server                          mDeployClusterServer;\n    protected String                          mszClusterPath;\n    protected String                          mszClusterName;\n    protected long                            mnControlClientId;\n    protected boolean                         mbLocal;\n    protected int                             mnPriority;\n    protected boolean                         mbExclusive;\n\n    protected TaskExecutionQueue              mTaskExecutionQueue;\n    protected TaskExecutionLauncher           mTaskExecutionLauncher;\n\n    protected Map<GUID, TaskLaunchContext>    mRunningProcesses;\n    protected ConsumeCompromisedPolice        mConsumeCompromisedPolice;\n\n    protected Logger                          log = LoggerFactory.getLogger( this.getClass() );\n\n    public RavenTaskExecutionProcessor( TaskProcessorEntity 
processorEntity, TaskExecutionQueue queue, TaskExecutionLauncher launcher ) {\n        this.mszName                     = processorEntity.getName();\n        this.mDeployClusterServer        = processorEntity.getDeployClusterServer();\n        this.mszClusterPath              = processorEntity.getClusterPath();\n        this.mszClusterName              = processorEntity.getClusterName();\n        this.mnControlClientId           = processorEntity.getControlClientId();\n        this.mbLocal                     = processorEntity.isLocal();\n        this.mnPriority                  = processorEntity.getPriority();\n        this.mbExclusive                 = processorEntity.isExclusive();\n        this.mTaskExecutionQueue         = queue;\n        this.mTaskExecutionLauncher      = launcher;\n        this.mRunningProcesses           = new ConcurrentHashMap<>();\n        this.mConsumeCompromisedPolice   = ConsumeCompromisedPolice.EvictionException; // TODO, Advance\n    }\n\n    public RavenTaskExecutionProcessor( TaskProcessorEntity processorEntity, TaskExecutionLauncher launcher ) {\n        this( processorEntity, new GenericI32TaskQueue( processorEntity.getTaskQueueMeta() ), launcher );\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public Server getDeployClusterServer() {\n        return this.mDeployClusterServer;\n    }\n\n    @Override\n    public String getClusterPath() {\n        return this.mszClusterPath;\n    }\n\n    @Override\n    public String getClusterName() {\n        return this.mszClusterName;\n    }\n\n    @Override\n    public long getControlClientId() {\n        return this.mnControlClientId;\n    }\n\n    @Override\n    public TaskExecutionQueue getTaskExecutionQueue() {\n        return this.mTaskExecutionQueue;\n    }\n\n    @Override\n    public boolean isLocal() {\n        return this.mbLocal;\n    }\n\n    @Override\n    public int getPriority() {\n        return 
this.mnPriority;\n    }\n\n    @Override\n    public boolean isExclusive() {\n        return this.mbExclusive;\n    }\n\n    @Override\n    public TaskLaunchContext getTaskLaunchContextByPID( GUID pid ) {\n        return this.mRunningProcesses.get( pid );\n    }\n\n    @Override\n    public int getRunningSize() {\n        return  this.mRunningProcesses.size();\n    }\n\n    @Override\n    public int getWaitingSize() {\n        return this.mTaskExecutionQueue.waitingSize();\n    }\n\n    protected void prepareSysEventHandle( LaunchFeature feature ) {\n        feature.withSysProcEventHandlers(new ProcessEventHandler() {\n            @Override\n            public void fired( EntryPointRunnable runnable, ProcessEvent event ) {\n                if ( ProcessEvent.Terminated == event || ProcessEvent.Error == event ) {\n                    UProcess process = runnable.ownedProcess();\n                    TaskLaunchContext context = getTaskLaunchContextByPID( process.getPID() );\n                    try {\n                        afterProcessTerminated( process, context );\n                        log.info(\n                                \"[ProcessSystemEventTriggered] ( ProcName:`{}`, ProcEvent:`{}`, PID:`{}`, InstanceId:`{}` ) <Scavenged>\",\n                                runnable.ownedProcess().getName(), event.getName(),\n                                runnable.ownedProcess().getPID(), context.getTaskInstance().getId()\n                        );\n                    }\n                    catch ( TaskDispatchException e ) {\n                        // 实例错误处理在实例元数据专门回调函数中统一处理，这里不用管了\n                        log.error(\n                                \"[ProcessSystemEventTriggered] ( ProcName:`{}`, ProcEvent:`{}`, PID:`{}`, InstanceId:`{}`, What:`{}` ) <Compromised>\",\n                                runnable.ownedProcess().getName(), event.getName(),\n                                runnable.ownedProcess().getPID(), context.getTaskInstance().getId(),\n             
                   e.getMessage(), e\n                        );\n                        handleAsyncTaskDispatchException( runnable, event, e );\n                    }\n                }\n            }\n        });\n    }\n\n    protected void handleAsyncTaskDispatchException( EntryPointRunnable runnable, ProcessEvent event, TaskDispatchException e ) {\n        // TODO, 暂时默认驱逐，后面再说\n    }\n\n    @Override\n    public UProcess directlyCreate( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException {\n        this.prepareSysEventHandle( feature );\n\n        if ( this.mbLocal ) {\n            return this.mTaskExecutionLauncher.createLocally( instance, feature );\n        }\n\n        return this.mTaskExecutionLauncher.createRemotely(\n                instance,\n                this.mnControlClientId,\n                feature\n        );\n    }\n\n    @Override\n    public UProcess directlyLaunch( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException {\n        this.prepareSysEventHandle( feature );\n\n        if ( this.mbLocal ) {\n            return this.mTaskExecutionLauncher.launchLocally( instance, feature );\n        }\n\n        return this.mTaskExecutionLauncher.launchRemotely(\n                instance,\n                this.mnControlClientId,\n                feature\n        );\n    }\n\n\n\n\n    protected Collection<TaskLaunchContext> subtractContext( Collection<TaskLaunchContext> source, Collection<TaskLaunchContext> consumed ) {\n        if ( source == null || source.isEmpty() ) {\n            return Collections.emptyList();\n        }\n        if ( consumed == null || consumed.isEmpty() ) {\n            return source;\n        }\n\n        Set<Identification> consumedIds = new HashSet<>( consumed.size() );\n        for ( TaskLaunchContext ctx : consumed ) {\n            consumedIds.add( ctx.getTaskInstance().getId() );\n        }\n\n        List<TaskLaunchContext> waiting = new ArrayList<>();\n   
     for ( TaskLaunchContext ctx : source ) {\n            if ( !consumedIds.contains( ctx.getTaskInstance().getId() ) ) {\n                waiting.add( ctx );\n            }\n        }\n\n        return waiting;\n    }\n\n    protected void afterProcessLaunched( UProcess process, TaskLaunchContext context ) {\n        context.afterProcessLaunched( process );\n        this.mRunningProcesses.put( process.getPID(), context );\n    }\n\n    protected void afterProcessTerminated( UProcess process, TaskLaunchContext context ) throws TaskDispatchException {\n        this.mRunningProcesses.remove( process.getPID() );\n        this.shiftLaunchsPipeline( List.of( context.getTaskInstance().getId() ) );\n    }\n\n    @Override\n    public PipelineLaunchReport prepare( Collection<TaskLaunchContext> contexts ) throws TaskDispatchException {\n        this.mTaskExecutionQueue.offer( contexts );\n\n        return DefaultPipelineLaunchReport.preparing(\n                this,\n                Collections.emptyList(),\n                contexts\n        );\n    }\n\n    protected PipelineLaunchReport pipeOpt( Collection<TaskLaunchContext> contexts, boolean directlyLaunch ) throws TaskDispatchException {\n        RTaskInstanceConsumer consumer = new RTaskInstanceConsumer( directlyLaunch );\n        Collection<TaskLaunchContext> consumed = this.mTaskExecutionQueue.pipeConsume( contexts, consumer );\n        List<UProcess> launched = consumer.getLaunched();\n\n        Collection<TaskLaunchContext> waiting = this.subtractContext( contexts, consumed );\n        return DefaultPipelineLaunchReport.executed(\n                this,\n                launched,\n                consumed,\n                waiting\n        );\n    }\n\n    @Override\n    public PipelineLaunchReport pipeCreate(Collection<TaskLaunchContext> contexts ) throws TaskDispatchException {\n        return this.pipeOpt( contexts, false );\n    }\n\n    @Override\n    public PipelineLaunchReport 
pipeLaunch(Collection<TaskLaunchContext> contexts ) throws TaskDispatchException {\n        return this.pipeOpt( contexts, true );\n    }\n\n    @Override\n    public PipelineLaunchReport recycleTerminated(Collection<Identification> terminatedIds ) {\n        Collection<TaskLaunchContext> recycled = this.mTaskExecutionQueue.recycleTerminated( terminatedIds );\n\n        return DefaultPipelineLaunchReport.recycled(\n                this,\n                recycled\n        );\n    }\n\n    @Override\n    public PipelineLaunchReport launchsPending() throws TaskDispatchException {\n        RTaskInstanceConsumer consumer = new RTaskInstanceConsumer( true );\n        Collection<TaskLaunchContext> consumed = this.mTaskExecutionQueue.consumePending( consumer );\n        List<UProcess> launched = consumer.getLaunched();\n\n        return DefaultPipelineLaunchReport.executed(\n                this,\n                launched,\n                consumed,\n                Collections.emptyList()\n        );\n    }\n\n    @Override\n    public PipelineLaunchReport shiftLaunchsPipeline(Collection<Identification> terminatedIds ) throws TaskDispatchException {\n        RTaskInstanceConsumer consumer = new RTaskInstanceConsumer( true );\n        Collection<TaskLaunchContext> consumed = this.mTaskExecutionQueue.shiftPipeline( terminatedIds, consumer );\n        List<UProcess> launched = consumer.getLaunched();\n\n        return DefaultPipelineLaunchReport.executed(\n                this,\n                launched,\n                consumed,\n                Collections.emptyList()\n        );\n    }\n\n\n    protected class RTaskInstanceConsumer implements TaskInstanceConsumer {\n\n        public List<UProcess> launched;\n\n        public boolean directlyLaunch;\n\n        public RTaskInstanceConsumer( boolean directlyLaunch ) {\n            this.launched = new ArrayList<>();\n            this.directlyLaunch = directlyLaunch;\n        }\n\n        @Override\n        public void 
tryConsume( TaskLaunchContext context ) throws TaskConsumeException {\n            try {\n                UProcess proc;\n                if ( this.directlyLaunch ) {\n                    proc = directlyLaunch( context.getTaskInstance(), context.getLaunchFeature() );\n                }\n                else {\n                    proc = directlyCreate( context.getTaskInstance(), context.getLaunchFeature() );\n                }\n                this.launched.add( proc );\n                afterProcessLaunched( proc, context );\n            }\n            catch ( InstanceLaunchException e ) {\n                log.error( \"Error during shift pipeline, what:'{}' \", e.getMessage(), e );\n                throw new TaskConsumeException( e );\n            }\n        }\n\n        @Override\n        public ConsumeCompromisedPolice compromisedPolice() {\n            return mConsumeCompromisedPolice;\n        }\n\n        public List<UProcess> getLaunched() {\n            return this.launched;\n        }\n    }\n\n}"
  },
  {
    "path": "Odin/odin-framework-conduct/src/main/java/com/walnut/odin/task/mapper/TaskProcessorMapper.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport java.util.List;\n\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.walnut.odin.dispatch.entity.GenericTaskProcessorEntity;\nimport com.walnut.odin.dispatch.entity.TaskProcessorEntity;\nimport com.walnut.odin.task.source.TaskProcessorManipulator;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface TaskProcessorMapper extends TaskProcessorManipulator {\n\n    @Select(\n        \"SELECT \" +\n        \"  `id`, \" +\n        \"  `guid`, \" +\n        \"  `processor_name` AS name, \" +\n        \"  `cluster_path` AS clusterPath, \" +\n        \"  `cluster_name` AS clusterName, \" +\n        \"  `is_local` AS `local`, \" +\n        \"  `is_exclusive` AS exclusive, \" +\n        \"  `priority`, \" +\n        \"  `queue_name` AS queueName, \" +\n        \"  `queue_max_capacity` AS queueMaxCapacity, \" +\n        \"  `queue_min_capacity` AS queueMinCapacity, \" +\n        \"  `queue_runtime_instance_capacity` AS queueRuntimeInstanceCapacity, \" +\n        \"  `enable`, \" +\n        \"  `create_time` AS createTime, \" +\n        \"  `update_time` AS updateTime \" +\n        \"FROM `odin_task_processor` \" +\n        \"WHERE `processor_name` = #{name}\"\n    )\n    GenericTaskProcessorEntity selectByProcessorName( @Param(\"name\") String szProcessorName );\n\n\n    @Select(\n        \"SELECT \" +\n        \"  `id`, \" +\n        \"  `guid`, \" +\n        \"  `processor_name` AS name, \" +\n        \"  `cluster_path` AS clusterPath, \" +\n        \"  `cluster_name` AS clusterName, \" +\n        \"  `is_local` AS `local`, \" +\n        \"  `is_exclusive` AS exclusive, \" +\n        \"  `priority`, \" +\n      
  \"  `queue_name` AS queueName, \" +\n        \"  `queue_max_capacity` AS queueMaxCapacity, \" +\n        \"  `queue_min_capacity` AS queueMinCapacity, \" +\n        \"  `queue_runtime_instance_capacity` AS queueRuntimeInstanceCapacity, \" +\n        \"  `enable`, \" +\n        \"  `create_time` AS createTime, \" +\n        \"  `update_time` AS updateTime \" +\n        \"FROM `odin_task_processor` \" +\n        \"WHERE `guid` = #{guid} AND `enable` = 1\"\n    )\n    GenericTaskProcessorEntity selectByGuid( @Param(\"guid\") GUID guid );\n\n\n    @Select(\n        \"SELECT \" +\n        \"  `id`, \" +\n        \"  `guid`, \" +\n        \"  `processor_name` AS name, \" +\n        \"  `cluster_path` AS clusterPath, \" +\n        \"  `cluster_name` AS clusterName, \" +\n        \"  `is_local` AS `local`, \" +\n        \"  `is_exclusive` AS exclusive, \" +\n        \"  `priority`, \" +\n        \"  `queue_name` AS queueName, \" +\n        \"  `queue_max_capacity` AS queueMaxCapacity, \" +\n        \"  `queue_min_capacity` AS queueMinCapacity, \" +\n        \"  `queue_runtime_instance_capacity` AS queueRuntimeInstanceCapacity, \" +\n        \"  `enable`, \" +\n        \"  `create_time` AS createTime, \" +\n        \"  `update_time` AS updateTime \" +\n        \"FROM `odin_task_processor` \" +\n        \"WHERE `cluster_name` = #{clusterName} AND `enable` = 1\"\n    )\n    List<GenericTaskProcessorEntity> selectByClusterName0( @Param(\"clusterName\") String clusterName );\n\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default List<TaskProcessorEntity> selectByClusterName( @Param(\"clusterName\") String clusterName ) {\n        return (List) this.selectByClusterName0( clusterName );\n    }\n\n\n    @Select(\n        \"SELECT \" +\n        \"  `id`, \" +\n        \"  `guid`, \" +\n        \"  `processor_name` AS name, \" +\n        \"  `cluster_path` AS clusterPath, \" +\n        \"  `cluster_name` AS clusterName, \" +\n        \"  `is_local` AS `local`, \" 
+\n        \"  `is_exclusive` AS exclusive, \" +\n        \"  `priority`, \" +\n        \"  `queue_name` AS queueName, \" +\n        \"  `queue_max_capacity` AS queueMaxCapacity, \" +\n        \"  `queue_min_capacity` AS queueMinCapacity, \" +\n        \"  `queue_runtime_instance_capacity` AS queueRuntimeInstanceCapacity, \" +\n        \"  `enable`, \" +\n        \"  `create_time` AS createTime, \" +\n        \"  `update_time` AS updateTime \" +\n        \"FROM `odin_task_processor`\"\n    )\n    List<GenericTaskProcessorEntity> selectAll0();\n\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default List<TaskProcessorEntity> selectAll() {\n        return (List) this.selectAll0();\n    }\n\n\n    @Insert(\n        \"INSERT INTO `odin_task_processor` ( \" +\n        \"  `guid`, \" +\n        \"  `processor_name`, \" +\n        \"  `cluster_path`, \" +\n        \"  `cluster_name`, \" +\n        \"  `is_local`, \" +\n        \"  `is_exclusive`, \" +\n        \"  `priority`, \" +\n        \"  `queue_name`, \" +\n        \"  `queue_max_capacity`, \" +\n        \"  `queue_min_capacity`, \" +\n        \"  `queue_runtime_instance_capacity` \" +\n        \") VALUES ( \" +\n        \"  #{entity.guid}, \" +\n        \"  #{entity.name}, \" +\n        \"  #{entity.clusterPath}, \" +\n        \"  #{entity.clusterName}, \" +\n        \"  #{entity.local}, \" +\n        \"  #{entity.exclusive}, \" +\n        \"  #{entity.priority}, \" +\n        \"  #{entity.queueName}, \" +\n        \"  #{entity.queueMaxCapacity}, \" +\n        \"  #{entity.queueMinCapacity}, \" +\n        \"  #{entity.queueRuntimeInstanceCapacity} \" +\n        \")\"\n    )\n    int insert( @Param(\"entity\") TaskProcessorEntity entity );\n\n\n    @Update(\n        \"UPDATE `odin_task_processor` SET \" +\n        \"  `cluster_path` = #{clusterPath}, \" +\n        \"  `cluster_name` = #{clusterName}, \" +\n        \"  `is_local` = #{local}, \" +\n        \"  `is_exclusive` = #{exclusive}, \" +\n        
\"  `priority` = #{priority}, \" +\n                \" `processor_name` = #{name}, \" +\n        \"  `queue_name` = #{queueName}, \" +\n        \"  `queue_max_capacity` = #{queueMaxCapacity}, \" +\n        \"  `queue_min_capacity` = #{queueMinCapacity}, \" +\n        \"  `queue_runtime_instance_capacity` = #{queueRuntimeInstanceCapacity}, \" +\n        \"  `enable` = #{enable} \" +\n        \"WHERE `guid` = #{guid}\"\n    )\n    int updateByGuid( GenericTaskProcessorEntity entity );\n\n\n    @Update(\n        \"UPDATE `odin_task_processor` SET \" +\n        \"  `queue_max_capacity` = #{maxCapacity}, \" +\n        \"  `queue_min_capacity` = #{minCapacity}, \" +\n        \"  `queue_runtime_instance_capacity` = #{runtimeCapacity} \" +\n        \"WHERE `guid` = #{guid}\"\n    )\n    int updateQueueCapacity(\n            @Param(\"guid\") GUID guid,\n            @Param(\"maxCapacity\") int maxCapacity,\n            @Param(\"minCapacity\") int minCapacity,\n            @Param(\"runtimeCapacity\") int runtimeCapacity\n    );\n\n\n    @Update(\n        \"DELETE FROM `odin_task_processor` WHERE `guid` = #{guid}\"\n    )\n    int deleteByGuid( @Param(\"guid\") GUID guid );\n\n\n    @Update(\n        \"UPDATE `odin_task_processor` SET `enable` = 1 WHERE `guid` = #{guid}\"\n    )\n    int enable( @Param(\"guid\") GUID guid );\n\n\n    @Update(\n        \"UPDATE `odin_task_processor` SET `enable` = 0 WHERE `guid` = #{guid}\"\n    )\n    int disable( @Param(\"guid\") GUID guid );\n\n}"
  },
  {
    "path": "Odin/odin-framework-runtime/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>odin</artifactId>\n        <groupId>com.walnut.odin</groupId>\n        <version>2.5.1</version>\n    </parent>\n\n    <artifactId>odin-framework-runtime</artifactId>\n    <version>2.5.1</version>\n    <modelVersion>4.0.0</modelVersion>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-architecture</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n</project>"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/ArchRemoteProcessManagerNode.java",
    "content": "package com.walnut.odin.proc;\n\nimport java.net.URI;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.concurrent.locks.ReadWriteLock;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessLifecycleHandler;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.URLImageLoader;\nimport com.pinecone.hydra.proc.image.kom.ImageElement;\nimport com.pinecone.hydra.system.centrum.UniformCentralSystem;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.walnut.odin.proc.client.RemoteProcessManagerClient;\n\npublic abstract class ArchRemoteProcessManagerNode implements RemoteProcessManagerNode {\n\n    protected Logger                         mLogger;\n\n    protected ProcessManager                 mProcessManager;\n\n    protected List<ProcessLifecycleHandler>  mLifecycleHandlers;\n\n    protected ReadWriteLock                  mnClientLock = new ReentrantReadWriteLock();\n\n    protected ArchRemoteProcessManagerNode( ProcessManager processManager ) {\n        this.mLogger             = LoggerFactory.getLogger( this.getClass() );\n        this.mProcessManager     = processManager;\n        this.mLifecycleHandlers  = new ArrayList<>();\n    }\n\n    @Override\n    public URLImageLoader imageLoader() {\n        return (URLImageLoader) this.mProcessManager.getImageLoader();\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        
return this.mProcessManager.getGuidAllocator();\n    }\n\n    @Override\n    public ProcessManager localProcessManager() {\n        return this.mProcessManager;\n    }\n\n    @Override\n    public RuntimeSystem superiorSystem() {\n        return this.mProcessManager.superiorSystem();\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    @Override\n    public ExecutionImage queryExecutionImage( String path ) {\n        ExecutionImage image = this.imageLoader().queryExecutionImage( path );\n        if ( image != null ) {\n            return image;\n        }\n\n        if ( this.superiorSystem() instanceof UniformCentralSystem ) {\n            EntityNode e = ((UniformCentralSystem) this.superiorSystem()).imperiumPrivy().getExpressInstrument().queryNode( path );\n            if ( e instanceof ImageElement ) {\n                return ((ImageElement) e).getImage();\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public ExecutionImage queryExecutionImage( URI uri ) {\n        return this.imageLoader().queryExecutionImage( uri );\n    }\n\n    @Override\n    public void registerLocalScopeExecutionImage( String dirPath, ExecutionImage image ) {\n        this.imageLoader().registerLocalScopeExecutionImage( dirPath, image );\n    }\n\n    @Override\n    public UProcess getProcess( GUID pid ) {\n        return this.mProcessManager.getProcess( pid );\n    }\n\n    @Override\n    public boolean hasOwnProcess( GUID pid ) {\n        UProcess process = this.mProcessManager.getProcess( pid );\n        if ( process instanceof RemoteProcess) {\n            return false;\n        }\n\n        return process != null;\n    }\n\n    @Override\n    public boolean containProcess( GUID pid ) {\n        return this.mProcessManager.containProcess( pid );\n    }\n\n    @Override\n    public Collection<UProcess> searchProcessesByName( String procName ) {\n        return this.mProcessManager.searchProcessesByName( 
procName );\n    }\n\n    @Override\n    public Collection<UProcess> searchProcessesByNameNoCase( String procName ) {\n        return this.mProcessManager.searchProcessesByNameNoCase( procName );\n    }\n\n    protected void afterMediatedRemoteProcess( MediatedRemoteProcess process, String imageAddress, boolean isURI ) {\n        this.notifyProcessLifecycleHandlers( imageAddress, null, ProcessEvent.Prepare );\n\n        ExecutionImage image;\n        if ( isURI ) {\n            image = this.queryExecutionImage( URI.create( imageAddress ) );\n        }\n        else {\n            image = this.queryExecutionImage( imageAddress );\n        }\n\n        if ( image == null ) {\n            throw new IllegalStateException( \"[MirrorCompromised] `\" + imageAddress + \"` is not a valid image address.\" );\n        }\n\n        this.mProcessManager.getImageModifier().applyImageAddress( image, imageAddress );\n\n        process.mExecutionImage = image;\n        process.mProcessManager = this.mProcessManager;\n    }\n\n\n    @Override\n    public RemoteProcessManagerNode addProcessLifecycleHandler(ProcessLifecycleHandler handler ) {\n        this.mnClientLock.writeLock().lock();\n        try {\n            this.mLifecycleHandlers.add( handler );\n            return this;\n        }\n        finally {\n            this.mnClientLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public RemoteProcessManagerNode removeProcessLifecycleHandler( ProcessLifecycleHandler handler ) {\n        this.mnClientLock.writeLock().lock();\n        try {\n            this.mLifecycleHandlers.remove( handler );\n            return this;\n        }\n        finally {\n            this.mnClientLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public int getProcessLifecycleHandlersSize() {\n        this.mnClientLock.readLock().lock();\n        try {\n            return this.mLifecycleHandlers.size();\n        }\n        finally {\n            
this.mnClientLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    @Unsafe\n    public void notifyProcessLifecycleHandlers( String imageAddress, EntryPointRunnable runnable, ProcessEvent event ) {\n        this.mnClientLock.readLock().lock();\n        try {\n            for ( ProcessLifecycleHandler handler : this.mLifecycleHandlers ) {\n                handler.fired( imageAddress, runnable, event );\n            }\n        }\n        finally {\n            this.mnClientLock.readLock().unlock();\n        }\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/MediatedRemoteProcess.java",
    "content": "package com.walnut.odin.proc;\n\nimport com.pinecone.framework.system.ApoptosisRejectSignalException;\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.framework.system.executum.Lifecycle;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.executum.TaskManager;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.ControllableLevel;\nimport com.pinecone.hydra.proc.ProcessActionTape;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.entity.ElementNode;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.ns.ProcSpace;\nimport com.pinecone.hydra.proc.tomb.RuntimeTombstone;\nimport com.pinecone.hydra.system.ko.entity.ObjectTable;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\nimport com.walnut.odin.proc.server.RemoteProcessManagerServer;\n\nimport java.time.LocalDateTime;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\n\npublic class MediatedRemoteProcess implements RemoteProcess {\n\n    protected RemoteProcessManagerServer          mRemoteProcessManagerServer;\n\n    protected ProcessManager                      mProcessManager;\n\n    protected ExecutionImage                      mExecutionImage;\n\n    protected String                              mszName;\n\n    protected long                                mnControlClientId;\n\n    protected long                                mnLocalPID;\n\n    protected GUID                                mParentPID;\n\n    protected GUID                                mProcessId;\n\n    protected Map<String, String[]>               
mStartupArguments;\n\n    protected Map<String, String[]>               mEnvironmentVariables;\n\n    protected List<ProcessRemoteEventHandler>     mRemoteEventHandlers;\n\n    public MediatedRemoteProcess(\n            long controlClientId, RemoteProcessManagerServer server, String name, long localPID, GUID processId,\n            Map<String, String[]> startupArguments, Map<String, String[]> environmentVariables\n    ) {\n        this.mnControlClientId           = controlClientId;\n        this.mRemoteProcessManagerServer = server;\n        this.mszName                     = name;\n        this.mnLocalPID                  = localPID;\n        this.mProcessId                  = processId;\n        this.mStartupArguments           = startupArguments;\n        this.mEnvironmentVariables       = environmentVariables;\n        this.mRemoteEventHandlers        = new ArrayList<>();\n    }\n\n    public MediatedRemoteProcess( long controlClientId, RemoteProcessManagerServer server, String name, long pid, GUID guid ) {\n        this( controlClientId, server, name, pid, guid, null, null );\n    }\n\n    @Override\n    public void addRemoteEventHandler( ProcessRemoteEventHandler handler ) {\n        this.mRemoteEventHandlers.add( handler );\n    }\n\n    @Override\n    public void removeRemoteEventHandler( ProcessRemoteEventHandler handler ) {\n        this.mRemoteEventHandlers.remove( handler );\n    }\n\n    @Override\n    public int remoteEventHandlerSize() {\n        return this.mRemoteEventHandlers.size();\n    }\n\n    @Override\n    public void notifyRemoteEvent( long pmClientId, ProcessEvent event, Object caused ) {\n        for ( ProcessRemoteEventHandler handler : this.mRemoteEventHandlers ) {\n            handler.fired( pmClientId, event, caused );\n        }\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    public long getControlClientId() {\n        return this.mnControlClientId;\n    }\n\n    @Override\n    
public long getLocalPID() {\n        return this.mnLocalPID;\n    }\n\n    @Override\n    public GUID actualParentPID() {\n        return this.mParentPID;\n    }\n\n    @Override\n    public void applyActualParentPID( GUID pid ) {\n        this.mParentPID = pid;\n    }\n\n    @Override\n    public void setName( String szName ) {\n        this.mszName = szName;\n    }\n\n    @Override\n    public long getExecutumId() {\n        return this.mnLocalPID;\n    }\n\n    @Override\n    public UProcessRuntimeMeta retrieveRemoteRuntimeMeta() throws RemoteProcessLifecycleException {\n        return this.mRemoteProcessManagerServer.queryProcessRuntimeMeta( this.mProcessId );\n    }\n\n    protected UProcessRuntimeMeta optRemoteRuntimeMeta() throws IllegalStateException {\n        try {\n            return this.mRemoteProcessManagerServer.queryProcessRuntimeMeta( this.mProcessId );\n        }\n        catch ( RemoteProcessLifecycleException e ) {\n            throw new IllegalStateException( e );\n        }\n    }\n\n    @Override\n    public RuntimeSystem parentSystem() {\n        return null;\n    }\n\n    @Override\n    public RuntimeSystem revealNearestSystem() {\n        return null;\n    }\n\n    @Override\n    public Executum parentExecutum() {\n        return null;\n    }\n\n    @Override\n    public Executum setThreadAffinity( Thread affinity ) {\n        throw new NotImplementedException( \"`RemoteProcess` has no thread affinity, so it cannot be set.\" );\n    }\n\n    @Override\n    public Thread getAffiliateThread() {\n        return null;\n    }\n\n    @Override\n    public boolean isTerminated() {\n        UProcessRuntimeMeta meta = this.optRemoteRuntimeMeta();\n        return meta.isTerminated();\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mProcessId;\n    }\n\n    @Override\n    public GUID getParentProcessId() {\n        return this.mParentPID;\n    }\n\n    @Override\n    public long getParentLocalPID() {\n        return 0;\n    
}\n\n    @Override\n    public LocalDateTime remoteGetEndTime() {\n        return null;\n    }\n\n    @Override\n    public LocalDateTime remoteGetLastUpdateTime() {\n        return null;\n    }\n\n    @Override\n    public UProcess parentProcess() {\n        return null;\n    }\n\n    @Override\n    public ProcessManager getOwnedProcessManager() {\n        return this.mProcessManager;\n    }\n\n    @Override\n    public ProcSpace getProcNamespace() {\n        return null;\n    }\n\n    @Override\n    public RuntimeTombstone getRuntimeTombstone() {\n        return null;\n    }\n\n    @Override\n    public ObjectTable getObjectTable() {\n        return null;\n    }\n\n    @Override\n    public ExecutionImage getExecutionImage() {\n        return this.mExecutionImage;\n    }\n\n    @Override\n    public ControllableLevel getControllableLevel() {\n        return null;\n    }\n\n    @Override\n    public LocalDateTime getEndTime() {\n        return null;\n    }\n\n    @Override\n    public LocalDateTime getLastUpdateTime() {\n        return null;\n    }\n\n    @Override\n    public Map<String, String[]> getStartupArguments() {\n        return this.mStartupArguments;\n    }\n\n    @Override\n    public Map<String, String[]> getEnvironmentVariables() {\n        return this.mEnvironmentVariables;\n    }\n\n\n    @Override\n    public Processum affinityLocalProcess() {\n        return null;\n    }\n\n    @Override\n    public void triggerUpdateTerminationStatus() {\n\n    }\n\n    @Override\n    public void triggerAfterRunnableTerminationStatus() {\n\n    }\n\n    @Override\n    public void start() throws ProvokeHandleException {\n        try {\n            this.mRemoteProcessManagerServer.startRemoteUProcess( this.mProcessId );\n        }\n        catch ( RemoteProcessServiceRPCException e ) {\n            throw new ProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public Map<Long, Executum> getOwnThreadGroup() {\n        return null;\n    }\n\n    
@Override\n    public TaskManager getTaskManager() {\n        return null;\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return null;\n    }\n\n    @Override\n    public LocalDateTime getStartTime() {\n        return null;\n    }\n\n    @Override\n    public void apoptosis() throws ApoptosisRejectSignalException {\n\n    }\n\n    @Override\n    public void kill() {\n\n    }\n\n    @Override\n    public void interrupt() {\n\n    }\n\n    @Override\n    public void suspend() {\n\n    }\n\n    @Override\n    public void resume() {\n\n    }\n\n    @Override\n    public void entreatLive() {\n\n    }\n\n    @Override\n    public Thread.State getState() {\n        return null;\n    }\n\n    @Override\n    public ElementNode getAccount() {\n        return null;\n    }\n\n    @Override\n    public int getExceptionRestartTime() {\n        return 0;\n    }\n\n    @Override\n    public Lifecycle applyExceptionRestartTime( int time ) {\n        return null;\n    }\n\n    @Override\n    public ProcessActionTape actionTape() {\n        return null;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/ProcessesUtils.java",
    "content": "package com.walnut.odin.proc;\n\nimport java.time.LocalDateTime;\nimport java.time.format.DateTimeFormatter;\nimport java.util.HashMap;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.datetime.DatePattern;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\n\npublic final class ProcessesUtils {\n\n    public static Map<String, String[]> decode( String json ) {\n        Map<String, String[]> map = new HashMap<>();\n        if ( json == null || json.isEmpty() ) {\n            return map;\n        }\n\n        JSONObject jo = new JSONMaptron( json );\n        for ( Map.Entry<String, Object> kv : jo.entrySet() ) {\n            JSONArray ja = (JSONArray) kv.getValue();\n            String[] vs = new String[ ja.size() ];\n            for ( int i = 0; i < ja.size(); ++i ) {\n                vs[ i ] = ja.optString( i );\n            }\n\n            map.put( kv.getKey(), vs );\n        }\n\n        return map;\n    }\n\n    private static String formatTime( LocalDateTime time ) {\n        if ( time == null ) {\n            return null;\n        }\n        DateTimeFormatter formatter = DatePattern.createFormatter( \"yyyy-MM-dd HH:mm:ss.nnnnnnnnn\" );\n        return time.format( formatter );\n    }\n\n    public static UProcessRuntimeMeta extractProcessMeta( UProcess that ) {\n        UProcessRuntimeMeta meta = new UProcessRuntimeMeta();\n        meta.setPID( that.getPID().toString() );\n        meta.setParentPID( that.getParentProcessId().toString() );\n        meta.setName( that.getName() );\n        meta.setLocalPID( that.getLocalPID() );\n\n        meta.setCreateTime( formatTime( that.getCreateTime() ) );\n        meta.setStartTime( formatTime( that.getStartTime() ) );\n        meta.setEndTime( formatTime( that.getEndTime() ) );\n        
meta.setLastUpdateTime( formatTime( that.getLastUpdateTime() ) );\n\n        meta.setMainThreadStatus( that.getState().toString() );\n        meta.setTerminated( that.isTerminated() );\n\n        return meta;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/RemoteProcessLifecycleExaminer.java",
    "content": "package com.walnut.odin.proc;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.ImageModifier;\nimport com.walnut.odin.proc.client.RPCRecallSysProcessEventHandler;\nimport com.walnut.odin.proc.client.SlaveProcessLifecycleIface;\n\npublic class RemoteProcessLifecycleExaminer implements ProcessLifecycleExaminer {\n\n    protected Logger                       mLogger;\n    protected RemoteProcessManagerNode     mRemoteProcessManagerNode;\n    protected SlaveProcessLifecycleIface   mSlaveProcessLifecycleIface;\n    protected ProcessManager               mProcessManager;\n    protected ImageModifier                mImageModifier;\n\n    public RemoteProcessLifecycleExaminer( RemoteProcessManagerNode remoteProcessManagerNode, SlaveProcessLifecycleIface slaveProcessLifecycleIface ) {\n        this.mSlaveProcessLifecycleIface = slaveProcessLifecycleIface;\n        this.mRemoteProcessManagerNode   = remoteProcessManagerNode;\n        this.mProcessManager             = remoteProcessManagerNode.localProcessManager();\n        this.mImageModifier              = this.mProcessManager.getImageModifier();\n        this.mLogger                     = LoggerFactory.getLogger( this.getClass() );\n    }\n\n    @Override\n    public ImageModifier imageModifier() {\n        return this.mImageModifier;\n    }\n\n    @Override\n    public void startProcess( UProcess process ) {\n        this.mLogger.info( \"[RemoteProcessVitalization] (Process: `{}`, PID: `{}`) <InstructionAccepted>\", process.getName(), process.getPID() );\n        ExecutionImage image = process.getExecutionImage();\n        this.mImageModifier.addSystemProcessEventHandler( image.getEntryPoint(), new RPCRecallSysProcessEventHandler(\n                
this.mRemoteProcessManagerNode, this.mSlaveProcessLifecycleIface\n        ) );\n\n        process.start(); // TODO, Process Joint\n\n        this.mRemoteProcessManagerNode.notifyProcessLifecycleHandlers(\n                process.getExecutionImage().getImageAddress(), process.getExecutionImage().getEntryPoint(), ProcessEvent.Vitalized\n        );\n\n        this.mLogger.info( \"[RemoteProcessVitalization] (Process: `{}`, PID: `{}`) <InstructionPerformed>\", process.getName(), process.getPID() );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/client/RPCRecallSysProcessEventHandler.java",
    "content": "package com.walnut.odin.proc.client;\n\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.walnut.odin.proc.RemoteProcessManagerNode;\nimport com.walnut.odin.proc.RemoteTerminationStatus;\nimport com.walnut.odin.proc.entity.RemoteTerminationReport;\n\npublic class RPCRecallSysProcessEventHandler implements ProcessEventHandler {\n\n    protected RemoteProcessManagerNode     mRemoteProcessManagerNode;\n    protected SlaveProcessLifecycleIface   mSlaveProcessLifecycleIface;\n    protected long                         mnClientId;\n\n    public RPCRecallSysProcessEventHandler( long clientId, RemoteProcessManagerNode node, SlaveProcessLifecycleIface iface ) {\n        this.mRemoteProcessManagerNode   = node;\n        this.mSlaveProcessLifecycleIface = iface;\n        this.mnClientId                  = clientId;\n    }\n\n    public RPCRecallSysProcessEventHandler( RemoteProcessManagerNode node, SlaveProcessLifecycleIface iface ) {\n        this( -1, node, iface );\n\n        if ( node instanceof RemoteProcessManagerClient ) {\n            this.mnClientId = ((RemoteProcessManagerClient) node).getClientId();\n        }\n    }\n\n    @Override\n    public void fired( EntryPointRunnable runnable, ProcessEvent event ) {\n        switch ( event ) {\n            case Terminated: {\n                this.notifyProcessTerminated( runnable );\n                break;\n            }\n            case Prepare:\n            case Created:\n            case Vitalized:\n            default: {\n                break;\n            }\n        }\n    }\n\n    protected void notifyProcessTerminated( EntryPointRunnable runnable ) {\n        UProcess process = runnable.ownedProcess();\n        RemoteTerminationReport report = new RemoteTerminationReport();\n        report.setProcessID( process.getPID() );\n      
  report.setExitCode( process.actionTape().getExitCode() );\n        report.setLocalPID( process.getLocalPID() );\n        report.setRemoteTerminationStatus( RemoteTerminationStatus.Expected );\n\n        Throwable lastError = process.actionTape().getLastError();\n        if ( lastError != null ) {\n            report.setErrorMsg( lastError.getMessage() );\n            report.setRemoteTerminationStatus( RemoteTerminationStatus.Error );\n            this.mRemoteProcessManagerNode.notifyProcessLifecycleHandlers(\n                    process.getExecutionImage().getImageAddress(), process.getExecutionImage().getEntryPoint(), ProcessEvent.Error\n            );\n        }\n        else {\n            this.mRemoteProcessManagerNode.notifyProcessLifecycleHandlers(\n                    process.getExecutionImage().getImageAddress(), process.getExecutionImage().getEntryPoint(), ProcessEvent.Terminated\n            );\n        }\n\n        this.mSlaveProcessLifecycleIface.reportProcessTerminated( this.mnClientId, report );\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/client/RavenRemoteProcessManagerClient.java",
    "content": "package com.walnut.odin.proc.client;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.proc.LocalUProcess;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.system.component.LogStatuses;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointClient;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.walnut.odin.proc.ArchRemoteProcessManagerNode;\nimport com.walnut.odin.proc.ProcessesUtils;\nimport com.walnut.odin.proc.RemoteProcess;\nimport com.walnut.odin.proc.RemoteProcessLifecycleExaminer;\nimport com.walnut.odin.proc.ProcessLifecycleExaminer;\nimport com.walnut.odin.proc.RemoteProcessLifecycleException;\nimport com.walnut.odin.proc.RemoteProcessServiceRPCException;\nimport com.walnut.odin.proc.RemoteVitalizationStatus;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessMirrorDTO;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\n\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.util.Map;\n\npublic class RavenRemoteProcessManagerClient extends ArchRemoteProcessManagerNode implements RemoteProcessManagerClient {\n\n    protected DuplexAppointClient            mDuplexAppointClient;\n\n    protected SlaveProcessLifecycleIface     mProcessLifecycleIface;\n\n    protected ProcessLifecycleExaminer       mProcessLifecycleExaminer;\n\n    protected long                           mnClientId;\n\n    protected UlfClient                      mRPCClient;\n\n    public RavenRemoteProcessManagerClient( ProcessManager processManager, UlfClient rpcClient ) {\n        super( processManager );\n        this.mRPCClient                = rpcClient;\n        this.mnClientId                = rpcClient.getMessageNodeId();\n    
}\n\n\n    protected void initRPCSubsystem() throws RemoteProcessServiceRPCException {\n        if ( this.mDuplexAppointClient != null && !this.mDuplexAppointClient.getMessageNode().isTerminated() ) {\n            throw new IllegalStateException( \"DuplexAppointClient has started.\" );\n        }\n\n        this.mDuplexAppointClient = new WolvesAppointClient( this.mRPCClient );\n        try {\n            this.mDuplexAppointClient.compile( SlaveProcessLifecycleIface.class,false );\n            this.mProcessLifecycleIface = this.mDuplexAppointClient.getIface( SlaveProcessLifecycleIface.class );\n            this.mDuplexAppointClient.getRouteDispatcher().registerController( new ReactiveMasterProcessLifecycleController( this ) );\n\n            this.mProcessLifecycleExaminer = new RemoteProcessLifecycleExaminer( this, this.mProcessLifecycleIface );\n            this.infoLifecycle( \"RPC Subsystem Register Controllers\", LogStatuses.StatusDone );\n        }\n        catch ( Exception e ) {\n            this.mProcessLifecycleIface = null;\n            throw new RemoteProcessServiceRPCException( e );\n        }\n    }\n\n    protected void vitalizeRPCSubsystem() throws RemoteProcessServiceRPCException {\n        try {\n            if ( this.mDuplexAppointClient.getMessageNode().isTerminated() ) {\n                this.mDuplexAppointClient.execute();\n                this.mDuplexAppointClient.embraces( 2 );\n                this.mProcessLifecycleIface.reportClientInitialized( this.mnClientId );\n\n                this.infoLifecycle( \"RPC Subsystem Service Vitalization, ( ClientId: `\" + this.mnClientId + \"` )\", LogStatuses.StatusDone );\n            }\n        }\n        catch ( Exception e ) {\n            throw new RemoteProcessServiceRPCException( e );\n        }\n    }\n\n\n    @Override\n    public long getClientId() {\n        return this.mnClientId;\n    }\n\n    @Override\n    public void startService() throws RemoteProcessServiceRPCException {\n        
this.initRPCSubsystem();\n        this.vitalizeRPCSubsystem();\n    }\n\n    @Override\n    public void terminateService() {\n        if ( this.mDuplexAppointClient == null ) {\n            throw new IllegalStateException( \"RPCClient has not started yet.\" );\n        }\n\n        this.mDuplexAppointClient.terminate();\n        this.mDuplexAppointClient = null;\n    }\n\n    @Override\n    public UProcess createLocalUProcess( ExecutionImage image, UProcess parent, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) {\n        LocalUProcess localHostedProcess = this.mProcessManager.createLocalHostedProcess( image, parent, startupArgs, contextEnvironmentVars );\n\n        if ( this.mProcessLifecycleIface != null ) {\n            UProcessMirrorDTO processMirrorDTO = new UProcessMirrorDTO( localHostedProcess.getName(), localHostedProcess.getLocalPID(), localHostedProcess.getGuid().toString() );\n            this.mProcessLifecycleIface.registerRemoteProcess( this.mnClientId, processMirrorDTO);\n            this.getLogger().info( \"[SuperiorRegister] [createLocalUProcess] <Done>\" );\n        }\n        else {\n            this.getLogger().info( \"[SuperiorRegister] [createLocalUProcess] <Pass>\" ); // Missing central connection, skip reporting; 失联，跳过上报中央.\n        }\n        return localHostedProcess;\n    }\n\n    @Override\n    public RemoteVitalizationResponse createLocalUProcess( UProcessMirrorDTO handlerDTO, UProcess[] lpProcess ) throws RemoteProcessLifecycleException {\n        try {\n            String imageAddress = handlerDTO.getImageAddress();\n            boolean isURI       = handlerDTO.isImageAddressURI();\n            RemoteVitalizationResponse response = new RemoteVitalizationResponse();\n            response.setRemoteVitalizationStatus( RemoteVitalizationStatus.New );\n\n            this.notifyProcessLifecycleHandlers( imageAddress, null, ProcessEvent.Prepare );\n\n            ExecutionImage image;\n            if ( isURI 
) {\n                URI uri = new URI( imageAddress );\n                image = this.queryExecutionImage( uri );\n            }\n            else {\n                image = this.queryExecutionImage( imageAddress );\n            }\n            this.mProcessManager.getImageModifier().applyImageAddress( image, imageAddress );\n\n            if ( image == null ) {\n                response.setRemoteVitalizationStatus( RemoteVitalizationStatus.NoImage );\n                return response;\n            }\n\n            String szStartupArguments      = handlerDTO.getStartupArguments();\n            String szEnvironmentVariables  = handlerDTO.getEnvironmentVariables();\n            String szParentPID             = handlerDTO.getParentPID();\n\n            Map<String, String[]> startupArgs  = ProcessesUtils.decode( szStartupArguments );\n            Map<String, String[]> envVariables = ProcessesUtils.decode( szEnvironmentVariables );\n            GUID parentPID = null;\n            if ( szParentPID != null ) {\n                parentPID = this.mProcessManager.getGuidAllocator().parse( szParentPID );\n            }\n\n            LocalUProcess localHostedProcess = this.mProcessManager.createLocalHostedProcess( image, this.mProcessManager.getRootUProcess(), startupArgs, envVariables );\n            localHostedProcess.applyActualParentPID( parentPID );\n            response.setName( localHostedProcess.getName() );\n            response.setProcessID( localHostedProcess.getPID() );\n            response.setLocalPID( localHostedProcess.getLocalPID() );\n            response.setEnvironmentVariables( szEnvironmentVariables );\n            response.setStartupArguments( szStartupArguments );\n\n            response.setImageAddress(imageAddress);\n            response.setImageAddressURI(isURI);\n\n            if ( lpProcess != null && lpProcess.length > 0 ) {\n                lpProcess[0] = localHostedProcess;\n            }\n\n            this.notifyProcessLifecycleHandlers( 
imageAddress, null, ProcessEvent.Created );\n            return response;\n        }\n        catch ( URISyntaxException e ) {\n            throw new RemoteProcessLifecycleException( e );\n        }\n    }\n\n    @Override\n    public RemoteVitalizationResponse vitalizeLocalUProcess( UProcessMirrorDTO handlerDTO ) throws RemoteProcessLifecycleException {\n        UProcess[] lpProcess = new UProcess[1];\n        RemoteVitalizationResponse response = this.createLocalUProcess( handlerDTO, lpProcess );\n        LocalUProcess localHostedProcess = (LocalUProcess) lpProcess[ 0 ];\n\n        if ( response.getStatus() != RemoteVitalizationStatus.New.getCode() && response.getStatus() != RemoteVitalizationStatus.Vitalized.getCode() ) {\n            return response;\n        }\n\n        // Asynchronous startup may cause consistency errors if local execution finishes before the remote mirror is ready to handle events.\n        // Sync and confirmation are required.\n        // 进程启动为异步过程，若本地执行过快，远端镜像未就绪即本地完成（远端进程可能无法被后续事件清理），将导致一致性错误，需上报并等待同步。\n        // Note: Strong consistency is required. 
RPC sync must precede remote mirror process initialization.\n        // PS：该过程要求强一致性，必须先通过 RPC 同步，等待远端镜像进程完成创建。\n        String pid = this.mProcessLifecycleIface.reportProcessCreated( this.mnClientId, response );\n        if ( !response.getPID().equals( pid ) ) {\n            throw new RemoteProcessLifecycleException( \"An internal error has happened, with unmatched remote-process PID.\" );\n        }\n\n        this.mProcessLifecycleExaminer.startProcess( localHostedProcess );\n\n        return response;\n    }\n\n    @Override\n    public void startLocalUProcess( GUID pid ) throws IllegalArgumentException {\n        UProcess process = this.mProcessManager.getProcess( pid );\n        if ( process == null ) {\n            throw new IllegalArgumentException( \"No such process, PID => `\" + pid + \"`\" );\n        }\n\n        this.mProcessLifecycleExaminer.startProcess( process );\n    }\n\n    @Override\n    public void register( UProcess that ) {\n        this.mProcessManager.register( that );\n    }\n\n    @Override\n    public void erase( UProcess that ) {\n        this.mProcessManager.erase( that );\n    }\n\n    @Override\n    public UProcessRuntimeMeta queryProcessRuntimeMeta( GUID pid ) throws RemoteProcessLifecycleException {\n        UProcess process = this.mProcessManager.getProcess( pid );\n        if ( process instanceof RemoteProcess ) {\n            RemoteProcess remoteProcess = (RemoteProcess) process;\n            return remoteProcess.retrieveRemoteRuntimeMeta(); // Cascading retrieval of runtime meta information\n        }\n\n        if ( process == null ) {\n            return null;\n        }\n\n        UProcessRuntimeMeta meta = ProcessesUtils.extractProcessMeta( process );\n        // 不要直接return 老子好打断点.\n        return meta;\n    }\n\n    @Override\n    public DuplexAppointClient duplexAppointClient() {\n        return this.mDuplexAppointClient;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/client/ReactiveMasterProcessLifecycleController.java",
    "content": "package com.walnut.odin.proc.client;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\nimport com.walnut.odin.proc.RemoteProcessLifecycleException;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessMirrorDTO;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\n\n@Controller\n@AddressMapping( \"com.walnut.odin.proc.server.MasterProcessLifecycleIface.\" )\npublic class ReactiveMasterProcessLifecycleController implements Pinenut {\n\n    private final RemoteProcessManagerClient mRemoteProcessManagerClient;\n\n    private final GuidAllocator mGuidAllocator;\n\n    public ReactiveMasterProcessLifecycleController( RemoteProcessManagerClient remoteProcessManagerClient ) {\n        this.mRemoteProcessManagerClient = remoteProcessManagerClient;\n        this.mGuidAllocator = remoteProcessManagerClient.getGuidAllocator();\n    }\n\n    @AddressMapping(\"startRemoteUProcess\")\n    public void startRemoteUProcess( String szPid ) {\n        this.mRemoteProcessManagerClient.startLocalUProcess( this.mGuidAllocator.parse(szPid) );\n    }\n\n    @AddressMapping(\"vitalizeRemoteUProcess\")\n    public RemoteVitalizationResponse vitalizeRemoteUProcess( UProcessMirrorDTO handlerDTO ) throws RemoteProcessLifecycleException {\n        String imageAddress = handlerDTO.getImageAddress();\n        this.mRemoteProcessManagerClient.getLogger().info( \"[RemoteProcessVitalization] [PRC] (Process: `{}`) <InstructionAccepted>\", imageAddress );\n        RemoteVitalizationResponse response = this.mRemoteProcessManagerClient.vitalizeLocalUProcess( handlerDTO );\n        this.mRemoteProcessManagerClient.getLogger().info( \"[RemoteProcessVitalization] [PRC] (Process: `{}`) <InstructionPerformed>\", imageAddress );\n        return response;\n    }\n\n    
@AddressMapping(\"createRemoteUProcess\")\n    public RemoteVitalizationResponse createRemoteUProcess( UProcessMirrorDTO handlerDTO ) throws RemoteProcessLifecycleException {\n        String imageAddress = handlerDTO.getImageAddress();\n        this.mRemoteProcessManagerClient.getLogger().info( \"[RemoteProcessCreation] [PRC] (Process: `{}`) <InstructionAccepted>\", imageAddress );\n        RemoteVitalizationResponse response = this.mRemoteProcessManagerClient.createLocalUProcess( handlerDTO, null );\n        this.mRemoteProcessManagerClient.getLogger().info( \"[RemoteProcessCreation] [PRC] (Process: `{}`) <InstructionPerformed>\", imageAddress );\n        return response;\n    }\n\n    @AddressMapping(\"hasOwnProcess\")\n    public boolean hasOwnProcess( String processId ) {\n        boolean has = this.mRemoteProcessManagerClient.hasOwnProcess( this.mGuidAllocator.parse(processId) );\n        return has;\n    }\n\n    @AddressMapping(\"containProcess\")\n    public boolean containProcess( String processId ) {\n        boolean has = this.mRemoteProcessManagerClient.containProcess( this.mGuidAllocator.parse(processId) );\n        return has;\n    }\n\n    @AddressMapping(\"queryRemoteProcessRuntimeMeta\")\n    public UProcessRuntimeMeta queryRemoteProcessRuntimeMeta( String processId ) throws RemoteProcessLifecycleException {\n        return this.mRemoteProcessManagerClient.queryProcessRuntimeMeta( this.mGuidAllocator.parse(processId) );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/client/SlaveProcessLifecycleIface.java",
    "content": "package com.walnut.odin.proc.client;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.stereotype.Iface;\nimport com.walnut.odin.proc.entity.RemoteTerminationReport;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessMirrorDTO;\n\n@Iface\npublic interface SlaveProcessLifecycleIface extends Pinenut {\n\n    long reportClientInitialized( long clientId );\n\n    void registerRemoteProcess( long clientId, UProcessMirrorDTO processDTO );\n\n    void reportProcessTerminated( long clientId, RemoteTerminationReport terminationReport );\n\n    String reportProcessCreated( long clientId, RemoteVitalizationResponse vitalizationResponse );\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/server/MasterProcessLifecycleIface.java",
    "content": "package com.walnut.odin.proc.server;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.stereotype.Iface;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessMirrorDTO;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\n\n@Iface\npublic interface MasterProcessLifecycleIface extends Pinenut {\n\n    void startRemoteUProcess( String processId );\n\n    RemoteVitalizationResponse vitalizeRemoteUProcess( UProcessMirrorDTO handlerDTO );\n\n    RemoteVitalizationResponse createRemoteUProcess( UProcessMirrorDTO handlerDTO );\n\n    boolean hasOwnProcess( String processId );\n\n    boolean containProcess( String processId );\n\n    UProcessRuntimeMeta queryRemoteProcessRuntimeMeta( String processId );\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/server/RavenRemoteProcessManagerServer.java",
    "content": "package com.walnut.odin.proc.server;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.hydra.proc.ArchProcessManager;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.system.component.LogStatuses;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.pinecone.hydra.uma.HuskyDuplexExpress;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointServer;\nimport com.pinecone.hydra.umc.wolf.server.UlfServer;\nimport com.walnut.odin.proc.ArchRemoteProcessManagerNode;\nimport com.walnut.odin.proc.ProcessesUtils;\nimport com.walnut.odin.proc.RemoteProcess;\nimport com.walnut.odin.proc.MediatedRemoteProcess;\nimport com.walnut.odin.proc.RemoteProcessLifecycleException;\nimport com.walnut.odin.proc.RemoteProcessServiceRPCException;\nimport com.walnut.odin.proc.RemoteVitalizationStatus;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessMirrorDTO;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\n\nimport java.io.IOException;\nimport java.net.URI;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\npublic class RavenRemoteProcessManagerServer extends ArchRemoteProcessManagerNode implements RemoteProcessManagerServer {\n\n    protected GuidAllocator                             mGuidAllocator;\n\n    //protected Map<GUID, Long>                           mPidClientIdMap;\n\n    protected Map<Long, MasterProcessLifecycleIface>    mLifecycleIfaceCMap;\n\n    protected UlfServer                                 mRPCServer;\n\n    protected DuplexAppointServer                       mDuplexAppointServer;\n\n    public RavenRemoteProcessManagerServer( ProcessManager localProcessManager, UlfServer ulfServer ) {\n        super( localProcessManager );\n        //this.mPidClientIdMap        = new 
ConcurrentHashMap<>();\n        this.mLifecycleIfaceCMap    = new ConcurrentHashMap<>();\n        this.mGuidAllocator         = localProcessManager.getGuidAllocator();\n        this.mRPCServer             = ulfServer;\n    }\n\n    protected void initRPCSubsystem() throws RemoteProcessServiceRPCException {\n        if ( this.mDuplexAppointServer != null && !this.mDuplexAppointServer.getMessageNode().isTerminated() ) {\n            throw new IllegalStateException( \"DuplexAppointServer has started.\" );\n        }\n\n        try {\n            this.mDuplexAppointServer = new WolvesAppointServer( this.mRPCServer, HuskyDuplexExpress.class );\n            ReactiveSlaveProcessLifecycleController controller = new ReactiveSlaveProcessLifecycleController( this );\n            this.mDuplexAppointServer.registerController( controller );\n            this.mDuplexAppointServer.compile( MasterProcessLifecycleIface.class, false );\n\n            this.infoLifecycle( \"RPC Subsystem Register Controllers\", LogStatuses.StatusDone );\n        }\n        catch ( Exception e ) {\n            throw new RemoteProcessServiceRPCException( e );\n        }\n    }\n\n    protected void vitalizeRPCSubsystem() throws RemoteProcessServiceRPCException {\n        try {\n            if ( this.mDuplexAppointServer.getMessageNode().isTerminated() ) {\n                this.mDuplexAppointServer.execute();\n                this.infoLifecycle( \"RPC Subsystem Service Vitalization\", LogStatuses.StatusDone );\n            }\n        }\n        catch ( Exception e ) {\n            throw new RemoteProcessServiceRPCException( e );\n        }\n    }\n\n\n    @Override\n    public DuplexAppointServer duplexAppointServer() {\n        return this.mDuplexAppointServer;\n    }\n\n    @Override\n    public void startService() throws RemoteProcessServiceRPCException {\n        this.initRPCSubsystem();\n        this.vitalizeRPCSubsystem();\n    }\n\n    @Override\n    public void terminateService() throws 
IllegalStateException {\n        if ( this.mDuplexAppointServer == null ) {\n            throw new IllegalStateException( \"RPCServer has not started yet.\" );\n        }\n\n        this.mDuplexAppointServer.terminate();\n        this.mDuplexAppointServer = null;\n    }\n\n    @Override\n    public void registerProcess( long clientId, UProcessMirrorDTO processDTO ) {\n        this.createMediatedRemoteProcess( clientId, processDTO );\n\n        this.getLogger().info( \"[SubordinateRegister] [RegisterProcess (ClientId: {}, PID: {})] <Done>\", clientId, processDTO.getPID() );\n    }\n\n    @Override\n    public void startRemoteUProcess( GUID pid ) throws RemoteProcessServiceRPCException {\n        UProcess process = this.mProcessManager.getProcess( pid );\n        if ( process == null  ) {\n            throw new IllegalArgumentException( \"No such process, PID => `\" + pid + \"`\" );\n        }\n        if ( !( process instanceof RemoteProcess )  ) {\n            throw new IllegalArgumentException( \"Target process is not remote process, PID => `\" + pid + \"`\" );\n        }\n\n        RemoteProcess rp = (RemoteProcess) process;\n        long clientId = rp.getControlClientId();\n\n        try {\n            this.mDuplexAppointServer.invokeInform( clientId, \"com.walnut.odin.proc.server.MasterProcessLifecycleIface.startRemoteUProcess\", pid );\n        }\n        catch ( IOException e ) {\n            throw new RemoteProcessServiceRPCException( e );\n        }\n    }\n\n    protected RemoteVitalizationResponse vitalizeRemoteUProcess0(\n            long clientId, String imageAddress, boolean isURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars,\n            boolean directStart\n    ) throws RemoteProcessLifecycleException {\n        UProcessMirrorDTO handlerDTO = new UProcessMirrorDTO();\n        String szParentPID = this.mProcessManager.getRootUProcess().getPID().toString();\n        if ( parentPID != null ) {\n      
      szParentPID = parentPID.toString();\n        }\n        handlerDTO.setParentPID( szParentPID );\n        if ( startupArgs != null ) {\n            handlerDTO.setStartupArguments( JSON.stringify( startupArgs ) );\n        }\n        if ( contextEnvironmentVars != null ) {\n            handlerDTO.setEnvironmentVariables( JSON.stringify( contextEnvironmentVars ) );\n        }\n\n        handlerDTO.setImageAddress( imageAddress );\n        handlerDTO.setImageAddressURI( isURI );\n\n        try {\n            Object ret;\n\n            if ( directStart ) {\n                ret = this.mDuplexAppointServer.invokeInform(\n                        clientId, \"com.walnut.odin.proc.server.MasterProcessLifecycleIface.vitalizeRemoteUProcess\",\n                        handlerDTO\n                );\n            }\n            else {\n                ret = this.mDuplexAppointServer.invokeInform(\n                        clientId, \"com.walnut.odin.proc.server.MasterProcessLifecycleIface.createRemoteUProcess\",\n                        handlerDTO\n                );\n            }\n\n\n            RemoteVitalizationResponse response = (RemoteVitalizationResponse) ret;\n            if ( response.getPID() != null ) {\n                response.setProcessID( this.mGuidAllocator.parse( response.getPID() ) );\n            }\n\n            return response;\n        }\n        catch ( IOException e ) {\n            throw new RemoteProcessLifecycleException( e );\n        }\n    }\n\n    @Override\n    public RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException {\n       return this.vitalizeRemoteUProcess0( clientId, imageAddress, isURI, parentPID, startupArgs, contextEnvironmentVars, true );\n    }\n\n    @Override\n    public RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, String 
imagePath, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException {\n        return this.vitalizeRemoteUProcess( clientId, imagePath, false, parentPID, startupArgs, contextEnvironmentVars );\n    }\n\n    @Override\n    public RemoteVitalizationResponse vitalizeRemoteUProcess( long clientId, URI imageURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException {\n        return this.vitalizeRemoteUProcess( clientId, imageURI.toString(), true, parentPID, startupArgs, contextEnvironmentVars );\n    }\n\n\n\n    @Override\n    public RemoteCreationResult createRemoteUProcess( long clientId, String imageAddress, boolean isURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException {\n        RemoteVitalizationResponse response = this.vitalizeRemoteUProcess0( clientId, imageAddress, isURI, parentPID, startupArgs, contextEnvironmentVars, false );\n        RemoteCreationResult result = new RemoteCreationResult();\n        result.response = response;\n        if ( response.getStatus() != RemoteVitalizationStatus.New.getCode() ) {\n            return result;\n        }\n\n        RemoteProcess remoteProcess = this.createMediatedRemoteProcess( clientId, response );\n        if ( remoteProcess != null ) {\n            String pid = remoteProcess.getPID().toString();\n            this.getLogger().info(\n                    \"[RemoteProcessCreated] [New::PendingVitalization] [MirrorHooked] (ClientId: `{}`, PID: `{}`) <Done>\", clientId, pid\n            );\n        }\n        else {\n            this.getLogger().warn(\n                    \"[RemoteProcessCreated] [New::PendingVitalization] [MirrorHooked] (ClientId: `{}`, ClientProvidedPID: `{}`) <Failure>\", clientId, response.getPID()\n            );\n        }\n\n        result.process  
= remoteProcess;\n        return result;\n    }\n\n    @Override\n    public RemoteCreationResult createRemoteUProcess( long clientId, String imagePath, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException {\n        return this.createRemoteUProcess( clientId, imagePath, false, parentPID, startupArgs, contextEnvironmentVars );\n    }\n\n    @Override\n    public RemoteCreationResult createRemoteUProcess( long clientId, URI imageURI, GUID parentPID, Map<String, String[]> startupArgs, Map<String, String[]> contextEnvironmentVars ) throws RemoteProcessLifecycleException {\n        return this.createRemoteUProcess( clientId, imageURI.toString(), true, parentPID, startupArgs, contextEnvironmentVars );\n    }\n\n\n\n\n    @Override\n    public void register( UProcess that ) {\n        this.mProcessManager.register( that );\n    }\n\n    @Override\n    public Long queryClientIdByPID( GUID pid ) {\n        UProcess process = this.mProcessManager.getProcess( pid );\n        if ( process instanceof RemoteProcess ) {\n            RemoteProcess rp = (RemoteProcess) process;\n            return rp.getControlClientId();\n        }\n        return null;\n    }\n\n    protected void expungeSelf( GUID pid ) {\n        // this.mPidClientIdMap.remove( pid );\n\n        // Reserved.\n    }\n\n    @Override\n    public void erase( UProcess that ) {\n        this.mProcessManager.erase( that );\n        this.expungeSelf( that.getPID() );\n    }\n\n    protected void expunge( UProcess that ) {\n        ArchProcessManager.invokeExpunge( this.mProcessManager, that );\n        this.expungeSelf( that.getPID() );\n    }\n\n    protected UProcess expunge( GUID pid ) {\n        UProcess that = this.mProcessManager.getProcess( pid );\n        if ( that != null ) {\n            this.expunge( that );\n        }\n        return that;\n    }\n\n    protected void registerProcess( long clientId, RemoteProcess process ) 
{\n        //this.mPidClientIdMap.put( process.getPID(), clientId );\n        this.register( process );\n    }\n\n    protected RemoteProcess createMediatedRemoteProcess(\n            long clientId, String name, long localPID, GUID processId,\n            String szStartupArguments, String szEnvironmentVariables, String imageAddress, boolean isURI\n    ) {\n        MediatedRemoteProcess process = new MediatedRemoteProcess(\n                clientId,this, name, localPID, processId,\n                ProcessesUtils.decode( szStartupArguments ), ProcessesUtils.decode( szEnvironmentVariables )\n        );\n\n        this.afterMediatedRemoteProcess( process, imageAddress, isURI );\n        this.registerProcess( clientId, process );\n        return process;\n    }\n\n    @Override\n    public RemoteProcess createMediatedRemoteProcess( long clientId, RemoteVitalizationResponse response ) {\n        return this.createMediatedRemoteProcess(\n                clientId, response.getName(), response.getLocalPID(), this.mGuidAllocator.parse( response.getPID() ),\n                response.getStartupArguments(), response.getEnvironmentVariables(),\n                response.getImageAddress(), response.isImageAddressURI()\n        );\n    }\n\n    @Override\n    public RemoteProcess createMediatedRemoteProcess( long clientId, UProcessMirrorDTO processDTO ) {\n        return this.createMediatedRemoteProcess(\n                clientId, processDTO.getName(), processDTO.getLocalPID(), this.mGuidAllocator.parse( processDTO.getPID() ),\n                processDTO.getStartupArguments(), processDTO.getEnvironmentVariables(),\n                processDTO.getImageAddress(), processDTO.isImageAddressURI()\n        );\n    }\n\n    public static UProcess invokeExpunge( RemoteProcessManagerServer server, String pid ) {\n        if ( server instanceof RavenRemoteProcessManagerServer ) {\n            RavenRemoteProcessManagerServer ravenServer = (RavenRemoteProcessManagerServer) server;\n            
return ravenServer.expunge( ravenServer.mGuidAllocator.parse( pid ) );\n        }\n        return null;\n    }\n\n    @Override\n    public UProcessRuntimeMeta queryProcessRuntimeMeta( GUID pid ) throws RemoteProcessLifecycleException {\n        try {\n            UProcess process = this.mProcessManager.getProcess( pid );\n            if ( process instanceof RemoteProcess ) {\n                RemoteProcess remoteProcess = (RemoteProcess) process;\n                long clientId = remoteProcess.getControlClientId();\n                Object ret = this.mDuplexAppointServer.invokeInform(\n                        clientId, \"com.walnut.odin.proc.server.MasterProcessLifecycleIface.queryRemoteProcessRuntimeMeta\",\n                        pid.toString()\n                );\n                return (UProcessRuntimeMeta) ret; // Cascading retrieval of runtime meta information\n            }\n\n            if ( process == null ) {\n                return null;\n            }\n\n            UProcessRuntimeMeta meta = ProcessesUtils.extractProcessMeta( process );\n            // 不要直接return 老子好打断点.\n            return meta;\n        }\n        catch ( IOException e ) {\n            throw new RemoteProcessLifecycleException( e );\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/proc/server/ReactiveSlaveProcessLifecycleController.java",
    "content": "package com.walnut.odin.proc.server;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\nimport com.walnut.odin.proc.RemoteProcess;\nimport com.walnut.odin.proc.entity.RemoteTerminationReport;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessMirrorDTO;\n\n\n@Controller\n@AddressMapping( \"com.walnut.odin.proc.client.SlaveProcessLifecycleIface.\" )\npublic class ReactiveSlaveProcessLifecycleController implements Pinenut {\n\n    protected RemoteProcessManagerServer  mRemoteProcessManagerServer;\n\n    public ReactiveSlaveProcessLifecycleController( RemoteProcessManagerServer remoteProcessManagerServer ) {\n        this.mRemoteProcessManagerServer = remoteProcessManagerServer;\n    }\n\n    @AddressMapping( \"registerRemoteProcess\" )\n    public void registerRemoteProcess( long clientId, UProcessMirrorDTO processDTO ) {\n        this.mRemoteProcessManagerServer.registerProcess( clientId, processDTO );\n    }\n\n    @AddressMapping( \"reportClientInitialized\" )\n    public long reportClientInitialized( long clientId ) {\n        this.mRemoteProcessManagerServer.getLogger().info( \"[ClientInitializedRecall] [RPC] (ClientId: `{}`) <Done>\", clientId );\n        return clientId;\n    }\n\n    @AddressMapping( \"reportProcessTerminated\" )\n    public void reportProcessTerminated( long clientId, RemoteTerminationReport terminationReport ) {\n        this.mRemoteProcessManagerServer.getLogger().info(\n                \"[RemoteProcessTerminated] [RPC] (ClientId: `{}`, PID: `{}`, ExitCode: `{}`) <Done>\", clientId, terminationReport.getPID(), terminationReport.getExitCode()\n        );\n\n        UProcess that = RavenRemoteProcessManagerServer.invokeExpunge( this.mRemoteProcessManagerServer, 
terminationReport.getPID() );\n        String procName = \"NonExistent\";\n        if ( that != null ) {\n            procName = that.getName();\n            RemoteProcess remoteProcess = (RemoteProcess) that;\n            remoteProcess.notifyRemoteEvent( clientId, ProcessEvent.Terminated, terminationReport );\n        }\n\n        this.mRemoteProcessManagerServer.getLogger().info(\n                \"[RemoteProcessTerminated] [RPC] [MirrorUnhook] (ClientId: `{}`, PID: `{}`, Process: `{}`) <Done>\", clientId, terminationReport.getPID(), procName\n        );\n    }\n\n    @AddressMapping( \"reportProcessCreated\" )\n    public String reportProcessCreated( long clientId, RemoteVitalizationResponse vitalizationResponse ) {\n        String pid = null;\n\n        RemoteProcess remoteProcess = this.mRemoteProcessManagerServer.createMediatedRemoteProcess( clientId, vitalizationResponse );\n        if ( remoteProcess != null ) {\n            pid = remoteProcess.getPID().toString();\n            this.mRemoteProcessManagerServer.getLogger().info(\n                    \"[RemoteProcessCreated] [RPC] [MirrorHooked] (ClientId: `{}`, PID: `{}`) <Done>\", clientId, pid\n            );\n        }\n        else {\n            this.mRemoteProcessManagerServer.getLogger().warn(\n                    \"[RemoteProcessCreated] [RPC] [MirrorHooked] (ClientId: `{}`, ClientProvidedPID: `{}`) <Failure>\", clientId, vitalizationResponse.getPID()\n            );\n        }\n        return pid;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/RavenTaskInstrument.java",
    "content": "package com.walnut.odin.task;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.system.ko.CascadeInstrument;\nimport com.pinecone.hydra.system.ko.KernelObjectConfig;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.system.ko.kom.KOMInstrument;\nimport com.pinecone.hydra.task.ibatis.hydranium.TaskMappingDriver;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.UniformTaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.ElementNode;\nimport com.pinecone.hydra.task.kom.entity.AppElement;\nimport com.pinecone.hydra.task.kom.entity.Namespace;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.entity.TaskTreeNode;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.pinecone.hydra.unit.imperium.ImperialTree;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.hydra.unit.imperium.entity.ReparseLinkNode;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\n\n\nimport com.walnut.odin.task.service.CategoryService;\nimport com.walnut.odin.task.service.RavenCategoryService;\nimport com.walnut.odin.task.source.RavenTaskMasterManipulator;\n\nimport com.walnut.odin.task.system.TaskPathInvalidException;\nimport com.walnut.odin.task.troll.GenericRavenTask;\n\npublic class RavenTaskInstrument implements CentralizedTaskInstrument {\n    protected RavenTaskMasterManipulator ravenTaskMasterManipulator;\n\n    protected UniformTaskInstrument      uniformTaskInstrument;\n\n    protected CategoryService            categoryService;\n\n\n\n\n    protected void 
overrideTaskInstrument( Processum superiorProcess, TaskMappingDriver driver, TaskInstrument parent, String name, KernelObjectConfig config, @Nullable GuidAllocator guidAllocator ) {\n        this.uniformTaskInstrument      = new UniformTaskInstrument( superiorProcess, driver.getMasterManipulator(), parent, name, config, guidAllocator ) {\n        /*    @Override\n            public RavenTaskElement affirmTask( String path ,TaskElement metaInfos ) {\n                TaskElement taskElement           = super.affirmTask( path , metaInfos);\n                if ( taskElement == null ) {\n                    return null;\n                }\n\n                return RavenTaskInstrument.this.transformTaskElement( taskElement, true );\n            }\n\n            @Override\n            public ElementNode queryElement( String path ) {\n                ElementNode proto = super.queryElement( path );\n                if ( proto instanceof TaskElement ) {\n                    return RavenTaskInstrument.this.transformTaskElement( (TaskElement) proto, false );\n                }\n\n                return proto;\n            }\n\n            @Override\n            public TaskTreeNode get( GUID guid ) {\n                TaskTreeNode treeNode = super.get( guid );\n                return RavenTaskInstrument.this.transformTreeNode( treeNode, false );\n            }\n\n            @Override\n            public TreeNode get( GUID guid, int depth ) {\n                TreeNode treeNode = super.get( guid, depth );\n                return RavenTaskInstrument.this.transformTreeNode( (TaskTreeNode) treeNode, false );\n            }\n\n            @Override\n            public TreeNode getAsRootDepth( GUID guid ) {\n                TreeNode treeNode =  super.getAsRootDepth( guid );\n                return RavenTaskInstrument.this.transformTreeNode( (TaskTreeNode) treeNode, false );\n            }*/\n        };\n    }\n\n    public RavenTaskInstrument( Processum superiorProcess, 
KOIMasterManipulator masterManipulator, TaskInstrument parent, String name, KernelObjectConfig config, @Nullable GuidAllocator guidAllocator ) {\n        this.ravenTaskMasterManipulator = (RavenTaskMasterManipulator) masterManipulator;\n        TaskMappingDriver driver        = (TaskMappingDriver) this.ravenTaskMasterManipulator.getTaskMappingDriver();\n        this.overrideTaskInstrument     ( superiorProcess, driver, parent, name, config, guidAllocator );\n\n        this.categoryService            = new RavenCategoryService( this );\n\n    }\n\n    public RavenTaskInstrument( Processum superiorProcess, KOIMasterManipulator masterManipulator, KernelObjectConfig config ) {\n        this( superiorProcess, masterManipulator, null, CentralizedTaskInstrument.class.getSimpleName(), config, null );\n    }\n\n    public RavenTaskInstrument( KOIMappingDriver driver, CentralizedTaskInstrument parent, String name, KernelObjectConfig config ){\n        this( driver.getSuperiorProcess(), driver.getMasterManipulator(), parent, name, config, null );\n    }\n\n    public RavenTaskInstrument( KOIMappingDriver driver, KernelObjectConfig config ) {\n        this( driver.getSuperiorProcess(), driver.getMasterManipulator(), config );\n    }\n\n    @Override\n    public void applyGuidAllocator( GuidAllocator guidAllocator ) {\n        this.uniformTaskInstrument.applyGuidAllocator( guidAllocator );\n    }\n\n    @Override\n    public InstanceInstrument getInstanceInstrument() {\n        return this.uniformTaskInstrument.getInstanceInstrument();\n    }\n\n    @Override\n    public GUID assertGUIDByPath ( String taskTreePath ) throws TaskPathInvalidException {\n        GUID guid = this.uniformTaskInstrument.queryGUIDByPath( taskTreePath );\n        if ( guid == null ) {\n            throw new TaskPathInvalidException( taskTreePath );\n        }\n\n        return guid;\n    }\n\n    @Override\n    public GUID assertTaskGUIDByPath ( String taskTreePath ) throws TaskPathInvalidException, 
IllegalArgumentException {\n        ElementNode node = this.uniformTaskInstrument.queryElement( taskTreePath );\n        if ( node == null ) {\n            throw new TaskPathInvalidException( taskTreePath );\n        }\n        if ( node.evinceTaskElement() == null ) {\n            throw new IllegalArgumentException( \"Path `\" + taskTreePath + \"` is not a task.\" );\n        }\n\n        return node.getGuid();\n    }\n\n\n    @Override\n    public UniformTaskInstrument getUniformTaskInstrument() {\n        return this.uniformTaskInstrument;\n    }\n\n    @Override\n    public RavenTaskMasterManipulator getRavenTaskMasterManipulator() {\n        return this.ravenTaskMasterManipulator;\n    }\n\n    @Override\n    public void newLinkTag( String originalPath, String dirPath, String tagName ) {\n        this.uniformTaskInstrument.newLinkTag( originalPath, dirPath, tagName );\n    }\n\n    @Override\n    public void removeReparseLink( GUID guid ) {\n        this.uniformTaskInstrument.removeReparseLink( guid );\n    }\n\n    @Override\n    public void affirmOwnedNode( GUID parentGuid, GUID childGuid ) {\n        this.uniformTaskInstrument.affirmOwnedNode( parentGuid, childGuid );\n    }\n\n    @Override\n    public void newHardLink( GUID sourceGuid, GUID targetGuid ) {\n        this.uniformTaskInstrument.newHardLink( sourceGuid, targetGuid );\n    }\n\n    @Override\n    public void newLinkTag( GUID originalGuid, GUID dirGuid, String tagName ) {\n        this.uniformTaskInstrument.newLinkTag( originalGuid, dirGuid, tagName );\n    }\n\n    @Override\n    public void updateLinkTag( GUID tagGuid, String tagName ) {\n        this.uniformTaskInstrument.updateLinkTag( tagGuid, tagName );\n    }\n\n    @Override\n    public ReparseLinkNode queryReparseLinkByNS( String path, String szBadSep, String szTargetSep ) {\n        return this.uniformTaskInstrument.queryReparseLinkByNS( path, szBadSep, szTargetSep );\n    }\n\n    @Override\n    public ReparseLinkNode 
queryReparseLink( String path ) {\n        return this.uniformTaskInstrument.queryReparseLink( path );\n    }\n\n    @Override\n    public CategoryService getCategoryService() {\n        return this.categoryService;\n    }\n\n\n\n\n    @Override\n    public AppElement affirmJob(String path ) {\n        return this.uniformTaskInstrument.affirmJob( path );\n    }\n\n    @Override\n    public Namespace affirmNamespace( String path ) {\n        return this.uniformTaskInstrument.affirmNamespace( path );\n    }\n\n    @Override\n    public TaskElement affirmTask( String path ,TaskElement metaInfos) {\n        return (TaskElement) this.uniformTaskInstrument.affirmTask( path ,metaInfos);\n    }\n\n    @Override\n    public ElementNode queryElement( String path ) {\n        return this.uniformTaskInstrument.queryElement( path );\n    }\n\n    @Override\n    public boolean containsChild( GUID parentGuid, String childName ) {\n        return this.uniformTaskInstrument.containsChild( parentGuid, childName );\n    }\n\n    @Override\n    public void update( TreeNode treeNode ) {\n        this.uniformTaskInstrument.update( treeNode );\n    }\n\n    @Override\n    public TreeNode get( GUID guid ) {\n        return this.uniformTaskInstrument.get( guid );\n    }\n\n    @Override\n    public TreeNode get( GUID guid, int depth ) {\n        return this.uniformTaskInstrument.get( guid, depth );\n    }\n\n    @Override\n    public TreeNode getAsRootDepth( GUID guid ) {\n        return this.uniformTaskInstrument.getAsRootDepth( guid );\n    }\n\n\n\n\n    /** Directly proxied **/\n\n    @Override\n    public KOMInstrument parent() {\n        return this.uniformTaskInstrument.parent();\n    }\n\n    @Override\n    public void setParent( CascadeInstrument parent ) {\n        this.uniformTaskInstrument.setParent( parent );\n    }\n\n    @Override\n    public com.pinecone.framework.util.name.Namespace getTargetingName() {\n        return this.uniformTaskInstrument.getTargetingName();\n    
}\n\n    @Override\n    public void setTargetingName( com.pinecone.framework.util.name.Namespace name ) {\n        this.uniformTaskInstrument.setTargetingName( name );\n    }\n\n    @Override\n    public String getPath( GUID guid ) {\n        return this.uniformTaskInstrument.getPath( guid );\n    }\n\n    @Override\n    public String querySystemKernelObjectPath( GUID objectGuid ) {\n        return this.uniformTaskInstrument.querySystemKernelObjectPath( objectGuid );\n    }\n\n    @Override\n    public String getFullName( GUID guid ) {\n        return this.uniformTaskInstrument.getFullName( guid );\n    }\n\n    @Override\n    public GUID queryGUIDByPath( String path ) {\n        return this.uniformTaskInstrument.queryGUIDByPath( path );\n    }\n\n    @Override\n    public GUID queryGUIDByFN( String fullName ) {\n        return this.uniformTaskInstrument.queryGUIDByFN( fullName );\n    }\n\n    @Override\n    public boolean contains( GUID nodeGuid ) {\n        return this.uniformTaskInstrument.contains( nodeGuid );\n    }\n\n    @Override\n    public GUID put( TreeNode treeNode ) {\n        return this.uniformTaskInstrument.put( treeNode );\n    }\n\n    @Override\n    public GUID queryGUIDByNS( String path, String szBadSep, String szTargetSep ) {\n        return this.uniformTaskInstrument.queryGUIDByNS( path, szBadSep, szTargetSep );\n    }\n\n    @Override\n    public void remove( GUID guid ) {\n        this.uniformTaskInstrument.remove( guid );\n    }\n\n    @Override\n    public void remove( String path ) {\n        this.uniformTaskInstrument.remove( path );\n    }\n\n    @Override\n    public List<TreeNode> getChildren( GUID guid ) {\n        return this.uniformTaskInstrument.getChildren( guid );\n    }\n\n    @Override\n    public List<GUID> fetchChildrenGuids( GUID guid ) {\n        return this.uniformTaskInstrument.fetchChildrenGuids( guid );\n    }\n\n    @Override\n    public Object queryEntityHandleByNS( String path, String szBadSep, String szTargetSep ) 
{\n        return this.uniformTaskInstrument.queryEntityHandleByNS( path, szBadSep, szTargetSep );\n    }\n\n    @Override\n    public EntityNode queryNode( String path ) {\n        return this.uniformTaskInstrument.queryNode( path );\n    }\n\n    @Override\n    public TreeNode queryTreeNode( String path ) {\n        return this.uniformTaskInstrument.queryTreeNode( path );\n    }\n\n    @Override\n    public List<? extends TreeNode> fetchRoot() {\n        return this.uniformTaskInstrument.fetchRoot();\n    }\n\n    @Override\n    public void rename( GUID guid, String name ) {\n        this.uniformTaskInstrument.rename( guid, name );\n    }\n\n    @Override\n    public Processum getSuperiorProcess() {\n        return this.uniformTaskInstrument.getSuperiorProcess();\n    }\n\n    @Override\n    public GuidAllocator getGuidAllocator() {\n        return this.uniformTaskInstrument.getGuidAllocator();\n    }\n\n    @Override\n    public ImperialTree getMasterTrieTree() {\n        return this.uniformTaskInstrument.getMasterTrieTree();\n    }\n\n    @Override\n    public KernelObjectConfig getConfig() {\n        return this.uniformTaskInstrument.getConfig();\n    }\n\n    @Override\n    public String getSuperiorPathScope() {\n        return this.uniformTaskInstrument.getSuperiorPathScope();\n    }\n\n    @Override\n    public void applySuperiorPathScope( String superiorPathScope ) {\n        this.uniformTaskInstrument.applySuperiorPathScope( superiorPathScope );\n    }\n\n\n\n\n\n\n    @Override\n    public RavenTask constructTask( TaskElement taskElement ) {\n        return this.constructTask( taskElement, null );\n    }\n\n    @Override\n    public RavenTask constructTask( TaskElement taskElement, @Nullable Identification serviceId ) {\n        if ( serviceId == null ) {\n            //serviceId = taskElement.\n        }\n        RavenTask task = new GenericRavenTask( this, serviceId, taskElement );\n\n        return task;\n    }\n\n    @Override\n    public RavenTask 
createTask( TaskElement taskElement, Identification serviceId ) {\n        this.put( taskElement );\n        RavenTask task = this.constructTask( taskElement, serviceId );\n\n\n        return task;\n    }\n\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/dto/GenericCategoryTag.java",
    "content": "package com.walnut.odin.task.dto;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.slime.entity.ArchEnumIndexableEntity;\n\n\npublic class GenericCategoryTag extends ArchEnumIndexableEntity implements CategoryTag {\n    protected GUID   mTaskGuid;\n\n    protected String mszCategoryType;\n\n    protected String mszCategoryName;\n\n    public GenericCategoryTag() {\n        super();\n    }\n\n    @Override\n    public void setEnumId( long id ) {\n        this.mnEnumId = id;\n    }\n\n    @Override\n    public void setTaskGuid( GUID taskGuid ) {\n        this.mTaskGuid = taskGuid;\n    }\n\n    @Override\n    public GUID getTaskGuid() {\n        return this.mTaskGuid;\n    }\n\n    @Override\n    public void setCategoryName( String categoryName ) {\n        this.mszCategoryName = categoryName;\n    }\n\n    @Override\n    public String getCategoryName() {\n        return this.mszCategoryName;\n    }\n\n    @Override\n    public void setCategoryType( String categoryType ) {\n        this.mszCategoryType = categoryType;\n    }\n\n    @Override\n    public String getCategoryType() {\n        return this.mszCategoryType;\n    }\n\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/entity/pyramid/ArchCategory.java",
    "content": "package com.walnut.odin.task.entity.pyramid;\n\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.slime.entity.ArchEnumIndexableEntity;\n\npublic abstract class ArchCategory extends ArchEnumIndexableEntity implements Category {\n    protected String mszName;\n    protected String mszAlias;\n    protected String mszDescription;\n\n    public ArchCategory() {\n        super();\n    }\n\n    @Override\n    public void setEnumId( long id ) {\n        this.mnEnumId = id;\n    }\n\n    @Override\n    public void setName( String name ) {\n        this.mszName = name;\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public void setAlias( String alias ) {\n        this.mszAlias = alias;\n    }\n\n    @Override\n    public String getAlias() {\n        return this.mszAlias;\n    }\n\n    @Override\n    public void setDescription( String description ) {\n        this.mszDescription = description;\n    }\n\n    @Override\n    public String getDescription() {\n        return this.mszDescription;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/entity/pyramid/GenericCategoryType.java",
    "content": "package com.walnut.odin.task.entity.pyramid;\n\npublic class GenericCategoryType extends ArchCategory implements CategoryType {\n    public GenericCategoryType() {\n        super();\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/entity/pyramid/GenericTaskCategory.java",
    "content": "package com.walnut.odin.task.entity.pyramid;\n\npublic class GenericTaskCategory extends ArchCategory implements TaskCategory {\n    public GenericTaskCategory() {\n        super();\n    }\n\n}\n\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/mapper/CategoryMappingMapper.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport java.util.List;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.walnut.odin.task.dto.CategoryTag;\nimport com.walnut.odin.task.dto.GenericCategoryTag;\nimport com.walnut.odin.task.source.CategoryMappingManipulator;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface CategoryMappingMapper extends CategoryMappingManipulator {\n\n    @Override\n    @Insert( \"INSERT INTO `odin_task_category_mapping` ( `task_guid`, `category_type`, `category_name` ) \" +\n            \"VALUES ( #{taskGuid}, #{categoryType}, #{categoryName} )\" )\n    void insert( CategoryTag categoryTag );\n\n    @Select( \"SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName \" +\n            \"FROM `odin_task_category_mapping` \" +\n            \"WHERE `task_guid` = #{taskGuid}\" )\n    List<GenericCategoryTag> queryByTaskGuid0( @Param( \"taskGuid\" ) GUID taskGuid );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<CategoryTag> queryByTaskGuid( GUID taskGuid ) {\n        return ( List ) this.queryByTaskGuid0( taskGuid );\n    }\n\n\n    @Override\n    @Select( \"SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName \" +\n            \"FROM `odin_task_category_mapping` \" +\n            \"WHERE `task_guid` = #{taskGuid} \" +\n            \"AND `category_type` = #{type} \" +\n            \"AND `category_name` = #{name} \" +\n            \"LIMIT 1\" )\n    GenericCategoryTag queryOwnedTag( @Param( \"taskGuid\" ) GUID taskGuid,\n                 
                    @Param( \"type\" ) String type,\n                                     @Param( \"name\" ) String name );\n\n    @Override\n    @Select( \"SELECT COUNT( * ) \" +\n            \"FROM `odin_task_category_mapping` \" +\n            \"WHERE `category_type` = #{type} \" +\n            \"AND `category_name` = #{name}\" )\n    long countTag( @Param( \"type\" ) String type, @Param( \"name\" ) String name );\n\n    @Select( \"SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName \" +\n            \"FROM `odin_task_category_mapping` \" +\n            \"WHERE `category_type` = #{type} \" +\n            \"AND `category_name` = #{name} \" +\n            \"ORDER BY `id` ASC \" +\n            \"LIMIT #{offset}, #{pageSize}\" )\n    List<GenericCategoryTag> queryTag0( @Param( \"type\" ) String type,\n                                        @Param( \"name\" ) String name,\n                                        @Param( \"offset\" ) long offset,\n                                        @Param( \"pageSize\" ) long pageSize );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<CategoryTag> queryTag( String type, String name, long offset, long pageSize ) {\n        return ( List ) this.queryTag0( type, name, offset, pageSize );\n    }\n\n    @Override\n    @Select( \"SELECT COUNT( * ) \" +\n            \"FROM `odin_task_category_mapping` \" +\n            \"WHERE `category_name` = #{name}\" )\n    long countTagsByName( @Param( \"name\" ) String name );\n\n    @Select( \"SELECT `id` AS enumId, `task_guid` AS taskGuid, `category_type` AS categoryType, `category_name` AS categoryName \" +\n            \"FROM `odin_task_category_mapping` \" +\n            \"WHERE `category_name` = #{name} \" +\n            \"ORDER BY `id` ASC \" +\n            \"LIMIT #{offset}, #{pageSize}\" )\n    List<GenericCategoryTag> fetchByName0( @Param( \"name\" ) String name,\n                                     
      @Param( \"offset\" ) long offset,\n                                           @Param( \"pageSize\" ) long pageSize );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<CategoryTag> fetchByName( String name, long offset, long pageSize ) {\n        return ( List ) this.fetchByName0( name, offset, pageSize );\n    }\n\n    @Override\n    @Update( \"UPDATE `odin_task_category_mapping` \" +\n            \"SET `task_guid` = #{taskGuid}, \" +\n            \"`category_type` = #{categoryType}, \" +\n            \"`category_name` = #{categoryName} \" +\n            \"WHERE `id` = #{enumId}\" )\n    void update( CategoryTag categoryTag );\n\n    @Delete( \"<script>\" +\n            \"DELETE FROM `odin_task_category_mapping` \" +\n            \"<where> \" +\n            \"    1 = 1\" +\n            \"    <if test='taskGuid != null'>AND `task_guid` = #{taskGuid} </if> \" +\n            \"    <if test='type != null'>AND `category_type` = #{type} </if> \" +\n            \"    <if test='name != null'>AND `category_name` = #{name} </if> \" +\n            \"    <if test='taskGuid == null and type == null and name == null'>\\n\" +\n            \"       AND 1 = 0\\n\" + // Prevent to eradicate the whole table.\n            \"    </if>\" +\n            \"</where>\" +\n            \"</script>\" )\n    void purge( @Param( \"taskGuid\" ) GUID taskGuid,\n                @Param( \"type\" ) String type,\n                @Param( \"name\" ) String name );\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/mapper/CategoryTypeMapper.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport java.util.List;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.walnut.odin.task.entity.pyramid.GenericCategoryType;\nimport com.walnut.odin.task.entity.pyramid.CategoryType;\nimport com.walnut.odin.task.source.CategoryTypeManipulator;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface CategoryTypeMapper extends CategoryTypeManipulator {\n\n    @Override\n    @Insert(\"INSERT INTO `odin_task_category_type` (`name`, `alias`, `description`) \" +\n            \"VALUES (#{name}, #{alias}, #{description})\")\n    void insert( CategoryType categoryType );\n\n    @Override\n    @Select(\"SELECT `id` as enumId, `name`, `alias`, `description` \" +\n            \"FROM `odin_task_category_type` \" +\n            \"WHERE `name` = #{name}\")\n    GenericCategoryType queryType( String name );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM `odin_task_category_type`\" )\n    long countTypes( );\n\n    @Select( \"SELECT `id` AS enumId, `name`, `alias`, `description` \" +\n            \"FROM `odin_task_category_type` \" +\n            \"ORDER BY `id` ASC \" +\n            \"LIMIT #{offset}, #{pageSize}\" )\n    List<GenericCategoryType> fetchType0( @Param( \"offset\" ) long offset, @Param( \"pageSize\" ) long pageSize );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<CategoryType> fetchType( long offset, long pageSize ) {\n        return (List) this.fetchType0( offset, pageSize );\n    }\n\n    @Override\n    @Delete(\"DELETE FROM `odin_task_category_type` \" +\n            \"WHERE `name` = #{name}\")\n    void remove( String name );\n\n    @Override\n    @Update(\"UPDATE 
`odin_task_category_type` \" +\n            \"SET `alias` = #{alias}, `description` = #{description} \" +\n            \"WHERE `name` = #{name}\")\n    void update( CategoryType categoryType);\n\n}"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/mapper/TaskCategoryMapper.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport java.util.List;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport com.walnut.odin.task.entity.pyramid.GenericTaskCategory;\nimport com.walnut.odin.task.entity.pyramid.TaskCategory;\nimport com.walnut.odin.task.source.TaskCategoryManipulator;\n\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\n@IbatisDataAccessObject\npublic interface TaskCategoryMapper extends TaskCategoryManipulator {\n\n    @Override\n    @Insert( \"INSERT INTO `odin_task_category` ( `name`, `alias`, `description` ) \" +\n            \"VALUES ( #{name}, #{alias}, #{description} )\" )\n    void insert( TaskCategory taskCategory );\n\n    @Override\n    @Select( \"SELECT `id` AS enumId, `name`, `alias`, `description` \" +\n            \"FROM `odin_task_category` \" +\n            \"WHERE `name` = #{name}\" )\n    GenericTaskCategory queryTaskCategory( String name );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM `odin_task_category`\" )\n    long countCategories( );\n\n    @Select( \"SELECT `id` AS enumId, `name`, `alias`, `description` \" +\n            \"FROM `odin_task_category` \" +\n            \"ORDER BY `id` ASC \" +\n            \"LIMIT #{offset}, #{pageSize}\" )\n    List<GenericTaskCategory> fetchCategory0( @Param( \"offset\" ) long offset, @Param( \"pageSize\" ) long pageSize );\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default List<TaskCategory> fetchCategory( long offset, long pageSize ) {\n        return (List) this.fetchCategory0( offset, pageSize );\n    }\n\n    @Override\n    @Delete( \"DELETE FROM `odin_task_category` \" +\n            \"WHERE `name` = #{name}\" )\n    void remove( String name );\n\n    @Override\n    @Update( 
\"UPDATE `odin_task_category` \" +\n            \"SET `alias` = #{alias}, `description` = #{description} \" +\n            \"WHERE `name` = #{name}\" )\n    void update( TaskCategory taskCategory );\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/service/RavenCategoryService.java",
    "content": "package com.walnut.odin.task.service;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.NonNull;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.task.kom.UniformTaskInstrument;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.dto.CategoryTag;\nimport com.walnut.odin.task.source.CategoryMappingManipulator;\nimport com.walnut.odin.task.source.CategoryTypeManipulator;\nimport com.walnut.odin.task.source.RavenTaskMasterManipulator;\nimport com.walnut.odin.task.source.TaskCategoryManipulator;\nimport com.walnut.odin.task.system.TaskPathInvalidException;\n\npublic class RavenCategoryService implements CategoryService {\n\n    private RavenTaskMasterManipulator ravenTaskMasterManipulator;\n\n    private CategoryTypeManipulator    categoryTypeManipulator;\n\n    private TaskCategoryManipulator    taskCategoryManipulator;\n\n    private CategoryMappingManipulator categoryMappingManipulator;\n\n    private CentralizedTaskInstrument  centralizedTaskInstrument;\n\n    private UniformTaskInstrument      uniformTaskInstrument;\n\n    public RavenCategoryService( CentralizedTaskInstrument instrument ) {\n        this.ravenTaskMasterManipulator = instrument.getRavenTaskMasterManipulator();\n        this.categoryTypeManipulator    = this.ravenTaskMasterManipulator.getCategoryTypeManipulator();\n        this.taskCategoryManipulator    = this.ravenTaskMasterManipulator.getTaskCategoryManipulator();\n        this.categoryMappingManipulator = this.ravenTaskMasterManipulator.getCategoryMappingManipulator();\n        this.centralizedTaskInstrument  = instrument;\n        this.uniformTaskInstrument      = this.centralizedTaskInstrument.getUniformTaskInstrument();\n    }\n\n\n\n\n    @Override\n    public void addCategoryTag ( CategoryTag categoryTag ) {\n        this.categoryMappingManipulator.insert( categoryTag );\n    }\n\n    @Override\n    public 
void addCategoryTag ( String taskTreePath, CategoryTag categoryTag ) throws TaskPathInvalidException, IllegalArgumentException {\n        GUID guid = this.centralizedTaskInstrument.assertTaskGUIDByPath( taskTreePath );\n        categoryTag.setTaskGuid( guid );\n        this.categoryMappingManipulator.insert( categoryTag );\n    }\n\n    @Override\n    public CategoryTag setCategoryTag ( String taskTreePath, CategoryTag categoryTag ) throws TaskPathInvalidException, IllegalArgumentException {\n        GUID guid = this.centralizedTaskInstrument.assertTaskGUIDByPath( taskTreePath );\n        CategoryTag tag = this.queryOwnedTag( guid, categoryTag.getCategoryType(), categoryTag.getCategoryName() );\n        if ( tag != null ) {\n            return tag;\n        }\n\n        categoryTag.setTaskGuid( guid );\n        this.categoryMappingManipulator.insert( categoryTag );\n        return categoryTag;\n    }\n\n    @Override\n    public void updateCategoryTag ( CategoryTag categoryTag ) {\n        this.categoryMappingManipulator.update( categoryTag );\n    }\n\n    @Override\n    public CategoryTag queryOwnedTag( GUID taskGuid, String type, String name ) {\n        return this.categoryMappingManipulator.queryOwnedTag( taskGuid, type, name );\n    }\n\n    @Override\n    public List<CategoryTag> queryCategoryTag ( GUID taskGuid ) {\n        return this.categoryMappingManipulator.queryByTaskGuid( taskGuid );\n    }\n\n    @Override\n    public List<CategoryTag> queryCategoryTag( String taskTreePath ) {\n        GUID guid = this.uniformTaskInstrument.queryGUIDByPath( taskTreePath );\n        if ( guid == null ) {\n            return null;\n        }\n        return this.queryCategoryTag( guid );\n    }\n\n    @Override\n    public long countCategoryTag( String type, String name ) {\n        return this.categoryMappingManipulator.countTag( type, name );\n    }\n\n    @Override\n    public List<CategoryTag> queryCategoryTag ( String type, String name, long offset, long pageSize 
) {\n        return this.categoryMappingManipulator.queryTag( type, name, offset, pageSize );\n    }\n\n    @Override\n    public long countCategoryTagsByName( String name ) {\n        return this.categoryMappingManipulator.countTagsByName( name );\n    }\n\n    @Override\n    public List<CategoryTag> fetchCategoryTagByName ( String name, long offset, long pageSize ) {\n        return this.categoryMappingManipulator.fetchByName( name, offset, pageSize );\n    }\n\n    @Override\n    public void purgeCategoryTag( @Nullable GUID taskGuid, @Nullable String type, @Nullable String name ) {\n        this.categoryMappingManipulator.purge( taskGuid, type, name );\n    }\n\n    @Override\n    public void purgeCategoryTag( @NonNull String name ) {\n        this.categoryMappingManipulator.purgeByName( name );\n    }\n\n    @Override\n    public void purgeCategoryTag( @NonNull GUID taskGuid ) {\n        this.categoryMappingManipulator.purgeByTaskGuid( taskGuid );\n    }\n\n    @Override\n    public void removeCategoryTag( @NonNull GUID taskGuid, @NonNull String type, @NonNull String name ) {\n        this.categoryMappingManipulator.remove( taskGuid, type, name );\n    }\n\n    @Override\n    public void eraseCategoryTag( @NonNull String taskTreePath, @Nullable String type, @Nullable String name ) throws TaskPathInvalidException, IllegalArgumentException {\n        GUID guid = this.centralizedTaskInstrument.assertTaskGUIDByPath( taskTreePath );\n        this.categoryMappingManipulator.remove( guid, type, name );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/ArchRavenTask.java",
    "content": "package com.walnut.odin.task.troll;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.task.ArchTask;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.walnut.odin.task.RavenTask;\n\n\npublic abstract class ArchRavenTask extends ArchTask implements RavenTask {\n\n    public ArchRavenTask( Identification serviceId, TaskElement serviceElement, Map<String, Object > metaDataScope ){\n        super( serviceId, serviceElement, metaDataScope );\n    }\n\n    public ArchRavenTask( Identification serviceId, TaskElement serviceElement ){\n        this( serviceId, serviceElement, null );\n    }\n\n    @Override\n    public TaskElement getTaskElement() {\n        return  super.getTaskElement();\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/ArchRavenTaskInstance.java",
    "content": "package com.walnut.odin.task.troll;\n\nimport java.net.URI;\nimport java.net.URISyntaxException;\n\nimport com.pinecone.hydra.task.ArchTaskInstance;\nimport com.pinecone.hydra.task.Task;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.walnut.odin.task.RavenTaskInstance;\n\npublic abstract class ArchRavenTaskInstance extends ArchTaskInstance implements RavenTaskInstance {\n\n    protected URI processImageURI;\n\n    protected InstanceInstrument instanceInstrument;\n\n    public ArchRavenTaskInstance( InstanceEntry instanceEntry, Task ownedTask ) {\n        super( instanceEntry, ownedTask );\n\n        try {\n            TaskElement taskElement = ownedTask.getTaskElement();\n            if ( taskElement != null ) {\n                String imagePath = taskElement.getImagePath();\n                if ( imagePath != null ) {\n                    this.processImageURI = URI.create( imagePath );\n                }\n            }\n        }\n        catch ( IllegalArgumentException e ) {\n            this.processImageURI = null;\n        }\n\n        this.instanceInstrument = instanceEntry.getTaskInstrument().getInstanceInstrument();\n    }\n\n    @Override\n    public URI getProcessImageURI() {\n        return this.processImageURI;\n    }\n\n    @Override\n    public InstanceInstrument instanceInstrument() {\n        return this.instanceInstrument;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/GenericRavenTask.java",
    "content": "package com.walnut.odin.task.troll;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.system.ko.MetaPersistenceException;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.instance.GenericInstanceEntry;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTaskInstance;\nimport com.walnut.odin.task.RavenTask;\n\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\npublic class GenericRavenTask extends ArchRavenTask implements RavenTask {\n    protected CentralizedTaskInstrument  mTaskInstrument;\n\n    protected InstanceInstrument         mInstanceInstrument;\n\n    protected GuidAllocator              mGuidAllocator;\n\n    public GenericRavenTask(CentralizedTaskInstrument taskInstrument, @Nullable Identification serviceId, TaskElement serviceElement, Map<String, Object> metaDataScope ) {\n        super( serviceId, serviceElement, metaDataScope );\n        this.mTaskInstrument       = taskInstrument;\n        this.mInstanceInstrument   = taskInstrument.getInstanceInstrument();\n        this.mGuidAllocator        = taskInstrument.getGuidAllocator();\n    }\n\n    public GenericRavenTask( CentralizedTaskInstrument taskInstrument, @Nullable Identification serviceId, TaskElement serviceElement ) {\n        this( taskInstrument, serviceId, serviceElement, null );\n    }\n\n    public GenericRavenTask( CentralizedTaskInstrument taskInstrument, TaskElement serviceElement ) {\n        this( taskInstrument, null, serviceElement );\n    }\n\n\n\n    @Override\n    public RavenTaskInstance createInstance() {\n        GUID guid 
= this.mGuidAllocator.nextGUID();\n        GenericInstanceEntry entry = new GenericInstanceEntry( this.mTaskInstrument, this.mTaskElement );\n        entry.setGuid( guid );\n        entry.setActuallyPriority( this.mTaskElement.getActuallyPriority() );\n        entry.setImagePath( this.mTaskElement.getImagePath() );\n        entry.setTaskGuid( this.mTaskElement.getGuid());\n        entry.setCreateTime( LocalDateTime.now() );\n        entry.setScheduleCycle( this.mTaskElement.getScheduleCycle() );\n        entry.setScheduleType( this.mTaskElement.getScheduleType() );\n        entry.setRunCount( 1 );\n        entry.setSequenceCnt( 1 );\n        entry.setRetryCnt( 0 );\n        entry.setTaskType( this.mTaskElement.getType() );\n        entry.setInstanceStatus( TaskInstanceStatus.New );\n        entry.setTaskName( this.mTaskElement.getName() );\n        entry.setProcessorName( this.mTaskElement.getProcessorName() );\n\n        GenericRavenTaskInstance instance = new GenericRavenTaskInstance( entry, this );\n        return instance;\n    }\n\n    public RavenTaskInstance constructInstance( InstanceEntry instanceEntry ) {\n        return this.constructInstance( instanceEntry, null );\n    }\n\n    public RavenTaskInstance constructInstance( InstanceEntry instanceEntry, Identification serviceId ) {\n        if ( serviceId == null ) {\n            //serviceId = taskElement.\n        }\n\n        return null;\n    }\n\n    public RavenTask createInstance( InstanceEntry instanceEntry, Identification serviceId ) {\n//        RavenTaskInstance task = this.constructInstance( taskElement, serviceId );\n//\n//\n//        return task;\n        return null;\n    }\n\n    public void removeInstance( GUID insGuid ) {\n       this.mInstanceInstrument.removeInstance( insGuid );\n    }\n\n    public void updateInstanceMeta( RavenTaskInstance instance ) throws MetaPersistenceException {\n        this.updateTaskMeta( instance.getInstanceEntry() );\n    }\n\n    public void updateTaskMeta( 
InstanceEntry instanceEntry ) throws MetaPersistenceException {\n        this.mInstanceInstrument.updateInstance( instanceEntry );\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/GenericRavenTaskInstance.java",
    "content": "package com.walnut.odin.task.troll;\n\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.system.ko.MetaPersistenceException;\nimport com.pinecone.hydra.task.ArchTaskInstance;\nimport com.pinecone.hydra.task.Task;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTaskInstance;\n\npublic class GenericRavenTaskInstance extends ArchRavenTaskInstance implements RavenTaskInstance {\n\n    public GenericRavenTaskInstance( InstanceEntry instanceEntry, Task ownedTask ) {\n        super( instanceEntry, ownedTask );\n    }\n\n    public GenericRavenTaskInstance( InstanceEntry instanceEntry, CentralizedTaskInstrument instrument ) {\n        super( instanceEntry, new GenericRavenTask( instrument, instanceEntry.taskElement() ) );\n    }\n\n\n\n    @Override\n    public Object getProcessObject() {\n        return null;\n    }\n\n    @Override\n    public UProcess affinityProcess() {\n        return null;\n    }\n\n    @Override\n    public void startLocalProcess() {\n\n    }\n\n    @Override\n    public void startRemoteProcess() {\n\n    }\n\n    @Override\n    public void startRemoteProcess( boolean bDirectlyVitalize ) {\n\n    }\n\n    @Override\n    public void startRemoteProcess( boolean bDirectlyVitalize, long processClientId ) {\n\n    }\n\n    @Override\n    public void update() throws MetaPersistenceException {\n        this.instanceInstrument.updateInstance( this.mInstanceEntry );\n    }\n\n    @Override\n    public void persist() throws MetaPersistenceException {\n        InstanceEntry leg = this.instanceInstrument.getInstanceEntry( this.getGuid() );\n        if ( leg == null ) {\n            this.instanceInstrument.addInstance( this.mInstanceEntry );\n        }\n        else {\n            this.update();\n        }\n    }\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/LaunchErrorCauses.java",
    "content": "package com.walnut.odin.task.troll;\n\npublic final class LaunchErrorCauses {\n\n    public static final String NoSuchImage = \"NoSuchImage\";\n\n    public static final String RemoteProcessCreationFailure = \"RemoteProcessCreationFailure\";\n\n}\n"
  },
  {
    "path": "Odin/odin-framework-runtime/src/main/java/com/walnut/odin/task/troll/TrollTaskExecutionLauncher.java",
    "content": "package com.walnut.odin.task.troll;\n\nimport java.net.URI;\nimport java.time.LocalDateTime;\nimport java.time.format.DateTimeFormatter;\nimport java.util.List;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.util.CollectionUtils;\nimport com.pinecone.framework.util.datetime.DatePattern;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.ImageLoader;\nimport com.pinecone.hydra.proc.image.ImageModifier;\nimport com.pinecone.hydra.proc.image.URLImageLoader;\nimport com.pinecone.hydra.system.component.LogStatuses;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.hydra.system.ko.MetaPersistenceException;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.pinecone.hydra.task.marshal.TaskScheduleCycle;\nimport com.walnut.odin.conduct.CollectiveTaskRegiment;\nimport com.walnut.odin.proc.ProcessRemoteEventHandler;\nimport com.walnut.odin.proc.RemoteProcess;\nimport com.walnut.odin.proc.RemoteVitalizationStatus;\nimport com.walnut.odin.proc.server.RemoteProcessManagerServer;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.RavenTaskConfig;\nimport com.walnut.odin.task.RavenTaskInstance;\n\npublic class TrollTaskExecutionLauncher implements TaskExecutionLauncher, Slf4jTraceable {\n\n    protected Logger mLogger;\n\n    protected RemoteProcessManagerServer mRemoteProcessManagerServer;\n\n    protected 
CollectiveTaskRegiment mCollectiveTaskRegiment;\n\n    protected CentralizedTaskInstrument mTaskInstrument;\n\n    protected InstanceInstrument mInstanceInstrument;\n\n    protected ProcessManager mProcessManager;\n\n    protected RavenTaskConfig mRavenTaskConfig;\n\n    protected DateTimeFormatter mInstanceTitleTimeFormat;\n\n    protected DateTimeFormatter mDefaultDateTimeFormat;\n\n    protected GuidAllocator mGuidAllocator;\n\n    protected ImageModifier mImageModifier;\n\n\n    public TrollTaskExecutionLauncher( CollectiveTaskRegiment taskRegiment ) {\n        this.mLogger                      = LoggerFactory.getLogger( this.getClass() );\n        this.mRemoteProcessManagerServer  = taskRegiment.remoteProcessManagerServer();\n        this.mProcessManager              = taskRegiment.processManager();\n        this.mCollectiveTaskRegiment      = taskRegiment;\n        this.mTaskInstrument              = taskRegiment.taskInstrument();\n        this.mInstanceInstrument          = this.mTaskInstrument.getInstanceInstrument();\n        this.mRavenTaskConfig             = (RavenTaskConfig) this.mTaskInstrument.getConfig();\n        this.mGuidAllocator               = this.mTaskInstrument.getGuidAllocator();\n        this.mInstanceTitleTimeFormat     = DatePattern.createFormatter( this.mRavenTaskConfig.getInstanceTitleTimeFormat() );\n        this.mDefaultDateTimeFormat       = DatePattern.createFormatter( this.mRavenTaskConfig.getDefaultDateTimeFormat() );\n        this.mImageModifier               = this.mProcessManager.getImageModifier();\n\n        this.infoLifecycle( \"Welcome to use Skynet cloud deployment system, Odin Troll task execution system.\", LogStatuses.StatusReady );\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    @Override\n    public ProcessManager processManager() {\n        return this.mProcessManager;\n    }\n\n    @Override\n    public LocalDateTime evalBusinessTime( RavenTaskInstance instance, 
LocalDateTime biz ) {\n        TaskScheduleCycle cycle = instance.getKernelScheduleCycle();\n\n        LocalDateTime adjustedTime;\n\n        switch ( cycle ) {\n            case Month:\n            case Week:\n            case Day: {\n                adjustedTime = biz.withHour(0).withMinute(0).withSecond(0).withNano(0);\n                break;\n            }\n            case Hour: {\n                adjustedTime = biz.withMinute(0).withSecond(0).withNano(0);\n                break;\n            }\n            case Minute: {\n                adjustedTime = biz.withSecond(0).withNano(0);\n                break;\n            }\n            case Undefined:\n            default: {\n                adjustedTime = biz;\n                break;\n            }\n        }\n\n        return adjustedTime;\n    }\n\n    @Override\n    public LocalDateTime evalBusinessTime( RavenTaskInstance instance ) {\n        return this.evalBusinessTime( instance, LocalDateTime.now() );\n    }\n\n    @Override\n    public String evalBusinessTimeLabel( RavenTaskInstance instance, LocalDateTime biz ) {\n        return this.evalBusinessTime( instance, biz ).format( this.mInstanceTitleTimeFormat );\n    }\n\n    @Override\n    public String evalBusinessTimeLabel( RavenTaskInstance instance ) {\n        return this.evalBusinessTime( instance ).format( this.mInstanceTitleTimeFormat );\n    }\n\n    @Override\n    public String evalInstanceName( RavenTaskInstance instance, LocalDateTime now, LocalDateTime bizTimeEpoch ) {\n        String bizTimeLab     = this.evalBusinessTimeLabel( instance, bizTimeEpoch );\n        String execTimeLab    = now.format( this.mInstanceTitleTimeFormat );\n        String szInstanceName = String.format(\n                \"%s_%s_ET_%s\",\n                instance.getOwnedTask().getName(),\n                bizTimeLab,\n                execTimeLab\n        );\n        return szInstanceName;\n    }\n\n    @Override\n    public String evalInstanceName( RavenTaskInstance 
instance, LocalDateTime bizTimeEpoch ) {\n        return this.evalInstanceName( instance, LocalDateTime.now(), bizTimeEpoch );\n    }\n\n\n\n\n    @Override\n    public void initializeInstance( RavenTaskInstance instance, LaunchFeature feature ) {\n        LocalDateTime now = LocalDateTime.now();\n        this.getLogger().info(\n                \"[TaskLaunchSequence] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`, Time: `{}`) <Start>\",\n                instance.getOwnedTask().getName(),\n                instance.getOwnedTask().getFullName(),\n                instance.getOwnedTask().getId(),\n                now.format( this.mDefaultDateTimeFormat )\n        );\n\n        String szInstanceName = this.evalInstanceName( instance, now, feature.getBizTimeEpoch() );\n        InstanceEntry entry   = instance.getInstanceEntry();\n\n        entry.setInstanceName( szInstanceName );\n        String bizTimeLab = this.evalBusinessTimeLabel( instance, feature.getBizTimeEpoch() );\n        InstanceEntry previous = this.mInstanceInstrument.findLastExecuted( instance.getTaskGuid(), bizTimeLab );\n\n        int runCount      = 0;\n        int sequenceCnt   = 0;\n        int retryCnt      = 0;\n\n        if ( previous != null ) {\n            runCount = previous.getRunCount() + 1;\n\n            if ( feature.isRetry() ) {\n                sequenceCnt = previous.getSequenceCnt();\n                retryCnt    = previous.getRetryCnt() + 1;\n            }\n            else {\n                sequenceCnt = previous.getSequenceCnt() + 1;\n                retryCnt    = 0;\n            }\n        }\n\n        if ( previous == null ) {\n            runCount    = 1;\n            sequenceCnt = 1;\n            retryCnt    = 0;\n        }\n\n        LocalDateTime bizTime = this.evalBusinessTime( instance, feature.getBizTimeEpoch() );\n        entry.setRunCount( runCount );\n        entry.setSequenceCnt( sequenceCnt );\n        entry.setRetryCnt( retryCnt );\n        if ( 
entry.getGuid() == null ) {\n            entry.setGuid( this.mGuidAllocator.nextGUID() );\n        }\n        entry.setInstanceStatus( TaskInstanceStatus.New );\n        entry.setBusinessTime( bizTime );\n\n        this.mTaskInstrument.getInstanceInstrument().addInstance( entry );\n        this.getLogger().info(\n                \"[TaskLaunchSequence] [Schema] (Task: `{}`, InstanceName: `{}`, InsGuid: `{}`, RunCount: {}, SequenceCnt: {}, RetryCnt: {}, RetryMode: {}, BusinessTime: {}) <Ready to elevate>\",\n                instance.getOwnedTask().getName(),\n                szInstanceName,\n                entry.getGuid(),\n                runCount,\n                sequenceCnt,\n                retryCnt,\n                feature.isRetry(),\n                bizTime\n        );\n    }\n\n    protected void afterProcessCreated( RavenTaskInstance instance, UProcess process ) throws MetaPersistenceException {\n        instance.getInstanceEntry().setInstanceStatus( TaskInstanceStatus.ProcessStandby );\n        instance.update();\n    }\n\n    protected URI evalImageURI( RavenTaskInstance instance, LaunchFeature feature ) {\n        URI imageURI = feature.getDesignatedImageURI();\n        if ( imageURI == null ) {\n            imageURI = instance.getProcessImageURI();\n        }\n\n        return imageURI;\n    }\n\n    protected UProcess prepareProcessHandle( UProcess process, LaunchFeature feature ) {\n        List<ProcessEventHandler> handlers = feature.getSysProcEventHandlers();\n        if ( CollectionUtils.isNoneEmpty(handlers) ) {\n            for ( ProcessEventHandler handler : handlers ) {\n                this.mImageModifier.addSystemProcessEventHandler( process.getExecutionImage().getEntryPoint(), handler );\n            }\n        }\n        return process;\n    }\n\n    @Override\n    public UProcess createLocally( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException {\n        try {\n            this.initializeInstance( 
instance, feature );\n            URI imageURI = this.evalImageURI( instance, feature );\n            ImageLoader imageLoader = this.mProcessManager.getImageLoader();\n            ExecutionImage image;\n            UProcess process = null;\n            if ( imageLoader instanceof URLImageLoader ) {\n                URLImageLoader urlImageLoader = (URLImageLoader) imageLoader;\n                image = urlImageLoader.queryExecutionImage( imageURI );\n            }\n            else {\n                image = imageLoader.queryExecutionImage( imageURI.getPath() );\n            }\n\n            if ( image == null ) {\n                instance.getInstanceEntry().setErrorCause( LaunchErrorCauses.NoSuchImage );\n            }\n            else {\n                instance.getInstanceEntry().setImagePath( imageURI.toString() );\n                this.mLogger.info( \"[TaskLaunchSequence] [LocalProcessAnchored] (Process: `{}`) <Standby>\", imageURI );\n                process = this.mProcessManager.createLocalHostedProcess(\n                        image, feature.getParentProcess(), feature.getStartupArgs(), feature.getContextEnvironmentVars()\n                );\n            }\n\n\n            this.prepareProcessHandle( process, feature );\n            this.afterProcessCreated( instance, process );\n            return process;\n        }\n        catch ( Exception e ) {\n            throw new InstanceLaunchException( e );\n        }\n    }\n\n    @Override\n    public UProcess createRemotely( RavenTaskInstance instance, long pmClientId, LaunchFeature feature ) throws InstanceLaunchException {\n        try {\n            this.initializeInstance( instance, feature );\n            URI imageURI = this.evalImageURI( instance, feature );\n            RemoteProcess process = null;\n\n            GUID parentPid = null;\n            if ( feature.getParentProcess() != null ) {\n                parentPid = feature.getParentProcess().getPID();\n            }\n            else if ( 
feature.getParentPid() != null ) {\n                parentPid = feature.getParentPid();\n            }\n\n            instance.getInstanceEntry().setImagePath( imageURI.toString() );\n            this.mLogger.info( \"[TaskLaunchSequence] [RemoteProcessAnchored] (Process: `{}`, DestinationDeployClient: `{}`) <Standby>\", imageURI, pmClientId );\n            RemoteProcessManagerServer.RemoteCreationResult result = this.mRemoteProcessManagerServer.createRemoteUProcess(\n                    pmClientId, imageURI.toString(), true, parentPid, feature.getStartupArgs(), feature.getContextEnvironmentVars()\n            );\n            process = result.getProcess();\n            if ( result.getResponse().getStatus() != RemoteVitalizationStatus.New.getCode() || process == null ) {\n                instance.getInstanceEntry().setErrorCause( LaunchErrorCauses.RemoteProcessCreationFailure );\n            }\n\n            this.prepareProcessHandle( process, feature );\n            this.afterProcessCreated( instance, process );\n            return process;\n        }\n        catch ( Exception e ) {\n            throw new InstanceLaunchException( e );\n        }\n    }\n\n\n    protected void afterOwnedProcessTerminated( RavenTaskInstance instance, UProcess process ) {\n        try {\n            instance.getInstanceEntry().setInstanceStatus( TaskInstanceStatus.Finished );\n            instance.getInstanceEntry().setLastEndTime( LocalDateTime.now() );\n            instance.update();\n        }\n        catch ( MetaPersistenceException e ) {\n            mLogger.error(\n                    \"[TaskLaunchSequence] [MetaPersistenceException] (Process: `{}`, PID: `{}`) <Error>\", process.getName(), process.getPID()\n            );\n            mLogger.error( \"[TaskLaunchSequence] [MetaPersistenceException: `{}`]\", e );\n        }\n    }\n\n    protected void afterOwnedProcessStarted( RavenTaskInstance instance, UProcess process ) throws InstanceLaunchException {\n        try {\n       
     instance.getInstanceEntry().setInstanceStatus( TaskInstanceStatus.Running );\n            instance.getInstanceEntry().setLastStartTime( LocalDateTime.now() );\n            instance.update();\n        }\n        catch ( MetaPersistenceException e ) {\n            throw new InstanceLaunchException( e );\n        }\n    }\n\n\n    @Override\n    public UProcess launchLocally( RavenTaskInstance instance, LaunchFeature feature ) throws InstanceLaunchException {\n        this.getLogger().info(\n                \"[TaskLaunchSequence] [LaunchLocally] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`) <Start>\",\n                instance.getOwnedTask().getName(),\n                instance.getOwnedTask().getFullName(),\n                instance.getOwnedTask().getId()\n        );\n\n        UProcess process = this.createLocally( instance, feature );\n        if ( process == null ) {\n            return null;\n        }\n\n        this.mLogger.info( \"[TaskLaunchSequence] [LocalProcessStandby] (Process: `{}`, PID: `{}`) <LaunchServerAck>\", process.getName(), process.getPID() );\n        this.mLogger.info( \"[TaskLaunchSequence] [ExecutingVitalizationInstruction] (Process: `{}`, PID: `{}`) <Start>\", process.getName(), process.getPID() );\n\n\n        this.mImageModifier.addSystemProcessEventHandler(process.getExecutionImage().getEntryPoint(), new ProcessEventHandler() {\n            @Override\n            public void fired( EntryPointRunnable runnable, ProcessEvent event ) {\n                if ( event == ProcessEvent.Terminated ) {\n                    afterOwnedProcessTerminated( instance, process );\n\n                    mLogger.info(\n                            \"[TaskLaunchSequence] [LocalTaskFinished] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`) <Done>\",\n                            instance.getOwnedTask().getName(),\n                            instance.getOwnedTask().getFullName(),\n                            
instance.getOwnedTask().getId()\n                    );\n                }\n            }\n        });\n        process.start();\n        this.afterOwnedProcessStarted( instance, process );\n\n        this.mLogger.info(\n                \"[TaskLaunchSequence] [LocalTaskLaunched] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`, PID: `{}`) <Done>\",\n                instance.getOwnedTask().getName(),\n                instance.getOwnedTask().getFullName(),\n                instance.getOwnedTask().getId(),\n                process.getPID()\n        );\n        return process;\n    }\n\n    @Override\n    public UProcess launchRemotely( RavenTaskInstance instance, long pmClientId, LaunchFeature feature ) throws InstanceLaunchException {\n        this.getLogger().info(\n                \"[TaskLaunchSequence] [LaunchRemotely] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`) <Start>\",\n                instance.getOwnedTask().getName(),\n                instance.getOwnedTask().getFullName(),\n                instance.getOwnedTask().getId()\n        );\n\n        UProcess process = this.createRemotely( instance, pmClientId, feature );\n        if ( process == null ) {\n            return null;\n        }\n\n        this.mLogger.info( \"[TaskLaunchSequence] [RemoteProcessStandby] (Process: `{}`, PID: `{}`) <LaunchServerAck>\", process.getName(), process.getPID() );\n        this.mLogger.info( \"[TaskLaunchSequence] [SendingVitalizationInstruction] (Process: `{}`, PID: `{}`, DestinationClient: `{}`) <Start>\", process.getName(), process.getPID(), pmClientId );\n\n        RemoteProcess remoteProcess = (RemoteProcess) process;\n        remoteProcess.addRemoteEventHandler(new ProcessRemoteEventHandler() {\n            @Override\n            public void fired( long pmClientId, ProcessEvent event, Object caused ) {\n                if ( event == ProcessEvent.Terminated ) {\n                    afterOwnedProcessTerminated( instance, process );\n\n                  
  mLogger.info(\n                            \"[TaskLaunchSequence] [RemoteTaskFinished] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`) <Done>\",\n                            instance.getOwnedTask().getName(),\n                            instance.getOwnedTask().getFullName(),\n                            instance.getOwnedTask().getId()\n                    );\n                }\n            }\n        });\n        process.start();\n        this.afterOwnedProcessStarted( instance, process );\n\n        this.mLogger.info(\n                \"[TaskLaunchSequence] [RemoteTaskLaunched] (TaskName: `{}`, KernelHandleName: `/{}`, TaskGuid: `{}`, PID: `{}`) <Done>\",\n                instance.getOwnedTask().getName(),\n                instance.getOwnedTask().getFullName(),\n                instance.getOwnedTask().getId(),\n                process.getPID()\n        );\n        return process;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-mapper-driver/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>odin</artifactId>\n        <groupId>com.walnut.odin</groupId>\n        <version>2.5.1</version>\n    </parent>\n\n    <artifactId>odin-mapper-driver</artifactId>\n    <version>2.5.1</version>\n    <modelVersion>4.0.0</modelVersion>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-architecture</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-framework-conduct</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n    
        <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-framework-atlas</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n</project>"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/ArchAtlasMappingDriver.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.construction.UnifyStructureInjector;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.homotype.StereotypicInjector;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.slime.jelly.source.ibatis.ProxySessionMapperPool;\n\nimport java.util.List;\nimport java.util.Map;\n\npublic abstract class ArchAtlasMappingDriver implements AtlasMappingDriver {\n    protected Hydrogen mSystem;\n\n    protected Processum mSuperiorProcess;\n\n    protected IbatisClient mIbatisClient;\n\n    //protected SqlSession           mSqlSession;\n\n    protected List<Class<? > > mMapperCandidates;\n\n    protected ResourceDispenserCenter mResourceDispenserCenter;\n\n    public ArchAtlasMappingDriver( Processum superiorProcess ) {\n        this.mSuperiorProcess                 = superiorProcess;\n        if ( this.mSuperiorProcess instanceof Hydrogen) {\n            this.mSystem                      = (Hydrogen) this.mSuperiorProcess;\n        }\n        else {\n            this.mSystem                      = (Hydrogen) superiorProcess.parentSystem();\n        }\n    }\n\n    // Temp , TODO\n    public ArchAtlasMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter, String szPackageName ) {\n        this( superiorProcess );\n\n        this.mIbatisClient = ibatisClient;\n        //this.mSqlSession   = ibatisClient.openSession( true );\n\n        //SqlSessionTemplate\n\n        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUID72TypeHandler.class );\n        
ibatisClient.getConfiguration().getTypeHandlerRegistry().register( GUIDTypeHandler.class );\n        ibatisClient.getConfiguration().getTypeHandlerRegistry().register( UOITypeHandler.class );\n\n        ibatisClient.addXMLObjectScope( \"mapper.kernel.task\" );\n        this.mMapperCandidates = ibatisClient.addDataAccessObjectScope( szPackageName );\n\n        for( Class<? > mapperClass : this.mMapperCandidates ) {\n            dispenserCenter.getInstanceDispenser().register(\n                    mapperClass,\n                    //new SoloSessionMapperPool( this.mSqlSession, mapperClass )\n                    new ProxySessionMapperPool( ibatisClient, mapperClass )\n            );\n        }\n\n        this.mResourceDispenserCenter = dispenserCenter;\n    }\n\n    @Override\n    public StereotypicInjector autoConstruct(Class<?> stereotype, Map config, Object instance ) {\n        UnifyStructureInjector injector = new UnifyStructureInjector( stereotype, this.mResourceDispenserCenter.getInstanceDispenser() );\n        try{\n            injector.inject( config, instance );\n        }\n        catch ( Exception e ){\n            throw new ProxyProvokeHandleException( e );\n        }\n        return injector;\n    }\n\n    @Override\n    public String getVersionSignature() {\n        return \"HydraniumV2.1\";\n    }\n\n    @Override\n    public Hydrogen getSystem() {\n        return this.mSystem;\n    }\n\n    @Override\n    public Processum getSuperiorProcess() {\n        return this.mSuperiorProcess;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/GUID72TypeHandler.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.ulf.util.guid.i64.GUID72;\nimport org.apache.ibatis.type.BaseTypeHandler;\nimport org.apache.ibatis.type.JdbcType;\nimport org.apache.ibatis.type.MappedJdbcTypes;\nimport org.apache.ibatis.type.MappedTypes;\n\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\n@MappedTypes(GUID72.class)\n@MappedJdbcTypes(JdbcType.VARCHAR)\npublic class GUID72TypeHandler extends BaseTypeHandler<GUID72> {\n\n    @Override\n    public void setNonNullParameter(PreparedStatement ps, int i, GUID72 parameter, JdbcType jdbcType) throws SQLException {\n        ps.setString(i, parameter.toString());\n    }\n\n    @Override\n    public GUID72 getNullableResult(ResultSet rs, String columnName) throws SQLException {\n        String value = rs.getString(columnName);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new GUID72( value );\n    }\n\n    @Override\n    public GUID72 getNullableResult(ResultSet rs, int columnIndex) throws SQLException {\n        String value = rs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new GUID72( value );\n    }\n\n    @Override\n    public GUID72 getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {\n        String value = cs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new GUID72( value );\n    }\n}\n"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/GUIDTypeHandler.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport org.apache.ibatis.type.BaseTypeHandler;\nimport org.apache.ibatis.type.JdbcType;\nimport org.apache.ibatis.type.MappedJdbcTypes;\nimport org.apache.ibatis.type.MappedTypes;\n\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\n@MappedTypes(GUID.class)\n@MappedJdbcTypes(JdbcType.VARCHAR)\npublic class GUIDTypeHandler extends BaseTypeHandler<GUID> {\n\n    @Override\n    public void setNonNullParameter(PreparedStatement ps, int i, GUID parameter, JdbcType jdbcType) throws SQLException {\n        ps.setString(i, parameter.toString());\n    }\n\n    @Override\n    public GUID getNullableResult(ResultSet rs, String columnName) throws SQLException {\n        String value = rs.getString(columnName);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n\n    @Override\n    public GUID getNullableResult(ResultSet rs, int columnIndex) throws SQLException {\n        String value = rs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n\n    @Override\n    public GUID getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {\n        String value = cs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UUID128( value );\n    }\n}"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/OdinAtlasMappingDriver.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMasterManipulator;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class OdinAtlasMappingDriver extends ArchAtlasMappingDriver implements AtlasMappingDriver {\n    protected AtlasMasterManipulator mVectorGraphMasterManipulator;\n\n    public OdinAtlasMappingDriver( Processum superiorProcess ){\n        super( superiorProcess );\n    }\n\n    public OdinAtlasMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, OdinAtlasMappingDriver.class.getPackageName() );\n\n        this.mVectorGraphMasterManipulator = new OdinAtlasMasterManipulatorImpl( this );\n    }\n\n    @Override\n    public AtlasMasterManipulator getMasterManipulator() {\n        return this.mVectorGraphMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/OdinAtlasMasterGraphManipulatorImpl.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphManipulator;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphPathCacheManipulator;\n\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class OdinAtlasMasterGraphManipulatorImpl implements VectorGraphMasterManipulator {\n\n    @Resource\n    @Structure( type = RuntimeVGraphMapper.class)\n    VectorGraphManipulator mVectorGraphManipulator;\n\n    @Resource\n    @Structure( type = RuntimeVectorGraphPathCacheMapper.class)\n    VectorGraphPathCacheManipulator mVectorGraphPathCacheManipulator;\n\n    public OdinAtlasMasterGraphManipulatorImpl(){}\n\n    public OdinAtlasMasterGraphManipulatorImpl(AtlasMappingDriver driver){\n        driver.autoConstruct( OdinAtlasMasterGraphManipulatorImpl.class, Map.of(), this);\n    }\n\n    @Override\n    public VectorGraphManipulator getVectorGraphManipulator() {\n        return this.mVectorGraphManipulator;\n    }\n\n    @Override\n    public VectorGraphPathCacheManipulator getVectorGraphPathCacheManipulator() {\n        return this.mVectorGraphPathCacheManipulator;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/OdinAtlasMasterManipulatorImpl.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\nimport org.springframework.stereotype.Component;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;\nimport com.pinecone.hydra.unit.vgraph.source.VectorGraphMasterManipulator;\n\n\n@Component\npublic class OdinAtlasMasterManipulatorImpl implements RunAtlasMasterManipulator {\n    @Resource\n    @Structure( type = OdinAtlasMasterGraphManipulatorImpl.class )\n    VectorGraphMasterManipulator mVectorGraphMasterManipulator;\n\n    @Resource\n    @Structure( type = QueueStratumMapper.class )\n    QueueStratumManipulator mQueueStratumManipulator;\n\n    public OdinAtlasMasterManipulatorImpl() {}\n\n    public OdinAtlasMasterManipulatorImpl(AtlasMappingDriver driver ) {\n        driver.autoConstruct(OdinAtlasMasterManipulatorImpl.class, Map.of(),this);\n        this.mVectorGraphMasterManipulator = new OdinAtlasMasterGraphManipulatorImpl(driver);\n    }\n\n    @Override\n    public VectorGraphMasterManipulator getVectorGraphMasterManipulator() {\n        return this.mVectorGraphMasterManipulator;\n    }\n\n    @Override\n    public QueueStratumManipulator getQueueStratumManipulator() {\n        return this.mQueueStratumManipulator;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/atlas/mapper/UOITypeHandler.java",
    "content": "package com.walnut.odin.atlas.mapper;\n\nimport com.pinecone.framework.util.uoi.UOI;\nimport org.apache.ibatis.type.BaseTypeHandler;\nimport org.apache.ibatis.type.JdbcType;\nimport org.apache.ibatis.type.MappedJdbcTypes;\nimport org.apache.ibatis.type.MappedTypes;\n\nimport java.sql.CallableStatement;\nimport java.sql.PreparedStatement;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\n\n\n@MappedTypes(UOI.class)\n@MappedJdbcTypes(JdbcType.VARCHAR)\npublic class UOITypeHandler extends BaseTypeHandler<UOI > {\n    @Override\n    public void setNonNullParameter( PreparedStatement ps, int i, UOI parameter, JdbcType jdbcType ) throws SQLException {\n        ps.setString(i, parameter.toString());\n    }\n\n    @Override\n    public UOI getNullableResult( ResultSet rs, String columnName ) throws SQLException {\n        String value = rs.getString(columnName);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UOI( value );\n    }\n\n    @Override\n    public UOI getNullableResult( ResultSet rs, int columnIndex ) throws SQLException {\n        String value = rs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UOI( value );\n    }\n\n    @Override\n    public UOI getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {\n        String value = cs.getString(columnIndex);\n        if (value == null) {\n            return null; // 如果值为 null，则直接返回 null\n        }\n        return new UOI( value );\n    }\n}"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/task/mapper/OdinUniformTaskMappingDriver.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.entity.ibatis.hydranium.ArchMappingDriver;\nimport com.pinecone.hydra.system.component.ResourceDispenserCenter;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMasterManipulator;\nimport com.pinecone.hydra.task.ibatis.hydranium.TaskMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\npublic class OdinUniformTaskMappingDriver extends ArchMappingDriver implements OdinTaskMappingDriver {\n    protected KOIMasterManipulator mKOIMasterManipulator;\n\n    protected KOIMappingDriver     mParentDriver;\n\n    public OdinUniformTaskMappingDriver( Processum superiorProcess ) {\n        super( superiorProcess );\n    }\n\n    public OdinUniformTaskMappingDriver( Processum superiorProcess, IbatisClient ibatisClient, ResourceDispenserCenter dispenserCenter ) {\n        super( superiorProcess, ibatisClient, dispenserCenter, OdinUniformTaskMappingDriver.class.getPackageName().replace( \"hydranium\", \"\" ) );\n\n        this.mParentDriver = new TaskMappingDriver(\n                superiorProcess, ibatisClient, dispenserCenter\n        );\n\n        this.mKOIMasterManipulator = new RavenTaskMasterManipulatorImpl( this, (TaskMappingDriver)this.getParentDriver() );\n    }\n\n    @Override\n    public KOIMasterManipulator getMasterManipulator() {\n        return this.mKOIMasterManipulator;\n    }\n\n    @Override\n    public KOIMappingDriver getParentDriver() {\n        return this.mParentDriver;\n    }\n\n}\n"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/task/mapper/RavenTaskMasterManipulatorImpl.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport com.pinecone.framework.system.construction.Structure;\n\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.task.ibatis.hydranium.TaskMappingDriver;\nimport com.pinecone.hydra.task.kom.source.TaskMasterManipulator;\nimport com.walnut.odin.task.source.ScheduleManipulator;\nimport com.walnut.odin.task.source.CategoryMappingManipulator;\nimport com.walnut.odin.task.source.CategoryTypeManipulator;\nimport com.walnut.odin.task.source.RavenTaskMasterManipulator;\nimport com.walnut.odin.task.source.TaskCategoryManipulator;\nimport com.walnut.odin.task.source.TaskProcessorManipulator;\n\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class RavenTaskMasterManipulatorImpl implements RavenTaskMasterManipulator {\n\n    protected KOISkeletonMasterManipulator skeletonMasterManipulator;\n\n    protected TaskMappingDriver            taskMappingDriver;\n\n    protected TaskMasterManipulator        taskMasterManipulator;\n\n    @Resource\n    @Structure( type = CategoryTypeMapper.class )\n    protected CategoryTypeManipulator categoryTypeManipulator;\n\n    @Resource\n    @Structure( type = TaskCategoryMapper.class )\n    protected TaskCategoryManipulator taskCategoryManipulator;\n\n    @Resource\n    @Structure( type = CategoryMappingMapper.class )\n    protected CategoryMappingManipulator categoryMappingManipulator;\n\n    @Resource\n    @Structure( type = TaskProcessorMapper.class )\n    protected TaskProcessorManipulator taskProcessorManipulator;\n\n    protected ScheduleManipulator      scheduleManipulator;\n\n    public RavenTaskMasterManipulatorImpl( KOIMappingDriver driver, TaskMappingDriver taskMappingDriver ) {\n        driver.autoConstruct( RavenTaskMasterManipulatorImpl.class, Map.of(), this );\n        
this.taskMappingDriver         = taskMappingDriver;\n        this.taskMasterManipulator     = (TaskMasterManipulator)taskMappingDriver.getMasterManipulator();\n        this.skeletonMasterManipulator = this.taskMasterManipulator.getSkeletonMasterManipulator();\n\n        this.scheduleManipulator       = new ScheduleManipulatorImpl( driver );\n    }\n\n    @Override\n    public TaskMasterManipulator getTaskMasterManipulator() {\n        return this.taskMasterManipulator;\n    }\n\n    @Override\n    public TaskMappingDriver getTaskMappingDriver() {\n        return this.taskMappingDriver;\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public CategoryTypeManipulator getCategoryTypeManipulator() {\n        return this.categoryTypeManipulator;\n    }\n\n    @Override\n    public TaskCategoryManipulator getTaskCategoryManipulator() {\n        return this.taskCategoryManipulator;\n    }\n\n    @Override\n    public CategoryMappingManipulator getCategoryMappingManipulator() {\n        return this.categoryMappingManipulator;\n    }\n\n    @Override\n    public TaskProcessorManipulator getTaskProcessorManipulator() {\n        return this.taskProcessorManipulator;\n    }\n\n    @Override\n    public ScheduleManipulator getScheduleManipulator() {\n        return this.scheduleManipulator;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-mapper-driver/src/main/java/com/walnut/odin/task/mapper/ScheduleManipulatorImpl.java",
    "content": "package com.walnut.odin.task.mapper;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.walnut.odin.task.source.ScheduleManipulator;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\n\n@Component\npublic class ScheduleManipulatorImpl implements ScheduleManipulator {\n\n    @Resource\n    @Structure(type = InstanceEventMapper.class)\n    private InstanceEventMapper minstanceEventMapper;\n\n    @Resource\n    @Structure(type = InstanceAtlasAdjacentMapper.class)\n    private InstanceAtlasAdjacentMapper minstanceAtlasAdjacentMapper;\n\n    @Resource\n    @Structure(type = InstanceAtlasNodeMapper.class)\n    private InstanceAtlasNodeMapper minstanceAtlasNodeMapper;\n\n    @Resource\n    @Structure(type = InstanceExecMapper.class)\n    private InstanceExecMapper minstanceExecMapper;\n\n    public ScheduleManipulatorImpl() {\n    }\n\n    public ScheduleManipulatorImpl(KOIMappingDriver driver) {\n        driver.autoConstruct(ScheduleManipulatorImpl.class, Map.of(), this);\n    }\n\n    public ScheduleManipulatorImpl(\n            InstanceEventMapper instanceEventMapper,\n            InstanceAtlasAdjacentMapper instanceAtlasAdjacentMapper,\n            InstanceAtlasNodeMapper instanceAtlasNodeMapper,\n            InstanceExecMapper instanceExecMapper\n    ) {\n        this.minstanceEventMapper = instanceEventMapper;\n        this.minstanceAtlasAdjacentMapper = instanceAtlasAdjacentMapper;\n        this.minstanceAtlasNodeMapper = instanceAtlasNodeMapper;\n        this.minstanceExecMapper = instanceExecMapper;\n    }\n\n    @Override\n    public InstanceEventMapper getInstanceEventMapper() {\n        return this.minstanceEventMapper;\n    }\n\n    @Override\n    public InstanceAtlasAdjacentMapper getInstanceAtlasAdjacentMapper() {\n        return this.minstanceAtlasAdjacentMapper;\n    }\n\n    @Override\n    
public InstanceAtlasNodeMapper getInstanceAtlasNodeMapper() {\n        return this.minstanceAtlasNodeMapper;\n    }\n\n    @Override\n    public InstanceExecMapper getInstanceExecMapper() {\n        return this.minstanceExecMapper;\n    }\n}\n"
  },
  {
    "path": "Odin/odin-system/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>odin</artifactId>\n        <groupId>com.walnut.odin</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.odin</groupId>\n    <artifactId>odin-system</artifactId>\n    <version>2.5.1</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            
<groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-architecture</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-framework-atlas</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-mapper-driver</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Odin/odin-system/src/main/java/com/walnut/odin/system/Odin.java",
    "content": "package com.walnut.odin.system;\n\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.io.Tracer;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.MapStructure;\nimport com.pinecone.hydra.layer.ibatis.hydranium.LayerMappingDriver;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.ProcessManagerSystema;\nimport com.pinecone.hydra.system.ArchModularizedSubsystem;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.component.LogStatuses;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.umc.msg.MessageNode;\nimport com.pinecone.hydra.umc.wolf.server.UlfServer;\nimport com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.layer.VLayerInstrument;\nimport com.pinecone.hydra.unit.vgraph.source.AtlasMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.tritium.system.TritiumSystem;\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\nimport com.walnut.odin.atlas.graph.UniformRuntimeAtlas;\nimport com.walnut.odin.atlas.mapper.OdinAtlasMappingDriver;\nimport com.walnut.odin.conduct.CollectiveTaskRegiment;\nimport com.walnut.odin.conduct.RavenCollectiveTaskRegiment;\nimport com.walnut.odin.conduct.schedule.RavenTaskScheduler;\nimport com.walnut.odin.conduct.schedule.UniformTaskScheduler;\nimport com.walnut.odin.proc.server.RavenRemoteProcessManagerServer;\nimport com.walnut.odin.proc.server.RemoteProcessManagerServer;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.GenericRavenTaskConfig;\nimport com.walnut.odin.task.RavenTaskInstrument;\nimport com.walnut.odin.task.mapper.OdinUniformTaskMappingDriver;\n\npublic class Odin extends ArchModularizedSubsystem implements TaskCentralControl {\n\n    private CollectiveTaskRegiment  mTaskRegiment;\n\n   
 private LayerInstrument         mLayerInstrument;\n    private RuntimeAtlasInstrument  mAtlasInstrument;\n\n    private UniformTaskScheduler    mTaskScheduler;\n\n    @MapStructure(\"metaDependent.atlasDatabase\")\n    private String                  mszAtlasDatabaseKey;\n\n    @MapStructure(\"metaDependent.taskInstrument\")\n    private String                  mszTaskInstrumentKey;\n\n    @MapStructure(\"metaDependent.controlRPCDriver\")\n    private String                  mszControlRPCDriverKey;\n\n    @MapStructure(\"metaDependent.processManager\")\n    private String                  mszProcessManagerKey;\n\n    public Odin( Hydrogen primarySystem, String name, PatriarchalConfig config ) {\n        super( primarySystem, name, config );\n\n        TritiumSystem sys = (TritiumSystem) this.parentSystem();\n        sys.getPrimaryConfigScope().autoInject( Odin.class, config, this );\n    }\n\n    @Override\n    protected void traceWelcomeInfo() {\n        Tracer console = this.mPrimarySystem.console();\n        console.getOut().print( \"---------------------------------------------------------------\\n\" );\n        console.getOut().print( \"\\u001B[31mBean Nuts Acorn Odin\\u001B[0m\\n\" );\n        console.getOut().print( \"\\u001B[31mMassive Task Orchestration System \\u001B[0m\\n\" );\n        console.getOut().print( \"\\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. 
All rights reserved.\\u001B[0m\\n\" );\n        console.getOut().print( \"---------------------------------------------------------------\\n\" );\n    }\n\n    protected void init() {\n        this.getLogger().info( \"<Odin> >>> System Booting...\" );\n\n        this.infoLifecycle( \"<Odin> Domain Subsystem Initialization\", LogStatuses.StatusStart );\n        this.traceWelcomeInfo();\n        this.prepare_system_skeleton();\n\n        this.infoLifecycle( \"<Odin> Welcome to the Odin task central control!\", LogStatuses.StatusReady );\n        this.infoLifecycle( \"<Odin> Domain Subsystem Initialization\", LogStatuses.StatusReady );\n    }\n\n    protected void prepare_instrumentation() {\n        this.infoLifecycle( \"<Odin> Constructing components `Instrumentation`.\", LogStatuses.StatusStart );\n\n\n        TritiumSystem sys = (TritiumSystem) this.parentSystem();\n        KOIMappingDriver layerMappingDriver = new LayerMappingDriver(\n                sys, (IbatisClient) sys.getMiddlewareDirector().getRDBManager().getRDBClientByName( this.mszAtlasDatabaseKey ),\n                sys.getDispenserCenter()\n        );\n\n        AtlasMappingDriver atlasMappingDriver = new OdinAtlasMappingDriver(\n                sys, (IbatisClient) sys.getMiddlewareDirector().getRDBManager().getRDBClientByName( this.mszAtlasDatabaseKey ),\n                sys.getDispenserCenter()\n        );\n\n        KOIMappingDriver taskDriver = new OdinUniformTaskMappingDriver(\n                sys, (IbatisClient) sys.getMiddlewareDirector().getRDBManager().getRDBClientByName( this.mszTaskInstrumentKey ),\n                sys.getDispenserCenter()\n        );\n\n\n        CentralizedTaskInstrument taskInstrument = new RavenTaskInstrument(\n                taskDriver, new GenericRavenTaskConfig( (JSONObject) this.mSubsystemConfig )\n        );\n        this.infoLifecycle( \"<Odin> Constructing component `TaskInstrument`.\", LogStatuses.StatusDone );\n\n        this.mLayerInstrument = new 
VLayerInstrument( layerMappingDriver );\n        this.mAtlasInstrument = new UniformRuntimeAtlas( atlasMappingDriver, taskInstrument, this.mLayerInstrument );\n        this.infoLifecycle( \"<Odin> Constructing component `AtlasInstrument`.\", LogStatuses.StatusDone );\n\n        MessageNode messageNode = sys.getMiddlewareDirector().getMessagersManager().getMessageNodeByName( this.mszControlRPCDriverKey );\n        if ( messageNode == null ) {\n            messageNode = (MessageNode) sys.getDispenserCenter().getInstanceDispenser().getRegisteredInstance( this.mszControlRPCDriverKey );\n        }\n        UlfServer rpcServer = (UlfServer) messageNode;\n        if ( rpcServer != null ) {\n            ProcessManager pm = (ProcessManager) sys.getDispenserCenter().getInstanceDispenser().getRegisteredInstance( this.mszProcessManagerKey );\n            RemoteProcessManagerServer server = new RavenRemoteProcessManagerServer( pm, rpcServer );\n            this.mTaskRegiment = new RavenCollectiveTaskRegiment( (ProcessManagerSystema) sys, taskInstrument, server );\n        }\n        this.infoLifecycle( \"<Odin> Constructing component `TaskRegiment`.\", LogStatuses.StatusDone );\n\n\n        this.infoLifecycle( \"<Odin> Constructing components `Instrumentation`.\", LogStatuses.StatusDone );\n    }\n\n    protected void prepare_scheduler() {\n        this.infoLifecycle( \"<Odin> Constructing component `TaskScheduler`.\", LogStatuses.StatusStart );\n\n        this.mTaskScheduler = new RavenTaskScheduler(\n                this.mTaskRegiment.taskInstrument(), this.mAtlasInstrument, this.mTaskRegiment.taskDispatcher()\n        );\n\n        this.infoLifecycle( \"<Odin> Constructing component `TaskScheduler`.\", LogStatuses.StatusDone );\n    }\n\n    protected void prepare_system_skeleton() {\n        this.infoLifecycle( \"<Odin> Preparing system skeleton.\", LogStatuses.StatusStart );\n\n        this.prepare_instrumentation();\n        this.prepare_scheduler();\n\n\n        
this.infoLifecycle( \"<Odin> Preparing system skeleton.\", LogStatuses.StatusDone );\n    }\n\n    @Override\n    public void vitalize() {\n        this.init();\n    }\n\n    @Override\n    public void terminate() {\n\n    }\n\n\n    public LayerInstrument layerInstrument() {\n        return this.mLayerInstrument;\n    }\n\n    public RuntimeAtlasInstrument atlasInstrument() {\n        return this.mAtlasInstrument;\n    }\n\n    public CollectiveTaskRegiment taskRegiment() {\n        return this.mTaskRegiment;\n    }\n\n    public UniformTaskScheduler taskScheduler() {\n        return this.mTaskScheduler;\n    }\n\n\n}"
  },
  {
    "path": "Odin/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.odin</groupId>\n    <artifactId>odin</artifactId>\n    <packaging>pom</packaging>\n    <version>2.5.1</version>\n\n    <modules>\n        <module>odin-architecture</module>\n        <module>odin-framework-atlas</module>\n        <module>odin-framework-runtime</module>\n        <module>odin-framework-conduct</module>\n        <module>odin-mapper-driver</module>\n        <module>odin-system</module>\n    </modules>\n</project>"
  },
  {
    "path": "Pinecones/Jelly/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>pinecones</artifactId>\n        <groupId>com.pinecones</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>9</source>\n                    <target>9</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.slime.jelly</groupId>\n    <artifactId>jelly</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n\n        <!-- MyBatis dependencies -->\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis</artifactId>\n            <version>3.5.9</version>\n        </dependency>\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis-spring</artifactId>\n            <version>2.0.6</version>\n        </dependency>\n\n        <!-- MySQL Connector -->\n        <dependency>\n            <groupId>mysql</groupId>\n            
<artifactId>mysql-connector-java</artifactId>\n            <version>8.0.26</version>\n        </dependency>\n\n        <!-- Logging dependencies -->\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n            <version>1.7.30</version>\n        </dependency>\n\n        <dependency>\n            <groupId>redis.clients</groupId>\n            <artifactId>jedis</artifactId>\n            <version>3.3.0</version>\n        </dependency>\n\n        <dependency>\n            <groupId>net.spy</groupId>\n            <artifactId>spymemcached</artifactId>\n            <version>2.12.3</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/map/SS.java",
    "content": "package com.pinecone.slime.jelly.map;\n\npublic class SS {\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/NamespacedKey.java",
    "content": "package com.pinecone.slime.jelly.source;\n\nimport com.pinecone.slime.source.indexable.IndexableTargetScopeMeta;\n\npublic final class NamespacedKey {\n    public static String getFullKey( IndexableTargetScopeMeta meta, String szNameSeparator, String szNamespace, Object key ) {\n        String ns = null;\n        if ( szNamespace != null ) {\n            ns = szNamespace;\n        }\n        else if ( meta.getIndexKey() != null ) { // Index as namespace\n            ns = meta.getIndexKey();\n        }\n\n        if( ns != null && !ns.isEmpty() ) {\n            return ns + szNameSeparator + key.toString();\n        }\n        return key.toString();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/CannotAcquireLockException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class CannotAcquireLockException extends PessimisticLockingFailureException {\n    public CannotAcquireLockException( String msg ) {\n        super(msg);\n    }\n\n    public CannotAcquireLockException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/CannotSerializeTransactionException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class CannotSerializeTransactionException extends PessimisticLockingFailureException {\n    public CannotSerializeTransactionException( String msg ) {\n        super(msg);\n    }\n\n    public CannotSerializeTransactionException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/ConcurrencyFailureException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic class ConcurrencyFailureException extends TransientDataAccessException {\n    public ConcurrencyFailureException( String msg ) {\n        super(msg);\n    }\n\n    public ConcurrencyFailureException( String msg, @Nullable Throwable cause ) {\n        super(msg, cause);\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\nimport com.pinecone.framework.system.NestedRuntimeException;\nimport com.pinecone.framework.system.Nullable;\n\npublic abstract class DataAccessException extends NestedRuntimeException {\n    public DataAccessException( String msg ) {\n        super(msg);\n    }\n\n    public DataAccessException( @Nullable String msg, @Nullable Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DataAccessResourceFailureException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic class DataAccessResourceFailureException extends NonTransientDataAccessResourceException {\n    public DataAccessResourceFailureException(String msg) {\n        super(msg);\n    }\n\n    public DataAccessResourceFailureException(String msg, @Nullable Throwable cause) {\n        super(msg, cause);\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DataIntegrityViolationException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class DataIntegrityViolationException extends NonTransientDataAccessException {\n    public DataIntegrityViolationException( String msg ) {\n        super(msg);\n    }\n\n    public DataIntegrityViolationException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DeadlockLoserDataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class DeadlockLoserDataAccessException extends PessimisticLockingFailureException {\n    public DeadlockLoserDataAccessException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/DuplicateKeyException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class DuplicateKeyException extends DataIntegrityViolationException {\n    public DuplicateKeyException( String msg ) {\n        super(msg);\n    }\n\n    public DuplicateKeyException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/InvalidDataAccessApiUsageException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class InvalidDataAccessApiUsageException extends NonTransientDataAccessException {\n    public InvalidDataAccessApiUsageException(String msg) {\n        super(msg);\n    }\n\n    public InvalidDataAccessApiUsageException(String msg, Throwable cause) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/InvalidDataAccessResourceUsageException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class InvalidDataAccessResourceUsageException extends NonTransientDataAccessException {\n    public InvalidDataAccessResourceUsageException( String msg ) {\n        super(msg);\n    }\n\n    public InvalidDataAccessResourceUsageException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/NonTransientDataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic abstract class NonTransientDataAccessException extends DataAccessException {\n    public NonTransientDataAccessException( String msg ) {\n        super(msg);\n    }\n\n    public NonTransientDataAccessException( @Nullable String msg, @Nullable Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/NonTransientDataAccessResourceException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic class NonTransientDataAccessResourceException extends NonTransientDataAccessException {\n    public NonTransientDataAccessResourceException( String msg ) {\n        super(msg);\n    }\n\n    public NonTransientDataAccessResourceException( String msg, @Nullable Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/PermissionDeniedDataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class PermissionDeniedDataAccessException extends NonTransientDataAccessException {\n    public PermissionDeniedDataAccessException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/PersistenceExceptionTranslator.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n@FunctionalInterface\npublic interface PersistenceExceptionTranslator extends Pinenut {\n    @Nullable\n    DataAccessException translateExceptionIfPossible(RuntimeException e );\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/PessimisticLockingFailureException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class PessimisticLockingFailureException extends ConcurrencyFailureException {\n    public PessimisticLockingFailureException( String msg ) {\n        super(msg);\n    }\n\n    public PessimisticLockingFailureException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/QueryTimeoutException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class QueryTimeoutException extends TransientDataAccessException {\n    public QueryTimeoutException( String msg ) {\n        super(msg);\n    }\n\n    public QueryTimeoutException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/RecoverableDataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class RecoverableDataAccessException extends DataAccessException {\n    public RecoverableDataAccessException( String msg ) {\n        super(msg);\n    }\n\n    public RecoverableDataAccessException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/TransientDataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic abstract class TransientDataAccessException extends DataAccessException {\n    public TransientDataAccessException( String msg ) {\n        super(msg);\n    }\n\n    public TransientDataAccessException( String msg, @Nullable Throwable cause ) {\n        super(msg, cause);\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/dao/TransientDataAccessResourceException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.dao;\n\npublic class TransientDataAccessResourceException extends TransientDataAccessException {\n    public TransientDataAccessResourceException( String msg ) {\n        super(msg);\n    }\n\n    public TransientDataAccessResourceException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/AbstractFallbackSQLExceptionTranslator.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.sql.SQLException;\n\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\n\n\npublic abstract class AbstractFallbackSQLExceptionTranslator implements SQLExceptionTranslator {\n    protected final Log logger = LogFactory.getLog(this.getClass());\n\n    @Nullable\n    private SQLExceptionTranslator fallbackTranslator;\n\n    public AbstractFallbackSQLExceptionTranslator() {\n    }\n\n    public void setFallbackTranslator(@Nullable SQLExceptionTranslator fallback) {\n        this.fallbackTranslator = fallback;\n    }\n\n    @Nullable\n    public SQLExceptionTranslator getFallbackTranslator() {\n        return this.fallbackTranslator;\n    }\n\n    @Nullable\n    @Override\n    public DataAccessException translate(String task, @Nullable String sql, SQLException ex) {\n        Assert.notNull(ex, \"Cannot translate a null SQLException\");\n        DataAccessException dae = this.doTranslate(task, sql, ex);\n        if (dae != null) {\n            return dae;\n        }\n        else {\n            SQLExceptionTranslator fallback = this.getFallbackTranslator();\n            return fallback != null ? fallback.translate(task, sql, ex) : null;\n        }\n    }\n\n    @Nullable\n    protected abstract DataAccessException doTranslate(String var1, @Nullable String var2, SQLException var3);\n\n    protected String buildMessage(String task, @Nullable String sql, SQLException ex) {\n        return task + \"; \" + (sql != null ? \"SQL [\" + sql + \"]; \" : \"\") + ex.getMessage();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/BadSqlGrammarException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.sql.SQLException;\n\nimport com.pinecone.slime.jelly.source.ds.dao.InvalidDataAccessResourceUsageException;\n\npublic class BadSqlGrammarException extends InvalidDataAccessResourceUsageException {\n    private final String sql;\n\n    public BadSqlGrammarException( String task, String sql, SQLException ex ) {\n        super( task + \"; bad SQL grammar [\" + sql + \"]\", ex );\n        this.sql = sql;\n    }\n\n    public SQLException getSQLException() {\n        return (SQLException)this.getCause();\n    }\n\n    public String getSql() {\n        return this.sql;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/CustomSQLErrorCodesTranslation.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\n\npublic class CustomSQLErrorCodesTranslation {\n    private String[] errorCodes = new String[0];\n    @Nullable\n    private Class<?> exceptionClass;\n\n    public CustomSQLErrorCodesTranslation() {\n    }\n\n    public void setErrorCodes(String... errorCodes) {\n        this.errorCodes = StringUtils.sortStringArray(errorCodes);\n    }\n\n    public String[] getErrorCodes() {\n        return this.errorCodes;\n    }\n\n    public void setExceptionClass(@Nullable Class<?> exceptionClass) {\n        if ( exceptionClass != null && !DataAccessException.class.isAssignableFrom(exceptionClass) ) {\n            throw new IllegalArgumentException(\"Invalid exception class [\" + exceptionClass + \"]: needs to be a subclass of [org.springframework.dao.DataAccessException]\");\n        }\n        else {\n            this.exceptionClass = exceptionClass;\n        }\n    }\n\n    @Nullable\n    public Class<?> getExceptionClass() {\n        return this.exceptionClass;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/DatabaseMetaDataCallback.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.sql.DatabaseMetaData;\nimport java.sql.SQLException;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n@FunctionalInterface\npublic interface DatabaseMetaDataCallback<T> extends Pinenut {\n    T processMetaData( DatabaseMetaData metaData ) throws SQLException, MetaDataAccessException;\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/InvalidResultSetAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.sql.SQLException;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.slime.jelly.source.ds.dao.InvalidDataAccessResourceUsageException;\n\npublic class InvalidResultSetAccessException extends InvalidDataAccessResourceUsageException {\n    @Nullable\n    private final String sql;\n\n    public InvalidResultSetAccessException(String task, String sql, SQLException ex) {\n        super( task + \"; invalid ResultSet access for SQL [\" + sql + \"]\", ex );\n        this.sql = sql;\n    }\n\n    public InvalidResultSetAccessException(SQLException ex) {\n        super( ex.getMessage(), ex );\n        this.sql = null;\n    }\n\n    public SQLException getSQLException() {\n        return (SQLException)this.getCause();\n    }\n\n    @Nullable\n    public String getSql() {\n        return this.sql;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/MetaDataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\n\nimport com.pinecone.framework.system.NestedCheckedException;\n\npublic class MetaDataAccessException extends NestedCheckedException {\n    public MetaDataAccessException( String msg ) {\n        super(msg);\n    }\n\n    public MetaDataAccessException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLErrorCodeSQLExceptionTranslator.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.lang.reflect.Constructor;\nimport java.sql.BatchUpdateException;\nimport java.sql.SQLException;\nimport java.util.Arrays;\nimport javax.sql.DataSource;\n\nimport com.pinecone.framework.unit.SingletonSupplier;\nimport com.pinecone.framework.util.SupplierUtils;\nimport com.pinecone.slime.jelly.source.ds.dao.CannotAcquireLockException;\nimport com.pinecone.slime.jelly.source.ds.dao.CannotSerializeTransactionException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessResourceFailureException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataIntegrityViolationException;\nimport com.pinecone.slime.jelly.source.ds.dao.DeadlockLoserDataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.DuplicateKeyException;\nimport com.pinecone.slime.jelly.source.ds.dao.PermissionDeniedDataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.TransientDataAccessResourceException;\nimport com.pinecone.framework.system.Nullable;\n\npublic class SQLErrorCodeSQLExceptionTranslator extends AbstractFallbackSQLExceptionTranslator {\n    private static final int MESSAGE_ONLY_CONSTRUCTOR = 1;\n    private static final int MESSAGE_THROWABLE_CONSTRUCTOR = 2;\n    private static final int MESSAGE_SQLEX_CONSTRUCTOR = 3;\n    private static final int MESSAGE_SQL_THROWABLE_CONSTRUCTOR = 4;\n    private static final int MESSAGE_SQL_SQLEX_CONSTRUCTOR = 5;\n\n    @Nullable\n    private SingletonSupplier<SQLErrorCodes > sqlErrorCodes;\n\n    public SQLErrorCodeSQLExceptionTranslator() {\n        this.setFallbackTranslator(new SQLExceptionSubclassTranslator());\n    }\n\n    public SQLErrorCodeSQLExceptionTranslator( DataSource dataSource) {\n        this();\n        this.setDataSource(dataSource);\n    }\n\n    public SQLErrorCodeSQLExceptionTranslator(String dbName) {\n        this();\n        
this.setDatabaseProductName(dbName);\n    }\n\n    public SQLErrorCodeSQLExceptionTranslator(SQLErrorCodes sec) {\n        this();\n        this.sqlErrorCodes = SingletonSupplier.of(sec);\n    }\n\n    public void setDataSource(DataSource dataSource) {\n        this.sqlErrorCodes = SingletonSupplier.of(() -> {\n            return SQLErrorCodesFactory.getInstance().resolveErrorCodes(dataSource);\n        });\n        this.sqlErrorCodes.get();\n    }\n\n    public void setDatabaseProductName(String dbName) {\n        this.sqlErrorCodes = SingletonSupplier.of(SQLErrorCodesFactory.getInstance().getErrorCodes(dbName));\n    }\n\n    public void setSqlErrorCodes(@Nullable SQLErrorCodes sec) {\n        this.sqlErrorCodes = SingletonSupplier.ofNullable(sec);\n    }\n\n    @Nullable\n    public SQLErrorCodes getSqlErrorCodes() {\n        return (SQLErrorCodes) SupplierUtils.resolve(this.sqlErrorCodes);\n    }\n\n    @Nullable\n    protected DataAccessException doTranslate(String task, @Nullable String sql, SQLException ex) {\n        SQLException sqlEx = ex;\n        if (ex instanceof BatchUpdateException && ex.getNextException() != null) {\n            SQLException nestedSqlEx = ex.getNextException();\n            if (nestedSqlEx.getErrorCode() > 0 || nestedSqlEx.getSQLState() != null) {\n                sqlEx = nestedSqlEx;\n            }\n        }\n\n        DataAccessException dae = this.customTranslate(task, sql, sqlEx);\n        if (dae != null) {\n            return dae;\n        } else {\n            SQLErrorCodes sqlErrorCodes = this.getSqlErrorCodes();\n            if (sqlErrorCodes != null) {\n                SQLExceptionTranslator customTranslator = sqlErrorCodes.getCustomSqlExceptionTranslator();\n                if (customTranslator != null) {\n                    DataAccessException customDex = customTranslator.translate(task, sql, sqlEx);\n                    if (customDex != null) {\n                        return customDex;\n                    }\n        
        }\n            }\n\n            String errorCode;\n            if (sqlErrorCodes != null) {\n                if (sqlErrorCodes.isUseSqlStateForTranslation()) {\n                    errorCode = sqlEx.getSQLState();\n                } else {\n                    SQLException current;\n                    for(current = sqlEx; current.getErrorCode() == 0 && current.getCause() instanceof SQLException; current = (SQLException)current.getCause()) {\n                    }\n\n                    errorCode = Integer.toString(current.getErrorCode());\n                }\n\n                if (errorCode != null) {\n                    CustomSQLErrorCodesTranslation[] customTranslations = sqlErrorCodes.getCustomTranslations();\n                    if (customTranslations != null) {\n                        CustomSQLErrorCodesTranslation[] var9 = customTranslations;\n                        int var10 = customTranslations.length;\n\n                        for(int var11 = 0; var11 < var10; ++var11) {\n                            CustomSQLErrorCodesTranslation customTranslation = var9[var11];\n                            if (Arrays.binarySearch(customTranslation.getErrorCodes(), errorCode) >= 0 && customTranslation.getExceptionClass() != null) {\n                                DataAccessException customException = this.createCustomException(task, sql, sqlEx, customTranslation.getExceptionClass());\n                                if (customException != null) {\n                                    this.logTranslation(task, sql, sqlEx, true);\n                                    return customException;\n                                }\n                            }\n                        }\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getBadSqlGrammarCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new BadSqlGrammarException(task, sql != null ? 
sql : \"\", sqlEx);\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getInvalidResultSetAccessCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new InvalidResultSetAccessException(task, sql != null ? sql : \"\", sqlEx);\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getDuplicateKeyCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new DuplicateKeyException(this.buildMessage(task, sql, sqlEx), sqlEx);\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getDataIntegrityViolationCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new DataIntegrityViolationException(this.buildMessage(task, sql, sqlEx), sqlEx);\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getPermissionDeniedCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new PermissionDeniedDataAccessException(this.buildMessage(task, sql, sqlEx), sqlEx);\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getDataAccessResourceFailureCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new DataAccessResourceFailureException(this.buildMessage(task, sql, sqlEx), sqlEx);\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getTransientDataAccessResourceCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new TransientDataAccessResourceException(this.buildMessage(task, sql, sqlEx), sqlEx);\n                    }\n\n                    if 
(Arrays.binarySearch(sqlErrorCodes.getCannotAcquireLockCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new CannotAcquireLockException(this.buildMessage(task, sql, sqlEx), sqlEx);\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getDeadlockLoserCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new DeadlockLoserDataAccessException(this.buildMessage(task, sql, sqlEx), sqlEx);\n                    }\n\n                    if (Arrays.binarySearch(sqlErrorCodes.getCannotSerializeTransactionCodes(), errorCode) >= 0) {\n                        this.logTranslation(task, sql, sqlEx, false);\n                        return new CannotSerializeTransactionException(this.buildMessage(task, sql, sqlEx), sqlEx);\n                    }\n                }\n            }\n\n            if (this.logger.isDebugEnabled()) {\n                if (sqlErrorCodes != null && sqlErrorCodes.isUseSqlStateForTranslation()) {\n                    errorCode = \"SQL state '\" + sqlEx.getSQLState() + \"', error code '\" + sqlEx.getErrorCode();\n                } else {\n                    errorCode = \"Error code '\" + sqlEx.getErrorCode() + \"'\";\n                }\n\n                this.logger.debug(\"Unable to translate SQLException with \" + errorCode + \", will now try the fallback translator\");\n            }\n\n            return null;\n        }\n    }\n\n    @Nullable\n    protected DataAccessException customTranslate(String task, @Nullable String sql, SQLException sqlEx) {\n        return null;\n    }\n\n    @Nullable\n    protected DataAccessException createCustomException(String task, @Nullable String sql, SQLException sqlEx, Class<?> exceptionClass) {\n        try {\n            int constructorType = 0;\n            Constructor<?>[] constructors = exceptionClass.getConstructors();\n        
    Constructor[] var7 = constructors;\n            int var8 = constructors.length;\n\n            for(int var9 = 0; var9 < var8; ++var9) {\n                Constructor<?> constructor = var7[var9];\n                Class<?>[] parameterTypes = constructor.getParameterTypes();\n                if (parameterTypes.length == 1 && String.class == parameterTypes[0] && constructorType < 1) {\n                    constructorType = 1;\n                }\n\n                if (parameterTypes.length == 2 && String.class == parameterTypes[0] && Throwable.class == parameterTypes[1] && constructorType < 2) {\n                    constructorType = 2;\n                }\n\n                if (parameterTypes.length == 2 && String.class == parameterTypes[0] && SQLException.class == parameterTypes[1] && constructorType < 3) {\n                    constructorType = 3;\n                }\n\n                if (parameterTypes.length == 3 && String.class == parameterTypes[0] && String.class == parameterTypes[1] && Throwable.class == parameterTypes[2] && constructorType < 4) {\n                    constructorType = 4;\n                }\n\n                if (parameterTypes.length == 3 && String.class == parameterTypes[0] && String.class == parameterTypes[1] && SQLException.class == parameterTypes[2] && constructorType < 5) {\n                    constructorType = 5;\n                }\n            }\n\n            Constructor exceptionConstructor;\n            switch(constructorType) {\n                case 1:\n                    Class<?>[] messageOnlyArgsClass = new Class[]{String.class};\n                    Object[] messageOnlyArgs = new Object[]{task + \": \" + sqlEx.getMessage()};\n                    exceptionConstructor = exceptionClass.getConstructor(messageOnlyArgsClass);\n                    return (DataAccessException)exceptionConstructor.newInstance(messageOnlyArgs);\n                case 2:\n                    Class<?>[] messageAndThrowableArgsClass = new 
Class[]{String.class, Throwable.class};\n                    Object[] messageAndThrowableArgs = new Object[]{task + \": \" + sqlEx.getMessage(), sqlEx};\n                    exceptionConstructor = exceptionClass.getConstructor(messageAndThrowableArgsClass);\n                    return (DataAccessException)exceptionConstructor.newInstance(messageAndThrowableArgs);\n                case 3:\n                    Class<?>[] messageAndSqlExArgsClass = new Class[]{String.class, SQLException.class};\n                    Object[] messageAndSqlExArgs = new Object[]{task + \": \" + sqlEx.getMessage(), sqlEx};\n                    exceptionConstructor = exceptionClass.getConstructor(messageAndSqlExArgsClass);\n                    return (DataAccessException)exceptionConstructor.newInstance(messageAndSqlExArgs);\n                case 4:\n                    Class<?>[] messageAndSqlAndThrowableArgsClass = new Class[]{String.class, String.class, Throwable.class};\n                    Object[] messageAndSqlAndThrowableArgs = new Object[]{task, sql, sqlEx};\n                    exceptionConstructor = exceptionClass.getConstructor(messageAndSqlAndThrowableArgsClass);\n                    return (DataAccessException)exceptionConstructor.newInstance(messageAndSqlAndThrowableArgs);\n                case 5:\n                    Class<?>[] messageAndSqlAndSqlExArgsClass = new Class[]{String.class, String.class, SQLException.class};\n                    Object[] messageAndSqlAndSqlExArgs = new Object[]{task, sql, sqlEx};\n                    exceptionConstructor = exceptionClass.getConstructor(messageAndSqlAndSqlExArgsClass);\n                    return (DataAccessException)exceptionConstructor.newInstance(messageAndSqlAndSqlExArgs);\n                default:\n                    if (this.logger.isWarnEnabled()) {\n                        this.logger.warn(\"Unable to find appropriate constructor of custom exception class [\" + exceptionClass.getName() + \"]\");\n                    }\n\n  
                  return null;\n            }\n        }\n        catch ( Throwable e ) {\n            if (this.logger.isWarnEnabled()) {\n                this.logger.warn(\"Unable to instantiate custom exception class [\" + exceptionClass.getName() + \"]\", e);\n            }\n\n            return null;\n        }\n    }\n\n    private void logTranslation(String task, @Nullable String sql, SQLException sqlEx, boolean custom) {\n        if (this.logger.isDebugEnabled()) {\n            String intro = custom ? \"Custom translation of\" : \"Translating\";\n            this.logger.debug(intro + \" SQLException with SQL state '\" + sqlEx.getSQLState() + \"', error code '\" + sqlEx.getErrorCode() + \"', message [\" + sqlEx.getMessage() + \"]\" + (sql != null ? \"; SQL was [\" + sql + \"]\" : \"\") + \" for task [\" + task + \"]\");\n        }\n\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLErrorCodes.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.ReflectionUtils;\n\npublic class SQLErrorCodes implements Pinenut {\n    @Nullable\n    private String[] databaseProductNames;\n    private boolean useSqlStateForTranslation = false;\n    private String[] badSqlGrammarCodes = new String[0];\n    private String[] invalidResultSetAccessCodes = new String[0];\n    private String[] duplicateKeyCodes = new String[0];\n    private String[] dataIntegrityViolationCodes = new String[0];\n    private String[] permissionDeniedCodes = new String[0];\n    private String[] dataAccessResourceFailureCodes = new String[0];\n    private String[] transientDataAccessResourceCodes = new String[0];\n    private String[] cannotAcquireLockCodes = new String[0];\n    private String[] deadlockLoserCodes = new String[0];\n    private String[] cannotSerializeTransactionCodes = new String[0];\n    @Nullable\n    private CustomSQLErrorCodesTranslation[] customTranslations;\n    @Nullable\n    private SQLExceptionTranslator customSqlExceptionTranslator;\n\n    public SQLErrorCodes() {\n    }\n\n    public void setDatabaseProductName(@Nullable String databaseProductName) {\n        this.databaseProductNames = new String[]{databaseProductName};\n    }\n\n    @Nullable\n    public String getDatabaseProductName() {\n        return this.databaseProductNames != null && this.databaseProductNames.length > 0 ? this.databaseProductNames[0] : null;\n    }\n\n    public void setDatabaseProductNames(@Nullable String... 
databaseProductNames) {\n        this.databaseProductNames = databaseProductNames;\n    }\n\n    @Nullable\n    public String[] getDatabaseProductNames() {\n        return this.databaseProductNames;\n    }\n\n    public void setUseSqlStateForTranslation(boolean useStateCodeForTranslation) {\n        this.useSqlStateForTranslation = useStateCodeForTranslation;\n    }\n\n    public boolean isUseSqlStateForTranslation() {\n        return this.useSqlStateForTranslation;\n    }\n\n    public void setBadSqlGrammarCodes(String... badSqlGrammarCodes) {\n        this.badSqlGrammarCodes = StringUtils.sortStringArray(badSqlGrammarCodes);\n    }\n\n    public String[] getBadSqlGrammarCodes() {\n        return this.badSqlGrammarCodes;\n    }\n\n    public void setInvalidResultSetAccessCodes(String... invalidResultSetAccessCodes) {\n        this.invalidResultSetAccessCodes = StringUtils.sortStringArray(invalidResultSetAccessCodes);\n    }\n\n    public String[] getInvalidResultSetAccessCodes() {\n        return this.invalidResultSetAccessCodes;\n    }\n\n    public String[] getDuplicateKeyCodes() {\n        return this.duplicateKeyCodes;\n    }\n\n    public void setDuplicateKeyCodes(String... duplicateKeyCodes) {\n        this.duplicateKeyCodes = StringUtils.sortStringArray(duplicateKeyCodes);\n    }\n\n    public void setDataIntegrityViolationCodes(String... dataIntegrityViolationCodes) {\n        this.dataIntegrityViolationCodes = StringUtils.sortStringArray(dataIntegrityViolationCodes);\n    }\n\n    public String[] getDataIntegrityViolationCodes() {\n        return this.dataIntegrityViolationCodes;\n    }\n\n    public void setPermissionDeniedCodes(String... permissionDeniedCodes) {\n        this.permissionDeniedCodes = StringUtils.sortStringArray(permissionDeniedCodes);\n    }\n\n    public String[] getPermissionDeniedCodes() {\n        return this.permissionDeniedCodes;\n    }\n\n    public void setDataAccessResourceFailureCodes(String... 
dataAccessResourceFailureCodes) {\n        this.dataAccessResourceFailureCodes = StringUtils.sortStringArray(dataAccessResourceFailureCodes);\n    }\n\n    public String[] getDataAccessResourceFailureCodes() {\n        return this.dataAccessResourceFailureCodes;\n    }\n\n    public void setTransientDataAccessResourceCodes(String... transientDataAccessResourceCodes) {\n        this.transientDataAccessResourceCodes = StringUtils.sortStringArray(transientDataAccessResourceCodes);\n    }\n\n    public String[] getTransientDataAccessResourceCodes() {\n        return this.transientDataAccessResourceCodes;\n    }\n\n    public void setCannotAcquireLockCodes(String... cannotAcquireLockCodes) {\n        this.cannotAcquireLockCodes = StringUtils.sortStringArray(cannotAcquireLockCodes);\n    }\n\n    public String[] getCannotAcquireLockCodes() {\n        return this.cannotAcquireLockCodes;\n    }\n\n    public void setDeadlockLoserCodes(String... deadlockLoserCodes) {\n        this.deadlockLoserCodes = StringUtils.sortStringArray(deadlockLoserCodes);\n    }\n\n    public String[] getDeadlockLoserCodes() {\n        return this.deadlockLoserCodes;\n    }\n\n    public void setCannotSerializeTransactionCodes(String... cannotSerializeTransactionCodes) {\n        this.cannotSerializeTransactionCodes = StringUtils.sortStringArray(cannotSerializeTransactionCodes);\n    }\n\n    public String[] getCannotSerializeTransactionCodes() {\n        return this.cannotSerializeTransactionCodes;\n    }\n\n    public void setCustomTranslations(CustomSQLErrorCodesTranslation... customTranslations) {\n        this.customTranslations = customTranslations;\n    }\n\n    @Nullable\n    public CustomSQLErrorCodesTranslation[] getCustomTranslations() {\n        return this.customTranslations;\n    }\n\n    public void setCustomSqlExceptionTranslatorClass( @Nullable Class<? 
extends SQLExceptionTranslator> customTranslatorClass ) {\n        if (customTranslatorClass != null) {\n            try {\n                this.customSqlExceptionTranslator = (SQLExceptionTranslator) ReflectionUtils.accessibleConstructor(customTranslatorClass, new Class[0]).newInstance();\n            }\n            catch (Throwable e) {\n                throw new IllegalStateException(\"Unable to instantiate custom translator\", e);\n            }\n        }\n        else {\n            this.customSqlExceptionTranslator = null;\n        }\n\n    }\n\n    public void setCustomSqlExceptionTranslator( @Nullable SQLExceptionTranslator customSqlExceptionTranslator ) {\n        this.customSqlExceptionTranslator = customSqlExceptionTranslator;\n    }\n\n    @Nullable\n    public SQLExceptionTranslator getCustomSqlExceptionTranslator() {\n        return this.customSqlExceptionTranslator;\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLErrorCodesFactory.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.util.Collections;\nimport java.util.Iterator;\nimport java.util.Map;\nimport javax.sql.DataSource;\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.ConcurrentReferenceHashMap;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.PatternMatchUtils;\n\npublic class SQLErrorCodesFactory implements Pinenut {\n    private static final Log logger = LogFactory.getLog(SQLErrorCodesFactory.class);\n    private static final SQLErrorCodesFactory instance = new SQLErrorCodesFactory();\n    private final Map<String, SQLErrorCodes > errorCodesMap;\n    private final Map<DataSource, SQLErrorCodes > dataSourceCache = new ConcurrentReferenceHashMap<>(16);\n\n    public static SQLErrorCodesFactory getInstance() {\n        return instance;\n    }\n\n    protected SQLErrorCodesFactory() {\n        Map errorCodes = Collections.emptyMap();\n        /*try {\n            DefaultListableBeanFactory lbf = new DefaultListableBeanFactory();\n            lbf.setBeanClassLoader(this.getClass().getClassLoader());\n            XmlBeanDefinitionReader bdr = new XmlBeanDefinitionReader(lbf);\n            Resource resource = this.loadResource(\"org/springframework/jdbc/support/sql-error-codes.xml\");\n            if (resource != null && resource.exists()) {\n                bdr.loadBeanDefinitions(resource);\n            } else {\n                logger.info(\"Default sql-error-codes.xml not found (should be included in spring-jdbc jar)\");\n            }\n\n            resource = this.loadResource(\"sql-error-codes.xml\");\n            if (resource != null && resource.exists()) {\n                bdr.loadBeanDefinitions(resource);\n                logger.debug(\"Found custom sql-error-codes.xml file at the root of 
the classpath\");\n            }\n\n            errorCodes = lbf.getBeansOfType(SQLErrorCodes.class, true, false);\n            if (logger.isTraceEnabled()) {\n                logger.trace(\"SQLErrorCodes loaded: \" + errorCodes.keySet());\n            }\n        }\n        catch (BeansException var5) {\n            logger.warn(\"Error loading SQL error codes from config file\", var5);\n            errorCodes = Collections.emptyMap();\n        }*/\n\n        this.errorCodesMap = errorCodes;\n    }\n\n    public SQLErrorCodes getErrorCodes(String databaseName) {\n        Assert.notNull(databaseName, \"Database product name must not be null\");\n        SQLErrorCodes sec = (SQLErrorCodes)this.errorCodesMap.get(databaseName);\n        if (sec == null) {\n            Iterator iter = this.errorCodesMap.values().iterator();\n\n            while(iter.hasNext()) {\n                SQLErrorCodes candidate = (SQLErrorCodes)iter.next();\n                if ( PatternMatchUtils.simpleMatch(candidate.getDatabaseProductNames(), databaseName) ) {\n                    sec = candidate;\n                    break;\n                }\n            }\n        }\n\n        if (sec != null) {\n            //this.checkCustomTranslatorRegistry(databaseName, sec);\n            if (logger.isDebugEnabled()) {\n                logger.debug(\"SQL error codes for '\" + databaseName + \"' found\");\n            }\n\n            return sec;\n        }\n        else {\n            if (logger.isDebugEnabled()) {\n                logger.debug(\"SQL error codes for '\" + databaseName + \"' not found\");\n            }\n\n            return new SQLErrorCodes();\n        }\n    }\n\n    public SQLErrorCodes getErrorCodes(DataSource dataSource) {\n        SQLErrorCodes sec = this.resolveErrorCodes(dataSource);\n        return sec != null ? 
sec : new SQLErrorCodes();\n    }\n\n    @Nullable\n    public SQLErrorCodes resolveErrorCodes( DataSource dataSource ) {\n        Assert.notNull(dataSource, \"DataSource must not be null\");\n        if (logger.isDebugEnabled()) {\n            logger.debug(\"Looking up default SQLErrorCodes for DataSource [\" + this.identify(dataSource) + \"]\");\n        }\n\n        SQLErrorCodes sec = (SQLErrorCodes)this.dataSourceCache.get(dataSource);\n        if (sec == null) {\n            synchronized(this.dataSourceCache) {\n                sec = (SQLErrorCodes)this.dataSourceCache.get(dataSource);\n                if (sec == null) {\n                    // TODO, implement JDBC support.\n//                    try {\n//                        String name = (String)JdbcUtils.extractDatabaseMetaData(dataSource, DatabaseMetaData::getDatabaseProductName);\n//                        if ( StringUtils.hasLength( name ) ) {\n//                            return this.registerDatabase(dataSource, name);\n//                        }\n//                    }\n//                    catch ( MetaDataAccessException e ) {\n//                        logger.warn(\"Error while extracting database name\", e);\n//                    }\n\n                    return null;\n                }\n            }\n        }\n\n        if (logger.isDebugEnabled()) {\n            logger.debug(\"SQLErrorCodes found in cache for DataSource [\" + this.identify(dataSource) + \"]\");\n        }\n\n        return sec;\n    }\n\n    public SQLErrorCodes registerDatabase(DataSource dataSource, String databaseName) {\n        SQLErrorCodes sec = this.getErrorCodes(databaseName);\n        if (logger.isDebugEnabled()) {\n            logger.debug(\"Caching SQL error codes for DataSource [\" + this.identify(dataSource) + \"]: database product name is '\" + databaseName + \"'\");\n        }\n\n        this.dataSourceCache.put(dataSource, sec);\n        return sec;\n    }\n\n    @Nullable\n    public SQLErrorCodes 
unregisterDatabase(DataSource dataSource) {\n        return (SQLErrorCodes)this.dataSourceCache.remove(dataSource);\n    }\n\n    private String identify(DataSource dataSource) {\n        return dataSource.getClass().getName() + '@' + Integer.toHexString(dataSource.hashCode());\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLExceptionSubclassTranslator.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.sql.SQLDataException;\nimport java.sql.SQLException;\nimport java.sql.SQLFeatureNotSupportedException;\nimport java.sql.SQLIntegrityConstraintViolationException;\nimport java.sql.SQLInvalidAuthorizationSpecException;\nimport java.sql.SQLNonTransientConnectionException;\nimport java.sql.SQLNonTransientException;\nimport java.sql.SQLRecoverableException;\nimport java.sql.SQLSyntaxErrorException;\nimport java.sql.SQLTimeoutException;\nimport java.sql.SQLTransactionRollbackException;\nimport java.sql.SQLTransientConnectionException;\nimport java.sql.SQLTransientException;\n\nimport com.pinecone.slime.jelly.source.ds.dao.ConcurrencyFailureException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessResourceFailureException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataIntegrityViolationException;\nimport com.pinecone.slime.jelly.source.ds.dao.InvalidDataAccessApiUsageException;\nimport com.pinecone.slime.jelly.source.ds.dao.PermissionDeniedDataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.QueryTimeoutException;\nimport com.pinecone.slime.jelly.source.ds.dao.RecoverableDataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.TransientDataAccessResourceException;\nimport com.pinecone.framework.system.Nullable;\n\n\npublic class SQLExceptionSubclassTranslator extends AbstractFallbackSQLExceptionTranslator {\n    public SQLExceptionSubclassTranslator() {\n        this.setFallbackTranslator( new SQLStateSQLExceptionTranslator() );\n    }\n\n    @Nullable\n    @Override\n    protected DataAccessException doTranslate(String task, @Nullable String sql, SQLException ex) {\n        if (ex instanceof SQLTransientException) {\n            if (ex instanceof SQLTransientConnectionException) {\n                return new TransientDataAccessResourceException(this.buildMessage(task, sql, ex), ex);\n  
          }\n\n            if (ex instanceof SQLTransactionRollbackException) {\n                return new ConcurrencyFailureException(this.buildMessage(task, sql, ex), ex);\n            }\n\n            if (ex instanceof SQLTimeoutException) {\n                return new QueryTimeoutException(this.buildMessage(task, sql, ex), ex);\n            }\n        }\n        else if (ex instanceof SQLNonTransientException) {\n            if (ex instanceof SQLNonTransientConnectionException) {\n                return new DataAccessResourceFailureException(this.buildMessage(task, sql, ex), ex);\n            }\n\n            if (ex instanceof SQLDataException) {\n                return new DataIntegrityViolationException(this.buildMessage(task, sql, ex), ex);\n            }\n\n            if (ex instanceof SQLIntegrityConstraintViolationException) {\n                return new DataIntegrityViolationException(this.buildMessage(task, sql, ex), ex);\n            }\n\n            if (ex instanceof SQLInvalidAuthorizationSpecException) {\n                return new PermissionDeniedDataAccessException(this.buildMessage(task, sql, ex), ex);\n            }\n\n            if (ex instanceof SQLSyntaxErrorException) {\n                return new BadSqlGrammarException(task, sql != null ? sql : \"\", ex);\n            }\n\n            if (ex instanceof SQLFeatureNotSupportedException) {\n                return new InvalidDataAccessApiUsageException(this.buildMessage(task, sql, ex), ex);\n            }\n        }\n        else if (ex instanceof SQLRecoverableException) {\n            return new RecoverableDataAccessException(this.buildMessage(task, sql, ex), ex);\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLExceptionTranslator.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.sql.SQLException;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\n\n@FunctionalInterface\npublic interface SQLExceptionTranslator extends Pinenut {\n    @Nullable\n    DataAccessException translate(String task, @Nullable String sql, SQLException e );\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/SQLStateSQLExceptionTranslator.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.sql.SQLException;\nimport java.util.HashSet;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.slime.jelly.source.ds.dao.ConcurrencyFailureException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessResourceFailureException;\nimport com.pinecone.slime.jelly.source.ds.dao.DataIntegrityViolationException;\nimport com.pinecone.slime.jelly.source.ds.dao.QueryTimeoutException;\nimport com.pinecone.slime.jelly.source.ds.dao.TransientDataAccessResourceException;\n\npublic class SQLStateSQLExceptionTranslator extends AbstractFallbackSQLExceptionTranslator {\n    private static final Set<String> BAD_SQL_GRAMMAR_CODES                 = new HashSet<>(8);\n    private static final Set<String> DATA_INTEGRITY_VIOLATION_CODES        = new HashSet<>(8);\n    private static final Set<String> DATA_ACCESS_RESOURCE_FAILURE_CODES    = new HashSet<>(8);\n    private static final Set<String> TRANSIENT_DATA_ACCESS_RESOURCE_CODES  = new HashSet<>(8);\n    private static final Set<String> CONCURRENCY_FAILURE_CODES             = new HashSet<>(4);\n\n    public SQLStateSQLExceptionTranslator() {\n\n    }\n\n    @Nullable\n    @Override\n    protected DataAccessException doTranslate(String task, @Nullable String sql, SQLException ex) {\n        String sqlState = this.getSqlState(ex);\n        if (sqlState != null && sqlState.length() >= 2) {\n            String classCode = sqlState.substring(0, 2);\n            if (this.logger.isDebugEnabled()) {\n                this.logger.debug(\"Extracted SQL state class '\" + classCode + \"' from value '\" + sqlState + \"'\");\n            }\n\n            if (BAD_SQL_GRAMMAR_CODES.contains(classCode)) {\n                return new BadSqlGrammarException(task, sql != null ? 
sql : \"\", ex);\n            }\n\n            if (DATA_INTEGRITY_VIOLATION_CODES.contains(classCode)) {\n                return new DataIntegrityViolationException(this.buildMessage(task, sql, ex), ex);\n            }\n\n            if (DATA_ACCESS_RESOURCE_FAILURE_CODES.contains(classCode)) {\n                return new DataAccessResourceFailureException(this.buildMessage(task, sql, ex), ex);\n            }\n\n            if (TRANSIENT_DATA_ACCESS_RESOURCE_CODES.contains(classCode)) {\n                return new TransientDataAccessResourceException(this.buildMessage(task, sql, ex), ex);\n            }\n\n            if (CONCURRENCY_FAILURE_CODES.contains(classCode)) {\n                return new ConcurrencyFailureException(this.buildMessage(task, sql, ex), ex);\n            }\n        }\n\n        return ex.getClass().getName().contains(\"Timeout\") ? new QueryTimeoutException(this.buildMessage(task, sql, ex), ex) : null;\n    }\n\n    @Nullable\n    private String getSqlState(SQLException ex) {\n        String sqlState = ex.getSQLState();\n        if (sqlState == null) {\n            SQLException nestedEx = ex.getNextException();\n            if (nestedEx != null) {\n                sqlState = nestedEx.getSQLState();\n            }\n        }\n\n        return sqlState;\n    }\n\n    static {\n        BAD_SQL_GRAMMAR_CODES.add(\"07\");\n        BAD_SQL_GRAMMAR_CODES.add(\"21\");\n        BAD_SQL_GRAMMAR_CODES.add(\"2A\");\n        BAD_SQL_GRAMMAR_CODES.add(\"37\");\n        BAD_SQL_GRAMMAR_CODES.add(\"42\");\n        BAD_SQL_GRAMMAR_CODES.add(\"65\");\n        DATA_INTEGRITY_VIOLATION_CODES.add(\"01\");\n        DATA_INTEGRITY_VIOLATION_CODES.add(\"02\");\n        DATA_INTEGRITY_VIOLATION_CODES.add(\"22\");\n        DATA_INTEGRITY_VIOLATION_CODES.add(\"23\");\n        DATA_INTEGRITY_VIOLATION_CODES.add(\"27\");\n        DATA_INTEGRITY_VIOLATION_CODES.add(\"44\");\n        DATA_ACCESS_RESOURCE_FAILURE_CODES.add(\"08\");\n        
DATA_ACCESS_RESOURCE_FAILURE_CODES.add(\"53\");\n        DATA_ACCESS_RESOURCE_FAILURE_CODES.add(\"54\");\n        DATA_ACCESS_RESOURCE_FAILURE_CODES.add(\"57\");\n        DATA_ACCESS_RESOURCE_FAILURE_CODES.add(\"58\");\n        TRANSIENT_DATA_ACCESS_RESOURCE_CODES.add(\"JW\");\n        TRANSIENT_DATA_ACCESS_RESOURCE_CODES.add(\"JZ\");\n        TRANSIENT_DATA_ACCESS_RESOURCE_CODES.add(\"S1\");\n        CONCURRENCY_FAILURE_CODES.add(\"40\");\n        CONCURRENCY_FAILURE_CODES.add(\"61\");\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/TransientDataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\n\npublic abstract class TransientDataAccessException extends DataAccessException {\n    public TransientDataAccessException( String msg ) {\n        super(msg);\n    }\n\n    public TransientDataAccessException( String msg, @Nullable Throwable cause ) {\n        super(msg, cause);\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/TransientDataAccessResourceException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\npublic class TransientDataAccessResourceException extends TransientDataAccessException {\n    public TransientDataAccessResourceException(String msg) {\n        super(msg);\n    }\n\n    public TransientDataAccessResourceException(String msg, Throwable cause) {\n        super(msg, cause);\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/UncategorizedDataAccessException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.slime.jelly.source.ds.dao.NonTransientDataAccessException;\n\npublic abstract class UncategorizedDataAccessException extends NonTransientDataAccessException {\n    public UncategorizedDataAccessException( @Nullable String msg, @Nullable Throwable cause ) {\n        super( msg, cause );\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/jdbc/UncategorizedSQLException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.jdbc;\n\nimport java.sql.SQLException;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic class UncategorizedSQLException extends UncategorizedDataAccessException {\n    @Nullable\n    private final String sql;\n\n    public UncategorizedSQLException( String task, @Nullable String sql, SQLException ex ) {\n        super(task + \"; uncategorized SQLException\" + (sql != null ? \" for SQL [\" + sql + \"]\" : \"\") + \"; SQL state [\" + ex.getSQLState() + \"]; error code [\" + ex.getErrorCode() + \"]; \" + ex.getMessage(), ex);\n        this.sql = sql;\n    }\n\n    public SQLException getSQLException() {\n        return (SQLException)this.getCause();\n    }\n\n    @Nullable\n    public String getSql() {\n        return this.sql;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/PlatformTransactionManager.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic interface PlatformTransactionManager extends TransactionManager {\n    TransactionStatus getTransaction( @Nullable TransactionDefinition definition ) throws TransactionException;\n\n    void commit( TransactionStatus status ) throws TransactionException;\n\n    void rollback( TransactionStatus status ) throws TransactionException;\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/ResourceHolder.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ResourceHolder extends Pinenut {\n    void reset();\n\n    void unbound();\n\n    boolean isVoid();\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/ResourceHolderSupport.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport java.util.Date;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic abstract class ResourceHolderSupport implements ResourceHolder {\n    private boolean synchronizedWithTransaction = false;\n    private boolean rollbackOnly = false;\n    @Nullable\n    private Date deadline;\n    private int referenceCount = 0;\n    private boolean isVoid = false;\n\n    public ResourceHolderSupport() {\n    }\n\n    public void setSynchronizedWithTransaction(boolean synchronizedWithTransaction) {\n        this.synchronizedWithTransaction = synchronizedWithTransaction;\n    }\n\n    public boolean isSynchronizedWithTransaction() {\n        return this.synchronizedWithTransaction;\n    }\n\n    public void setRollbackOnly() {\n        this.rollbackOnly = true;\n    }\n\n    public void resetRollbackOnly() {\n        this.rollbackOnly = false;\n    }\n\n    public boolean isRollbackOnly() {\n        return this.rollbackOnly;\n    }\n\n    public void setTimeoutInSeconds(int seconds) {\n        this.setTimeoutInMillis((long)seconds * 1000L);\n    }\n\n    public void setTimeoutInMillis(long millis) {\n        this.deadline = new Date(System.currentTimeMillis() + millis);\n    }\n\n    public boolean hasTimeout() {\n        return this.deadline != null;\n    }\n\n    @Nullable\n    public Date getDeadline() {\n        return this.deadline;\n    }\n\n    public int getTimeToLiveInSeconds() {\n        double diff = (double)this.getTimeToLiveInMillis() / 1000.0D;\n        int secs = (int)Math.ceil(diff);\n        this.checkTransactionTimeout(secs <= 0);\n        return secs;\n    }\n\n    public long getTimeToLiveInMillis() throws TransactionTimedOutException {\n        if (this.deadline == null) {\n            throw new IllegalStateException(\"No timeout specified for this resource holder\");\n        } else {\n            long timeToLive = this.deadline.getTime() - System.currentTimeMillis();\n    
        this.checkTransactionTimeout(timeToLive <= 0L);\n            return timeToLive;\n        }\n    }\n\n    private void checkTransactionTimeout(boolean deadlineReached) throws TransactionTimedOutException {\n        if (deadlineReached) {\n            this.setRollbackOnly();\n            throw new TransactionTimedOutException(\"Transaction timed out: deadline was \" + this.deadline);\n        }\n    }\n\n    public void requested() {\n        ++this.referenceCount;\n    }\n\n    public void released() {\n        --this.referenceCount;\n    }\n\n    public boolean isOpen() {\n        return this.referenceCount > 0;\n    }\n\n    public void clear() {\n        this.synchronizedWithTransaction = false;\n        this.rollbackOnly = false;\n        this.deadline = null;\n    }\n\n    public void reset() {\n        this.clear();\n        this.referenceCount = 0;\n    }\n\n    public void unbound() {\n        this.isVoid = true;\n    }\n\n    public boolean isVoid() {\n        return this.isVoid;\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/ResourceTransactionManager.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\npublic interface ResourceTransactionManager extends PlatformTransactionManager {\n    Object getResourceFactory();\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/SavepointManager.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface SavepointManager extends Pinenut {\n    Object createSavepoint() throws TransactionException;\n\n    void rollbackToSavepoint(Object point) throws TransactionException;\n\n    void releaseSavepoint(Object point) throws TransactionException;\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/StaticTransactionDefinition.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nfinal class StaticTransactionDefinition implements TransactionDefinition {\n    static final StaticTransactionDefinition INSTANCE = new StaticTransactionDefinition();\n\n    private StaticTransactionDefinition() {\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionDefinition.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TransactionDefinition extends Pinenut {\n    int PROPAGATION_REQUIRED = 0;\n    int PROPAGATION_SUPPORTS = 1;\n    int PROPAGATION_MANDATORY = 2;\n    int PROPAGATION_REQUIRES_NEW = 3;\n    int PROPAGATION_NOT_SUPPORTED = 4;\n    int PROPAGATION_NEVER = 5;\n    int PROPAGATION_NESTED = 6;\n    int ISOLATION_DEFAULT = -1;\n    int ISOLATION_READ_UNCOMMITTED = 1;\n    int ISOLATION_READ_COMMITTED = 2;\n    int ISOLATION_REPEATABLE_READ = 4;\n    int ISOLATION_SERIALIZABLE = 8;\n    int TIMEOUT_DEFAULT = -1;\n\n    default int getPropagationBehavior() {\n        return 0;\n    }\n\n    default int getIsolationLevel() {\n        return -1;\n    }\n\n    default int getTimeout() {\n        return -1;\n    }\n\n    default boolean isReadOnly() {\n        return false;\n    }\n\n    @Nullable\n    default String getName() {\n        return null;\n    }\n\n    static TransactionDefinition withDefaults() {\n        return StaticTransactionDefinition.INSTANCE;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport com.pinecone.framework.system.NestedRuntimeException;\n\npublic abstract class TransactionException extends NestedRuntimeException {\n    public TransactionException( String msg ) {\n        super(msg);\n    }\n\n    public TransactionException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionExecution.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TransactionExecution extends Pinenut {\n    boolean isNewTransaction();\n\n    void setRollbackOnly();\n\n    boolean isRollbackOnly();\n\n    boolean isCompleted();\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionManager.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TransactionManager extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionStatus.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport java.io.Flushable;\n\npublic interface TransactionStatus extends TransactionExecution, SavepointManager, Flushable {\n    boolean hasSavepoint();\n\n    @Override\n    void flush();\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionSynchronization.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport java.io.Flushable;\n\nimport com.pinecone.framework.util.comparator.Ordered;\n\npublic interface TransactionSynchronization extends Ordered, Flushable {\n    int STATUS_COMMITTED = 0;\n    int STATUS_ROLLED_BACK = 1;\n    int STATUS_UNKNOWN = 2;\n\n    default int getOrder() {\n        return 2147483647;\n    }\n\n    default void suspend() {\n    }\n\n    default void resume() {\n    }\n\n    default void flush() {\n    }\n\n    default void beforeCommit(boolean readOnly) {\n    }\n\n    default void beforeCompletion() {\n    }\n\n    default void afterCommit() {\n    }\n\n    default void afterCompletion(int status) {\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionSynchronizationAdapter.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport com.pinecone.framework.util.comparator.Ordered;\n\npublic abstract class TransactionSynchronizationAdapter implements TransactionSynchronization, Ordered {\n    public TransactionSynchronizationAdapter() {\n    }\n\n    public int getOrder() {\n        return 2147483647;\n    }\n\n    public void suspend() {\n    }\n\n    public void resume() {\n    }\n\n    public void flush() {\n    }\n\n    public void beforeCommit(boolean readOnly) {\n    }\n\n    public void beforeCompletion() {\n    }\n\n    public void afterCommit() {\n    }\n\n    public void afterCompletion(int status) {\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionSynchronizationManager.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.LinkedHashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.comparator.OrderComparator;\nimport com.pinecone.framework.lang.NamedThreadLocal;\n\npublic final class TransactionSynchronizationManager {\n    private static final Log logger                                                    = LogFactory.getLog(TransactionSynchronizationManager.class);\n    private static final ThreadLocal<Map<Object, Object> > resources                   = new NamedThreadLocal<>( \"Transactional resources\" );\n    private static final ThreadLocal<Set<TransactionSynchronization>> synchronizations = new NamedThreadLocal<>( \"Transaction synchronizations\" );\n    private static final ThreadLocal<String> currentTransactionName                    = new NamedThreadLocal<>( \"Current transaction name\" );\n    private static final ThreadLocal<Boolean> currentTransactionReadOnly               = new NamedThreadLocal<>( \"Current transaction read-only status\" );\n    private static final ThreadLocal<Integer> currentTransactionIsolationLevel         = new NamedThreadLocal<>( \"Current transaction isolation level\" );\n    private static final ThreadLocal<Boolean> actualTransactionActive                  = new NamedThreadLocal<>( \"Actual transaction active\" );\n\n    public TransactionSynchronizationManager() {\n    }\n\n    public static Map<Object, Object> getResourceMap() {\n        Map<Object, Object> map = (Map)resources.get();\n        return map != null ? 
Collections.unmodifiableMap(map) : Collections.emptyMap();\n    }\n\n    public static boolean hasResource(Object key) {\n        Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key);\n        Object value = doGetResource(actualKey);\n        return value != null;\n    }\n\n    @Nullable\n    public static Object getResource(Object key) {\n        Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key);\n        Object value = doGetResource(actualKey);\n        if ( value != null && logger.isTraceEnabled() ) {\n            logger.trace(\"Retrieved value [\" + value + \"] for key [\" + actualKey + \"] bound to thread [\" + Thread.currentThread().getName() + \"]\");\n        }\n\n        return value;\n    }\n\n    @Nullable\n    private static Object doGetResource(Object actualKey) {\n        Map<Object, Object> map = (Map)resources.get();\n        if ( map == null ) {\n            return null;\n        }\n        else {\n            Object value = map.get(actualKey);\n            if ( value instanceof ResourceHolder && ((ResourceHolder)value).isVoid() ) {\n                map.remove( actualKey );\n                if ( map.isEmpty() ) {\n                    TransactionSynchronizationManager.resources.remove();\n                }\n\n                value = null;\n            }\n\n            return value;\n        }\n    }\n\n    public static void bindResource(Object key, Object value) throws IllegalStateException {\n        Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key);\n        Assert.notNull(value, \"Value must not be null\");\n        Map<Object, Object> map = (Map)TransactionSynchronizationManager.resources.get();\n        if ( map == null ) {\n            map = new HashMap<>();\n            TransactionSynchronizationManager.resources.set(map);\n        }\n\n        Object oldValue = ((Map)map).put(actualKey, value);\n        if ( oldValue instanceof ResourceHolder && 
((ResourceHolder)oldValue).isVoid() ) {\n            oldValue = null;\n        }\n\n        if ( oldValue != null ) {\n            throw new IllegalStateException(\"Already value [\" + oldValue + \"] for key [\" + actualKey + \"] bound to thread [\" + Thread.currentThread().getName() + \"]\");\n        }\n        else {\n            if (logger.isTraceEnabled()) {\n                logger.trace(\"Bound value [\" + value + \"] for key [\" + actualKey + \"] to thread [\" + Thread.currentThread().getName() + \"]\");\n            }\n        }\n    }\n\n    public static Object unbindResource(Object key) throws IllegalStateException {\n        Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key);\n        Object value = doUnbindResource(actualKey);\n        if (value == null) {\n            throw new IllegalStateException(\"No value for key [\" + actualKey + \"] bound to thread [\" + Thread.currentThread().getName() + \"]\");\n        }\n        else {\n            return value;\n        }\n    }\n\n    @Nullable\n    public static Object unbindResourceIfPossible(Object key) {\n        Object actualKey = TransactionSynchronizationUtils.unwrapResourceIfNecessary(key);\n        return doUnbindResource(actualKey);\n    }\n\n    @Nullable\n    private static Object doUnbindResource(Object actualKey) {\n        Map<Object, Object> map = (Map)TransactionSynchronizationManager.resources.get();\n        if ( map == null ) {\n            return null;\n        }\n        else {\n            Object value = map.remove(actualKey);\n            if ( map.isEmpty() ) {\n                TransactionSynchronizationManager.resources.remove();\n            }\n\n            if ( value instanceof ResourceHolder && ((ResourceHolder)value).isVoid() ) {\n                value = null;\n            }\n\n            if ( value != null && TransactionSynchronizationManager.logger.isTraceEnabled() ) {\n                TransactionSynchronizationManager.logger.trace(\"Removed 
value [\" + value + \"] for key [\" + actualKey + \"] from thread [\" + Thread.currentThread().getName() + \"]\");\n            }\n\n            return value;\n        }\n    }\n\n    public static boolean isSynchronizationActive() {\n        return TransactionSynchronizationManager.synchronizations.get() != null;\n    }\n\n    public static void initSynchronization() throws IllegalStateException {\n        if ( isSynchronizationActive() ) {\n            throw new IllegalStateException(\"Cannot activate transaction synchronization - already active\");\n        }\n        else {\n            TransactionSynchronizationManager.logger.trace(\"Initializing transaction synchronization\");\n            TransactionSynchronizationManager.synchronizations.set( new LinkedHashSet<>() );\n        }\n    }\n\n    public static void registerSynchronization(TransactionSynchronization synchronization) throws IllegalStateException {\n        Assert.notNull(synchronization, \"TransactionSynchronization must not be null\");\n        Set<TransactionSynchronization> synchs = (Set)TransactionSynchronizationManager.synchronizations.get();\n        if ( synchs == null ) {\n            throw new IllegalStateException(\"Transaction synchronization is not active\");\n        }\n        else {\n            synchs.add(synchronization);\n        }\n    }\n\n    public static List<TransactionSynchronization> getSynchronizations() throws IllegalStateException {\n        Set<TransactionSynchronization> synchs = (Set)TransactionSynchronizationManager.synchronizations.get();\n        if (synchs == null) {\n            throw new IllegalStateException(\"Transaction synchronization is not active\");\n        }\n        else if (synchs.isEmpty()) {\n            return Collections.emptyList();\n        }\n        else {\n            List<TransactionSynchronization> sortedSynchs = new ArrayList<>(synchs);\n            OrderComparator.sort( sortedSynchs );\n            return 
Collections.unmodifiableList(sortedSynchs);\n        }\n    }\n\n    public static void clearSynchronization() throws IllegalStateException {\n        if (!isSynchronizationActive()) {\n            throw new IllegalStateException(\"Cannot deactivate transaction synchronization - not active\");\n        }\n        else {\n            TransactionSynchronizationManager.logger.trace(\"Clearing transaction synchronization\");\n            TransactionSynchronizationManager.synchronizations.remove();\n        }\n    }\n\n    public static void setCurrentTransactionName( @Nullable String name ) {\n        TransactionSynchronizationManager.currentTransactionName.set(name);\n    }\n\n    @Nullable\n    public static String getCurrentTransactionName() {\n        return (String)TransactionSynchronizationManager.currentTransactionName.get();\n    }\n\n    public static void setCurrentTransactionReadOnly(boolean readOnly) {\n        TransactionSynchronizationManager.currentTransactionReadOnly.set(readOnly ? Boolean.TRUE : null);\n    }\n\n    public static boolean isCurrentTransactionReadOnly() {\n        return TransactionSynchronizationManager.currentTransactionReadOnly.get() != null;\n    }\n\n    public static void setCurrentTransactionIsolationLevel( @Nullable Integer isolationLevel ) {\n        TransactionSynchronizationManager.currentTransactionIsolationLevel.set(isolationLevel);\n    }\n\n    @Nullable\n    public static Integer getCurrentTransactionIsolationLevel() {\n        return (Integer)TransactionSynchronizationManager.currentTransactionIsolationLevel.get();\n    }\n\n    public static void setActualTransactionActive(boolean active) {\n        TransactionSynchronizationManager.actualTransactionActive.set(active ? 
Boolean.TRUE : null);\n    }\n\n    public static boolean isActualTransactionActive() {\n        return TransactionSynchronizationManager.actualTransactionActive.get() != null;\n    }\n\n    public static void clear() {\n        TransactionSynchronizationManager.synchronizations.remove();\n        TransactionSynchronizationManager.currentTransactionName.remove();\n        TransactionSynchronizationManager.currentTransactionReadOnly.remove();\n        TransactionSynchronizationManager.currentTransactionIsolationLevel.remove();\n        TransactionSynchronizationManager.actualTransactionActive.remove();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionSynchronizationUtils.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport org.apache.commons.logging.Log;\nimport org.apache.commons.logging.LogFactory;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.ClassUtils;\nimport com.pinecone.framework.system.aop.InfrastructureProxy;\nimport com.pinecone.framework.system.aop.ScopedObject;\n\npublic final class TransactionSynchronizationUtils {\n    private static final Log           logger = LogFactory.getLog( TransactionSynchronizationUtils.class );\n    private static final boolean aopAvailable = ClassUtils.isPresent(\"com.pinecone.framework.system.aop.ScopedObject\", TransactionSynchronizationUtils.class.getClassLoader());\n\n    public TransactionSynchronizationUtils() {\n    }\n\n    public static boolean sameResourceFactory( ResourceTransactionManager tm, Object resourceFactory ) {\n        return unwrapResourceIfNecessary( tm.getResourceFactory()).equals(unwrapResourceIfNecessary(resourceFactory) );\n    }\n\n    static Object unwrapResourceIfNecessary( Object resource ) {\n        Assert.notNull( resource, \"Resource must not be null\" );\n        Object resourceRef = resource;\n        if ( resource instanceof InfrastructureProxy ) {\n            resourceRef = ((InfrastructureProxy)resource).getWrappedObject();\n        }\n\n        if ( TransactionSynchronizationUtils.aopAvailable ) {\n            resourceRef = TransactionSynchronizationUtils.ScopedProxyUnwrapper.unwrapIfNecessary( resourceRef );\n        }\n\n        return resourceRef;\n    }\n\n    public static void triggerFlush() {\n        Iterator iter = TransactionSynchronizationManager.getSynchronizations().iterator();\n\n        while( iter.hasNext() ) {\n            TransactionSynchronization synchronization = (TransactionSynchronization)iter.next();\n            synchronization.flush();\n        }\n\n    }\n\n    
public static void triggerBeforeCommit(boolean readOnly) {\n        Iterator iter = TransactionSynchronizationManager.getSynchronizations().iterator();\n\n        while( iter.hasNext() ) {\n            TransactionSynchronization synchronization = (TransactionSynchronization)iter.next();\n            synchronization.beforeCommit(readOnly);\n        }\n\n    }\n\n    public static void triggerBeforeCompletion() {\n        Iterator iter = TransactionSynchronizationManager.getSynchronizations().iterator();\n\n        while( iter.hasNext() ) {\n            TransactionSynchronization synchronization = (TransactionSynchronization)iter.next();\n\n            try {\n                synchronization.beforeCompletion();\n            }\n            catch ( Throwable e ) {\n                logger.debug( \"TransactionSynchronization.beforeCompletion threw exception\", e );\n            }\n        }\n\n    }\n\n    public static void triggerAfterCommit() {\n        invokeAfterCommit(TransactionSynchronizationManager.getSynchronizations());\n    }\n\n    public static void invokeAfterCommit( @Nullable List<TransactionSynchronization> synchronizations ) {\n        if ( synchronizations != null ) {\n            Iterator iter = synchronizations.iterator();\n\n            while( iter.hasNext() ) {\n                TransactionSynchronization synchronization = (TransactionSynchronization)iter.next();\n                synchronization.afterCommit();\n            }\n        }\n\n    }\n\n    public static void triggerAfterCompletion( int completionStatus ) {\n        List<TransactionSynchronization> synchronizations = TransactionSynchronizationManager.getSynchronizations();\n        TransactionSynchronizationUtils.invokeAfterCompletion(synchronizations, completionStatus);\n    }\n\n    public static void invokeAfterCompletion( @Nullable List<TransactionSynchronization> synchronizations, int completionStatus ) {\n        if ( synchronizations != null ) {\n            Iterator iter = 
synchronizations.iterator();\n\n            while( iter.hasNext() ) {\n                TransactionSynchronization synchronization = (TransactionSynchronization)iter.next();\n\n                try {\n                    synchronization.afterCompletion(completionStatus);\n                }\n                catch ( Throwable e ) {\n                    logger.debug(\"TransactionSynchronization.afterCompletion threw exception\", e);\n                }\n            }\n        }\n\n    }\n\n    private static class ScopedProxyUnwrapper {\n        private ScopedProxyUnwrapper() {\n        }\n\n        public static Object unwrapIfNecessary(Object resource) {\n            return resource instanceof ScopedObject ? ((ScopedObject)resource).getTargetObject() : resource;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ds/transaction/TransactionTimedOutException.java",
    "content": "package com.pinecone.slime.jelly.source.ds.transaction;\n\npublic class TransactionTimedOutException extends TransactionException {\n    public TransactionTimedOutException( String msg ) {\n        super(msg);\n    }\n\n    public TransactionTimedOutException( String msg, Throwable cause ) {\n        super(msg, cause);\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/ArchDynamicQuerierResultHandler.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.slime.source.GenericResultConverter;\nimport com.pinecone.slime.source.rdb.RDBTargetTableMeta;\nimport com.pinecone.slime.source.ResultConverter;\nimport org.apache.ibatis.session.ResultHandler;\n\nimport java.util.Map;\n\npublic abstract class ArchDynamicQuerierResultHandler<V > implements ResultHandler<Map<Object, V > > {\n    protected ResultConverter<V > mConverter ;\n    protected RDBTargetTableMeta  mRDBTargetTableMeta;\n\n    public ArchDynamicQuerierResultHandler( RDBTargetTableMeta meta ) {\n        this.mRDBTargetTableMeta = meta;\n\n        if( this.mRDBTargetTableMeta.getResultConverter() == null ) {\n            this.mRDBTargetTableMeta.setResultConverter( new GenericResultConverter<>( this.mRDBTargetTableMeta.getValueType(), this.mRDBTargetTableMeta.getValueMetaKeys() ) );\n        }\n        this.mConverter          = this.mRDBTargetTableMeta.getResultConverter();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/DynamicQuerierEntityResultHandler.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.slime.source.rdb.RDBTargetTableMeta;\nimport org.apache.ibatis.session.ResultContext;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\n\npublic class DynamicQuerierEntityResultHandler<V > extends ArchDynamicQuerierResultHandler<V > {\n    private List<V > mResults = new ArrayList<>();\n\n    public DynamicQuerierEntityResultHandler( RDBTargetTableMeta meta ) {\n        super( meta );\n    }\n\n    @Override\n    public void handleResult( ResultContext<? extends Map<Object, V > > context ) {\n        Map<Object, V > resultObject = context.getResultObject();\n        this.mResults.add( this.mConverter.convert( resultObject ) );\n    }\n\n    public List<V > getResults() {\n        return this.mResults;\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/DynamicQuerierMappedResultHandler.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.slime.map.QueryRange;\nimport com.pinecone.slime.source.rdb.RDBTargetTableMeta;\nimport org.apache.ibatis.session.ResultContext;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\npublic class DynamicQuerierMappedResultHandler<V > extends ArchDynamicQuerierResultHandler<V > {\n    private Map<Object, V > mResults = new LinkedHashMap<>();\n    private QueryRange      mRange;\n\n    public DynamicQuerierMappedResultHandler( RDBTargetTableMeta meta, QueryRange range ) {\n        super( meta );\n\n        this.mRange = range;\n    }\n\n    @Override\n    public void handleResult( ResultContext<? extends Map<Object, V > > context ) {\n        Map<Object, V > resultObject = context.getResultObject();\n        String szRangeKey = this.mRange.getRangeKey();\n        Object keyVal = resultObject.get( szRangeKey );\n        resultObject.remove( szRangeKey );\n\n        this.mResults.put( keyVal, this.mConverter.convert( resultObject ) );\n    }\n\n    public Map<Object, V > getResults() {\n        return this.mResults;\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/DynamicQuerierSqlBuilder.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.slime.map.QueryRange;\nimport com.pinecone.slime.source.rdb.RDBTargetTableMeta;\nimport com.pinecone.slime.source.ResultConverter;\nimport org.apache.ibatis.jdbc.SQL;\n\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TreeSet;\n\npublic final class DynamicQuerierSqlBuilder {\n\n    public static SQL assembleSelectSQL( RDBTargetTableMeta meta, Set<String > keys ) {\n        return new SQL() {{\n            if ( keys.isEmpty() ) {\n                SELECT(\"*\");\n            }\n            else {\n                SELECT( String.join( \", \", keys ) );\n            }\n            FROM( meta.getTableName() );\n        }};\n    }\n\n    public static String selectList  ( Map<String, Object> params ) {\n        RDBTargetTableMeta meta = (RDBTargetTableMeta) params.get( \"meta\" );\n        Set<String > keys       = meta.getValueMetaKeys();\n\n        SQL sql = DynamicQuerierSqlBuilder.assembleSelectSQL( meta, keys );\n        return sql.toString() + \"${exSafeSQL}\";\n    }\n\n    public static String selectObjectByRange( Map<String, Object> params, boolean bWithRangeKey ) {\n        RDBTargetTableMeta meta = (RDBTargetTableMeta) params.get(\"meta\");\n        QueryRange        range = (QueryRange) params.get(\"range\");\n        Set<String >       keys = meta.getValueMetaKeys();\n\n        if( range != null && bWithRangeKey ) {\n            keys = new TreeSet<>( keys );\n            keys.add( range.getRangeKey() );\n        }\n\n        SQL sql = DynamicQuerierSqlBuilder.assembleSelectSQL( meta, keys );\n\n        if ( range != null ) {\n            sql.WHERE( range.getRangeKey() + \" >= #{range.min}\" );\n            sql.WHERE( range.getRangeKey() + \" <= #{range.max}\" );\n        }\n        return sql.toString();\n    }\n\n    public static String selectListByRange( Map<String, Object> params ) {\n        return DynamicQuerierSqlBuilder.selectObjectByRange( params, 
false );\n    }\n\n    public static String selectMappedByRange( Map<String, Object> params ) {\n        return DynamicQuerierSqlBuilder.selectObjectByRange( params, true );\n    }\n\n    public static String selectListByColumn  ( Map<String, Object> params ) {\n        RDBTargetTableMeta meta = (RDBTargetTableMeta) params.get( \"meta\" );\n        Set<String > keys       = meta.getValueMetaKeys();\n\n        SQL sql = new SQL() {{\n            if ( keys.isEmpty() ) {\n                SELECT(\"*\");\n            }\n            else {\n                SELECT( String.join( \", \", keys ) );\n            }\n            FROM( meta.getTableName() );\n\n            WHERE( params.get( \"columnKey\" ) + \" = #{key}\" );\n        }};\n        return sql.toString();\n    }\n\n    public static String insert      ( Map<String, Object> params ) {\n        RDBTargetTableMeta meta = (RDBTargetTableMeta) params.get(\"meta\");\n        Object              key = params.get(\"key\");\n        Object           entity = params.get(\"entity\");\n        Set<String >       keys = meta.getValueMetaKeys();\n\n        SQL sql = new SQL() {{\n            INSERT_INTO( meta.getTableName() );\n            if ( key != null ) {\n                String szIdxKey = meta.getIndexKey();\n                VALUES( szIdxKey, \"#{key}\" );\n                keys.remove( szIdxKey );\n            }\n\n            if( ResultConverter.isPrimitiveOrSpecialType( entity.getClass() ) ) {\n                keys.forEach( k -> VALUES( k, \"#{entity}\" ));\n            }\n            else {\n                keys.forEach( k -> VALUES( k, \"#{entity.\" + k + \"}\" ));\n            }\n        }};\n\n        return sql.toString();\n    }\n\n    public static String updateByEntity ( Map<String, Object> params ) {\n        RDBTargetTableMeta meta = (RDBTargetTableMeta) params.get(\"meta\");\n        Object entity = params.get(\"entity\");\n        Set<String> keys = meta.getValueMetaKeys();\n        Object key = params.get(\"key\");\n\n       
 SQL sql = new SQL() {{\n            UPDATE(meta.getTableName());\n\n            if( ResultConverter.isPrimitiveOrSpecialType( entity.getClass() ) ) {\n                keys.forEach(key -> SET(key + \" = #{entity}\"));\n                if( key != null ) {\n                    WHERE(key + \" = #{entity}\");\n                }\n                else {\n                    WHERE(meta.getIndexKey() + \" = #{entity}\");\n                }\n            }\n            else {\n                keys.forEach(key -> SET(key + \" = #{entity.\" + key + \"}\"));\n                WHERE(meta.getIndexKey() + \" = #{entity.\" + meta.getIndexKey() + \"}\");\n            }\n        }};\n\n        return sql.toString();\n    }\n\n    public static String deleteByKey ( Map<String, Object> params ) {\n        RDBTargetTableMeta meta = (RDBTargetTableMeta) params.get(\"meta\");\n        Object key = params.get(\"key\");\n\n        SQL sql = new SQL() {{\n            DELETE_FROM(meta.getTableName());\n            WHERE(meta.getIndexKey() + \" = #{key}\");\n        }};\n\n        return sql.toString();\n    }\n\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/GenericMybatisQuerierDataManipulator.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.rdb.SQLStrings;\nimport com.pinecone.slime.map.QueryRange;\nimport com.pinecone.slime.source.GenericResultConverter;\nimport com.pinecone.slime.source.ResultConverter;\nimport com.pinecone.slime.source.rdb.RDBTargetTableMeta;\nimport com.pinecone.slime.source.rdb.RangedRDBQuerierDataManipulator;\n\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.ResultType;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.SelectProvider;\nimport org.apache.ibatis.annotations.InsertProvider;\nimport org.apache.ibatis.annotations.UpdateProvider;\nimport org.apache.ibatis.annotations.DeleteProvider;\nimport org.apache.ibatis.annotations.Update;\nimport org.apache.ibatis.session.ResultHandler;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\n\npublic interface GenericMybatisQuerierDataManipulator<K, V > extends RangedRDBQuerierDataManipulator<K, V > {\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM ${meta.tableName} ${exSafeSQL}\" )\n    long counts( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"exSafeSQL\") String szExSafeSQL );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM ${meta.tableName} WHERE ${keyName} = ${key}\" )\n    long countsByColumn( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"keyName\") String szSpecificColumnKeyName, @Param(\"key\") Object key );\n\n    @Override\n    @Select( \"SELECT COUNT(*) FROM ${meta.tableName} WHERE ${range.rangeKey} >= ${range.min} AND ${range.rangeKey} <= ${range.max}\" )\n    long countsByRange( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"range\") QueryRange range );\n\n    @SelectProvider( type = DynamicQuerierSqlBuilder.class, method = \"selectList\" )\n    @ResultType( LinkedTreeMap.class )\n    void selectList0( @Param(\"meta\") RDBTargetTableMeta meta, 
@Param(\"handler\") ResultHandler<Map<Object, V > > handler, @Param(\"exSafeSQL\") String szExSafeSQL );\n\n    @Override\n    @SelectProvider( type = DynamicQuerierSqlBuilder.class, method = \"selectListByRange\" )\n    List<V > selectListByRange( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"range\") QueryRange range );\n\n    @SelectProvider( type = DynamicQuerierSqlBuilder.class, method = \"selectMappedByRange\" )\n    @ResultType( LinkedTreeMap.class )\n    void selectMappedByRange0( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"handler\") ResultHandler<Map<Object, V > > handler, @Param(\"range\") QueryRange range );\n\n    @Override\n    default Map selectMappedByRange( RDBTargetTableMeta meta, QueryRange range ) {\n        DynamicQuerierMappedResultHandler<V> handler = new DynamicQuerierMappedResultHandler<>( meta, range );\n        this.selectMappedByRange0( meta, handler, range );\n        return handler.getResults();\n    }\n\n    @Override\n    default List<V > selectList( RDBTargetTableMeta meta, String szExSafeSQL ) {\n        DynamicQuerierEntityResultHandler<V> handler = new DynamicQuerierEntityResultHandler<>( meta );\n        this.selectList0( meta, handler, szExSafeSQL );\n        return handler.getResults();\n    }\n\n    @Override\n    @Select( \"${statement}\" )\n    List<Map > query ( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"statement\") String szStatementSQL );\n\n    default List<V > queryVal ( RDBTargetTableMeta meta, String szStatementSQL ) {\n        if( meta.getResultConverter() == null ) {\n            meta.setResultConverter( new GenericResultConverter<>( meta.getValueType(), meta.getValueMetaKeys() ) );\n        }\n        ResultConverter<V > converter   = meta.getResultConverter();\n        List<V > results = new ArrayList<>();\n        List<Map > raw   = this.query( meta, szStatementSQL );\n        for( Map map : raw ) {\n            results.add( converter.convert( map ) );\n        }\n\n        return results;\n    }\n\n    @Override\n    
@Select( \"SELECT MAX(${rangeKeyName}) FROM ${meta.tableName}\" )\n    Object getMaximumRangeVal( @Param(\"meta\")RDBTargetTableMeta meta, @Param(\"rangeKeyName\") String szRangeKeyName );\n\n    @Override\n    @Select( \"SELECT MIN(${rangeKeyName}) FROM ${meta.tableName}\" )\n    Object getMinimumRangeVal( @Param(\"meta\")RDBTargetTableMeta meta, @Param(\"rangeKeyName\") String szRangeKeyName );\n\n    @Override\n    @SelectProvider( type = DynamicQuerierSqlBuilder.class, method = \"selectListByColumn\" )\n    List<V > selectListByColumn( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"columnKey\") String szSpecificColumnKeyName, @Param(\"key\") Object key );\n\n    @Override\n    default V selectByKey( RDBTargetTableMeta meta, Object key ) {\n        DynamicQuerierEntityResultHandler<V> handler = new DynamicQuerierEntityResultHandler<>( meta );\n        this.selectList0( meta, handler, String.format( \" WHERE `%s` = %s\", meta.getIndexKey(), SQLStrings.format( key )) );\n        List<V > list = handler.getResults();\n        if( list != null && !list.isEmpty() ) {\n            return handler.getResults().get(0);\n        }\n        return null;\n    }\n\n    @InsertProvider( type = DynamicQuerierSqlBuilder.class, method = \"insert\" )\n    void insert( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"key\") K key, @Param(\"entity\") V entity );\n\n    @Override\n    @UpdateProvider( type = DynamicQuerierSqlBuilder.class, method = \"updateByEntity\" )\n    void update( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"key\") K key, @Param(\"entity\") V entity );\n\n    @Override\n    @DeleteProvider( type = DynamicQuerierSqlBuilder.class, method = \"deleteByKey\" )\n    void deleteByKey( @Param(\"meta\") RDBTargetTableMeta meta, @Param(\"key\") Object key );\n\n    @Override\n    @Update( \"TRUNCATE TABLE ${meta.tableName}\" )\n    void truncate( @Param(\"meta\") RDBTargetTableMeta meta );\n\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisClient.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.slime.source.DAOScanner;\nimport com.pinecone.slime.source.rdb.RDBClient;\nimport org.apache.ibatis.mapping.Environment;\nimport org.apache.ibatis.session.ExecutorType;\nimport org.apache.ibatis.session.SqlSession;\nimport org.apache.ibatis.session.Configuration;\nimport org.apache.ibatis.session.SqlSessionFactory;\nimport org.apache.ibatis.session.TransactionIsolationLevel;\n\nimport javax.sql.DataSource;\nimport java.sql.Connection;\nimport java.util.List;\n\npublic interface IbatisClient extends RDBClient {\n    Configuration     getConfiguration();\n\n    DataSource        getDataSource();\n\n    Environment       getEnvironment();\n\n    JSONObject        getIbatisConf();\n\n    String            getJDBCDriverName();\n\n    JSONObject        getClientConf();\n\n    DAOScanner        getDAOScanner();\n\n    <T> void addMapper( Class<T> type ) ;\n\n\n\n    SqlSessionFactory getSqlSessionFactory();\n\n    SqlSession        openSession();\n\n    SqlSession        openSession( boolean autoCommit );\n\n    SqlSession        openSession( Connection connection);\n\n    SqlSession        openSession( TransactionIsolationLevel level );\n\n    SqlSession        openSession( ExecutorType execType );\n\n    SqlSession        openSession( ExecutorType execType, boolean autoCommit );\n\n    SqlSession        openSession( ExecutorType execType, TransactionIsolationLevel level );\n\n    SqlSession        openSession( ExecutorType execType, Connection connection );\n\n\n\n    void addXMLObjectScope( String szPacketName );\n\n    void addXMLObjectScopeNoneSync( String szPacketName );\n\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisDAOScanner.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.ClassScopeNSProtocolIteratorsFactory;\nimport com.pinecone.framework.util.lang.NSProtocolIteratorsFactoryAdapter;\nimport com.pinecone.slime.source.DAOScanner;\nimport com.pinecone.ulf.util.lang.GenericPreloadClassInspector;\nimport com.pinecone.ulf.util.lang.HierarchyClassInspector;\nimport com.pinecone.ulf.util.lang.PooledClassCandidateScanner;\nimport com.pinecone.ulf.util.lang.SimpleAnnotationExcludeFilter;\nimport javassist.ClassPool;\n\npublic class IbatisDAOScanner extends PooledClassCandidateScanner implements DAOScanner {\n    protected HierarchyClassInspector mClassInspector     ;\n\n    public IbatisDAOScanner     ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory, ClassPool classPool ) {\n        super( searchScope, classLoader, iteratorsFactory, classPool );\n\n        this.mClassInspector = new GenericPreloadClassInspector( this.mClassPool );\n        this.addExcludeFilter( new SimpleAnnotationExcludeFilter( this.mClassInspector, IbatisDataAccessObject.class ) );\n    }\n\n    public IbatisDAOScanner     ( ClassScope searchScope, ClassLoader classLoader, ClassPool classPool ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), classPool );\n    }\n\n    public IbatisDAOScanner     ( ClassScope searchScope, ClassLoader classLoader ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), ClassPool.getDefault() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisDataAccessObject.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport java.lang.annotation.ElementType;\n\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface IbatisDataAccessObject {\n    String value() default \"\";\n\n    // Which databases or data-manipulator that affinity to.\n    // For multi databases scenario.\n    String scope() default \"\";\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisManipulatorProxyMapperFactory.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.slime.source.rdb.RDBQuerierDataManipulator;\nimport org.apache.ibatis.session.SqlSession;\n\nimport java.lang.reflect.InvocationHandler;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Proxy;\n\npublic class IbatisManipulatorProxyMapperFactory implements Pinenut {\n    static class ManipulatorProxyHandler<T extends RDBQuerierDataManipulator > implements InvocationHandler {\n        private final T original;\n        private final SqlSession sqlSession;\n\n        public ManipulatorProxyHandler( T original, SqlSession sqlSession ) {\n            this.original   = original;\n            this.sqlSession = sqlSession;\n        }\n\n        @Override\n        public Object invoke( Object proxy, Method method, Object[] args ) throws Throwable {\n            if ( \"commit\".equals( method.getName() ) ) {\n                this.sqlSession.commit();\n                return null;\n            }\n            return method.invoke( this.original, args );\n        }\n    }\n\n\n    @SuppressWarnings(\"unchecked\")\n    public static <T extends RDBQuerierDataManipulator > T getMapper( SqlSession sqlSession, Class<T > clazz ) {\n        T original = sqlSession.getMapper(clazz);\n        Class<? >[] interfaces = original.getClass().getInterfaces();\n\n        return (T) Proxy.newProxyInstance(\n                original.getClass().getClassLoader(),\n                interfaces,\n                new ManipulatorProxyHandler<>(original, sqlSession)\n        );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/IbatisXMLResourceScanner.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.GenericScopeNSProtocolIteratorsFactory;\nimport com.pinecone.framework.util.lang.NSProtocolIteratorsFactoryAdapter;\nimport com.pinecone.framework.util.lang.ObjectCandidateScanner;\nimport com.pinecone.slime.source.XMLResourceScanner;\n\npublic class IbatisXMLResourceScanner extends ObjectCandidateScanner implements XMLResourceScanner {\n\n    public IbatisXMLResourceScanner     ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory ) {\n        super( searchScope, classLoader, iteratorsFactory );\n    }\n\n    public IbatisXMLResourceScanner     ( ClassScope searchScope, ClassLoader classLoader ) {\n        this( searchScope, classLoader, new GenericScopeNSProtocolIteratorsFactory( classLoader, searchScope, \".xml\" ) );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/ProxySessionMapperPool.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport org.apache.ibatis.session.ExecutorType;\n\nimport com.pinecone.framework.system.construction.InstancePool;\nimport com.pinecone.slime.jelly.source.ibatis.proxy.SqlSessionTemplate;\n\npublic class ProxySessionMapperPool implements InstancePool {\n    protected IbatisClient         mIbatisClient;\n    protected Class<? >            mType;\n    protected SqlSessionTemplate   mSqlSessionTemplate;\n\n    protected ProxySessionMapperPool( IbatisClient ibatisClient, Class<? > type, Void dummy ) {\n        this.mIbatisClient       = ibatisClient;\n        this.mType               = type;\n    }\n\n    public ProxySessionMapperPool( IbatisClient ibatisClient, Class<? > type, ExecutorType executorType ) {\n        this( ibatisClient, type, (Void) null );\n        this.mSqlSessionTemplate = new SqlSessionTemplate( ibatisClient.getSqlSessionFactory(), executorType );\n    }\n\n    public ProxySessionMapperPool( IbatisClient ibatisClient, Class<? > type ) {\n        this( ibatisClient, type, (Void) null );\n        this.mSqlSessionTemplate = new SqlSessionTemplate( ibatisClient.getSqlSessionFactory() );\n    }\n\n    @Override\n    public Object allocate() {\n        return this.mSqlSessionTemplate.getMapper ( this.mType );\n    }\n\n    @Override\n    public void free( Object obj ) {\n\n    }\n\n    @Override\n    public int freeSize() {\n        return Integer.MAX_VALUE;\n    }\n\n    @Override\n    public int pooledSize() {\n        return 0;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return false;\n    }\n\n    @Override\n    public void preAllocate( int count ) {\n    }\n\n    @Override\n    public void setCapacity( int capacity ) {\n    }\n\n    @Override\n    public int getCapacity() {\n        return 0;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/SoloSessionMapperPool.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis;\n\nimport com.pinecone.framework.system.construction.InstancePool;\nimport org.apache.ibatis.session.SqlSession;\n\npublic class SoloSessionMapperPool implements InstancePool {\n    protected SqlSession        mSqlSession;\n    protected Class<? >         mType;\n\n    public SoloSessionMapperPool( SqlSession sqlSession, Class<? > type ) {\n        this.mSqlSession = sqlSession;\n        this.mType       = type;\n    }\n\n    @Override\n    public Object allocate() {\n        return this.mSqlSession.getMapper( this.mType );\n    }\n\n    @Override\n    public void free( Object obj ) {\n\n    }\n\n    @Override\n    public int freeSize() {\n        return Integer.MAX_VALUE;\n    }\n\n    @Override\n    public int pooledSize() {\n        return 0;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return false;\n    }\n\n    @Override\n    public void preAllocate( int count ) {\n    }\n\n    @Override\n    public void setCapacity( int capacity ) {\n    }\n\n    @Override\n    public int getCapacity() {\n        return 0;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/MyBatisExceptionTranslator.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis.proxy;\n\nimport java.sql.SQLException;\nimport java.util.function.Supplier;\nimport javax.sql.DataSource;\nimport org.apache.ibatis.exceptions.PersistenceException;\n//import org.springframework.dao.DataAccessException;\n//import org.springframework.dao.support.PersistenceExceptionTranslator;\n//import org.springframework.jdbc.UncategorizedSQLException;\n\n\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.PersistenceExceptionTranslator;\nimport com.pinecone.slime.jelly.source.ds.jdbc.SQLErrorCodeSQLExceptionTranslator;\nimport com.pinecone.slime.jelly.source.ds.jdbc.SQLExceptionTranslator;\nimport com.pinecone.slime.jelly.source.ds.jdbc.UncategorizedSQLException;\nimport com.pinecone.slime.jelly.source.ds.transaction.TransactionException;\n//import org.springframework.jdbc.support.SQLExceptionTranslator;\n//import org.springframework.transaction.TransactionException;\n\npublic class MyBatisExceptionTranslator implements PersistenceExceptionTranslator {\n    private final Supplier<SQLExceptionTranslator> exceptionTranslatorSupplier;\n    private SQLExceptionTranslator exceptionTranslator;\n\n    public MyBatisExceptionTranslator(DataSource dataSource, boolean exceptionTranslatorLazyInit) {\n        this(() -> {\n            return new SQLErrorCodeSQLExceptionTranslator(dataSource);\n        }, exceptionTranslatorLazyInit);\n    }\n\n    public MyBatisExceptionTranslator(Supplier<SQLExceptionTranslator> exceptionTranslatorSupplier, boolean exceptionTranslatorLazyInit) {\n        this.exceptionTranslatorSupplier = exceptionTranslatorSupplier;\n        if (!exceptionTranslatorLazyInit) {\n            this.initExceptionTranslator();\n        }\n\n    }\n\n    @Override\n    public DataAccessException translateExceptionIfPossible(RuntimeException e) {\n        if (e instanceof PersistenceException) {\n            if 
(((RuntimeException)e).getCause() instanceof PersistenceException) {\n                e = (PersistenceException)((RuntimeException)e).getCause();\n            }\n\n            if (((RuntimeException)e).getCause() instanceof SQLException) {\n                this.initExceptionTranslator();\n                String task = ((RuntimeException)e).getMessage() + \"\\n\";\n                SQLException se = (SQLException)((RuntimeException)e).getCause();\n                DataAccessException dae = this.exceptionTranslator.translate(task, (String)null, se);\n                return (DataAccessException)(dae != null ? dae : new UncategorizedSQLException(task, (String)null, se));\n            }\n            else if (((RuntimeException)e).getCause() instanceof TransactionException) {\n                throw (TransactionException)((RuntimeException)e).getCause();\n            }\n            else {\n                return new MyBatisSystemException((Throwable)e);\n            }\n        }\n        else {\n            return null;\n        }\n    }\n\n    private synchronized void initExceptionTranslator() {\n        if (this.exceptionTranslator == null) {\n            this.exceptionTranslator = (SQLExceptionTranslator)this.exceptionTranslatorSupplier.get();\n        }\n\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/MyBatisSystemException.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis.proxy;\n\nimport com.pinecone.slime.jelly.source.ds.jdbc.UncategorizedDataAccessException;\n\npublic class MyBatisSystemException extends UncategorizedDataAccessException {\n    private static final long serialVersionUID = 1284728621670758938L;\n\n    public MyBatisSystemException( Throwable cause ) {\n        super((String)null, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/SqlSessionHolder.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis.proxy;\n\nimport org.apache.ibatis.session.ExecutorType;\nimport org.apache.ibatis.session.SqlSession;\n\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.slime.jelly.source.ds.dao.PersistenceExceptionTranslator;\nimport com.pinecone.slime.jelly.source.ds.transaction.ResourceHolderSupport;\n\npublic final class SqlSessionHolder extends ResourceHolderSupport {\n    private final SqlSession sqlSession;\n    private final ExecutorType executorType;\n    private final PersistenceExceptionTranslator exceptionTranslator;\n\n    public SqlSessionHolder( SqlSession sqlSession, ExecutorType executorType, PersistenceExceptionTranslator exceptionTranslator ) {\n        Assert.notNull(sqlSession, \"SqlSession must not be null\");\n        Assert.notNull(executorType, \"ExecutorType must not be null\");\n        this.sqlSession = sqlSession;\n        this.executorType = executorType;\n        this.exceptionTranslator = exceptionTranslator;\n    }\n\n    public SqlSession getSqlSession() {\n        return this.sqlSession;\n    }\n\n    public ExecutorType getExecutorType() {\n        return this.executorType;\n    }\n\n    public PersistenceExceptionTranslator getPersistenceExceptionTranslator() {\n        return this.exceptionTranslator;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/SqlSessionTemplate.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis.proxy;\n\nimport java.lang.reflect.InvocationHandler;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Proxy;\nimport java.sql.Connection;\nimport java.util.List;\nimport java.util.Map;\nimport org.apache.ibatis.cursor.Cursor;\nimport org.apache.ibatis.exceptions.PersistenceException;\nimport org.apache.ibatis.executor.BatchResult;\nimport org.apache.ibatis.reflection.ExceptionUtil;\nimport org.apache.ibatis.session.Configuration;\nimport org.apache.ibatis.session.ExecutorType;\nimport org.apache.ibatis.session.ResultHandler;\nimport org.apache.ibatis.session.RowBounds;\nimport org.apache.ibatis.session.SqlSession;\nimport org.apache.ibatis.session.SqlSessionFactory;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.slime.jelly.source.ds.dao.PersistenceExceptionTranslator;\n\npublic class SqlSessionTemplate implements SqlSession, Pinenut {\n    private final SqlSessionFactory sqlSessionFactory;\n    private final ExecutorType executorType;\n    private final SqlSession sqlSessionProxy;\n    private final PersistenceExceptionTranslator exceptionTranslator;\n\n    public SqlSessionTemplate( SqlSessionFactory sqlSessionFactory ) {\n        this(sqlSessionFactory, sqlSessionFactory.getConfiguration().getDefaultExecutorType() );\n    }\n\n    public SqlSessionTemplate( SqlSessionFactory sqlSessionFactory, ExecutorType executorType ) {\n        this(sqlSessionFactory, executorType, new MyBatisExceptionTranslator( sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(), true) );\n    }\n\n    public SqlSessionTemplate( SqlSessionFactory sqlSessionFactory, ExecutorType executorType, PersistenceExceptionTranslator exceptionTranslator ) {\n        Assert.notNull(sqlSessionFactory, \"Property 'sqlSessionFactory' is required\");\n        Assert.notNull(executorType, \"Property 'executorType' is required\");\n        
this.sqlSessionFactory = sqlSessionFactory;\n        this.executorType = executorType;\n        this.exceptionTranslator = exceptionTranslator;\n        this.sqlSessionProxy = (SqlSession)Proxy.newProxyInstance(SqlSessionFactory.class.getClassLoader(), new Class[]{SqlSession.class}, new SqlSessionTemplate.SqlSessionInterceptor());\n    }\n\n    public SqlSessionFactory getSqlSessionFactory() {\n        return this.sqlSessionFactory;\n    }\n\n    public ExecutorType getExecutorType() {\n        return this.executorType;\n    }\n\n    public PersistenceExceptionTranslator getPersistenceExceptionTranslator() {\n        return this.exceptionTranslator;\n    }\n\n    @Override\n    public <T> T selectOne(String statement) {\n        return this.sqlSessionProxy.selectOne(statement);\n    }\n\n    @Override\n    public <T> T selectOne(String statement, Object parameter) {\n        return this.sqlSessionProxy.selectOne(statement, parameter);\n    }\n\n    @Override\n    public <K, V> Map<K, V> selectMap(String statement, String mapKey) {\n        return this.sqlSessionProxy.selectMap(statement, mapKey);\n    }\n\n    @Override\n    public <K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey) {\n        return this.sqlSessionProxy.selectMap(statement, parameter, mapKey);\n    }\n\n    @Override\n    public <K, V> Map<K, V> selectMap(String statement, Object parameter, String mapKey, RowBounds rowBounds) {\n        return this.sqlSessionProxy.selectMap(statement, parameter, mapKey, rowBounds);\n    }\n\n    @Override\n    public <T> Cursor<T> selectCursor(String statement) {\n        return this.sqlSessionProxy.selectCursor(statement);\n    }\n\n    @Override\n    public <T> Cursor<T> selectCursor(String statement, Object parameter) {\n        return this.sqlSessionProxy.selectCursor(statement, parameter);\n    }\n\n    @Override\n    public <T> Cursor<T> selectCursor(String statement, Object parameter, RowBounds rowBounds) {\n        return 
this.sqlSessionProxy.selectCursor(statement, parameter, rowBounds);\n    }\n\n    @Override\n    public <E> List<E> selectList(String statement) {\n        return this.sqlSessionProxy.selectList(statement);\n    }\n\n    @Override\n    public <E> List<E> selectList(String statement, Object parameter) {\n        return this.sqlSessionProxy.selectList(statement, parameter);\n    }\n\n    @Override\n    public <E> List<E> selectList(String statement, Object parameter, RowBounds rowBounds) {\n        return this.sqlSessionProxy.selectList(statement, parameter, rowBounds);\n    }\n\n    @Override\n    public void select(String statement, ResultHandler handler) {\n        this.sqlSessionProxy.select(statement, handler);\n    }\n\n    @Override\n    public void select(String statement, Object parameter, ResultHandler handler) {\n        this.sqlSessionProxy.select(statement, parameter, handler);\n    }\n\n    @Override\n    public void select(String statement, Object parameter, RowBounds rowBounds, ResultHandler handler) {\n        this.sqlSessionProxy.select(statement, parameter, rowBounds, handler);\n    }\n\n    @Override\n    public int insert(String statement) {\n        return this.sqlSessionProxy.insert(statement);\n    }\n\n    @Override\n    public int insert(String statement, Object parameter) {\n        return this.sqlSessionProxy.insert(statement, parameter);\n    }\n\n    @Override\n    public int update(String statement) {\n        return this.sqlSessionProxy.update(statement);\n    }\n\n    @Override\n    public int update(String statement, Object parameter) {\n        return this.sqlSessionProxy.update(statement, parameter);\n    }\n\n    @Override\n    public int delete(String statement) {\n        return this.sqlSessionProxy.delete(statement);\n    }\n\n    @Override\n    public int delete(String statement, Object parameter) {\n        return this.sqlSessionProxy.delete(statement, parameter);\n    }\n\n    @Override\n    public <T> T getMapper(Class<T> 
type) {\n        return this.getConfiguration().getMapper(type, this);\n    }\n\n    @Override\n    public void commit() {\n        throw new UnsupportedOperationException(\"Manual commit is not allowed over a Hydra managed SqlSession\");\n    }\n\n    @Override\n    public void commit(boolean force) {\n        throw new UnsupportedOperationException(\"Manual commit is not allowed over a Hydra managed SqlSession\");\n    }\n\n    @Override\n    public void rollback() {\n        throw new UnsupportedOperationException(\"Manual rollback is not allowed over a Hydra managed SqlSession\");\n    }\n\n    @Override\n    public void rollback(boolean force) {\n        throw new UnsupportedOperationException(\"Manual rollback is not allowed over a Hydra managed SqlSession\");\n    }\n\n    @Override\n    public void close() {\n        throw new UnsupportedOperationException(\"Manual close is not allowed over a Hydra managed SqlSession\");\n    }\n\n    @Override\n    public void clearCache() {\n        this.sqlSessionProxy.clearCache();\n    }\n\n    @Override\n    public Configuration getConfiguration() {\n        return this.sqlSessionFactory.getConfiguration();\n    }\n\n    @Override\n    public Connection getConnection() {\n        return this.sqlSessionProxy.getConnection();\n    }\n\n    @Override\n    public List<BatchResult> flushStatements() {\n        return this.sqlSessionProxy.flushStatements();\n    }\n\n    //@Override\n    public void destroy() throws Exception {\n    }\n\n    private class SqlSessionInterceptor implements InvocationHandler {\n        private SqlSessionInterceptor() {\n        }\n\n        @Override\n        public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {\n            SqlSession sqlSession = SqlSessionUtils.getSqlSession(SqlSessionTemplate.this.sqlSessionFactory, SqlSessionTemplate.this.executorType, SqlSessionTemplate.this.exceptionTranslator);\n\n            Object unwrapped;\n            try {\n          
      Object result = method.invoke(sqlSession, args);\n                if (!SqlSessionUtils.isSqlSessionTransactional(sqlSession, SqlSessionTemplate.this.sqlSessionFactory)) {\n                    sqlSession.commit(true);\n                }\n\n                unwrapped = result;\n            }\n            catch ( Throwable e ) {\n                unwrapped = ExceptionUtil.unwrapThrowable( e );\n                if ( SqlSessionTemplate.this.exceptionTranslator != null && unwrapped instanceof PersistenceException ) {\n                    SqlSessionUtils.closeSqlSession(sqlSession, SqlSessionTemplate.this.sqlSessionFactory);\n                    sqlSession = null;\n                    Throwable translated = SqlSessionTemplate.this.exceptionTranslator.translateExceptionIfPossible((PersistenceException)unwrapped);\n                    if ( translated != null ) {\n                        unwrapped = translated;\n                    }\n                }\n\n                throw (Throwable) unwrapped;\n            }\n            finally {\n                if (sqlSession != null) {\n                    SqlSessionUtils.closeSqlSession( sqlSession, SqlSessionTemplate.this.sqlSessionFactory );\n                }\n            }\n\n            return unwrapped;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/ibatis/proxy/SqlSessionUtils.java",
    "content": "package com.pinecone.slime.jelly.source.ibatis.proxy;\n\nimport org.apache.ibatis.exceptions.PersistenceException;\nimport org.apache.ibatis.mapping.Environment;\nimport org.apache.ibatis.session.ExecutorType;\nimport org.apache.ibatis.session.SqlSession;\nimport org.apache.ibatis.session.SqlSessionFactory;\nimport org.mybatis.logging.Logger;\nimport org.mybatis.logging.LoggerFactory;\n//import org.mybatis.spring.SqlSessionHolder;\nimport org.mybatis.spring.transaction.SpringManagedTransactionFactory;\n//import org.springframework.dao.DataAccessException;\n//import org.springframework.dao.TransientDataAccessResourceException;\n//import org.springframework.dao.support.PersistenceExceptionTranslator;\n//import org.springframework.transaction.support.TransactionSynchronizationAdapter;\n//import org.springframework.transaction.support.TransactionSynchronizationManager;\n//import org.springframework.util.Assert;\n\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.slime.jelly.source.ds.dao.DataAccessException;\nimport com.pinecone.slime.jelly.source.ds.dao.PersistenceExceptionTranslator;\nimport com.pinecone.slime.jelly.source.ds.jdbc.TransientDataAccessResourceException;\nimport com.pinecone.slime.jelly.source.ds.transaction.TransactionSynchronizationAdapter;\nimport com.pinecone.slime.jelly.source.ds.transaction.TransactionSynchronizationManager;\n\npublic final class SqlSessionUtils {\n    private static final Logger LOGGER = LoggerFactory.getLogger(SqlSessionUtils.class);\n    private static final String NO_EXECUTOR_TYPE_SPECIFIED = \"No ExecutorType specified\";\n    private static final String NO_SQL_SESSION_FACTORY_SPECIFIED = \"No SqlSessionFactory specified\";\n    private static final String NO_SQL_SESSION_SPECIFIED = \"No SqlSession specified\";\n\n    private SqlSessionUtils() {\n    }\n\n    public static SqlSession getSqlSession(SqlSessionFactory sessionFactory) {\n        ExecutorType executorType = 
sessionFactory.getConfiguration().getDefaultExecutorType();\n        return getSqlSession(sessionFactory, executorType, (PersistenceExceptionTranslator)null);\n    }\n\n    public static SqlSession getSqlSession(SqlSessionFactory sessionFactory, ExecutorType executorType, PersistenceExceptionTranslator exceptionTranslator) {\n        Assert.notNull(sessionFactory, \"No SqlSessionFactory specified\");\n        Assert.notNull(executorType, \"No ExecutorType specified\");\n        SqlSessionHolder holder = (SqlSessionHolder) TransactionSynchronizationManager.getResource(sessionFactory);\n        SqlSession session = sessionHolder(executorType, holder);\n        if (session != null) {\n            return session;\n        }\n        else {\n            LOGGER.debug(() -> {\n                return \"Creating a new SqlSession\";\n            });\n            session = sessionFactory.openSession(executorType);\n            registerSessionHolder(sessionFactory, executorType, exceptionTranslator, session);\n            return session;\n        }\n    }\n\n    private static void registerSessionHolder(SqlSessionFactory sessionFactory, ExecutorType executorType, PersistenceExceptionTranslator exceptionTranslator, SqlSession session) {\n        if (TransactionSynchronizationManager.isSynchronizationActive()) {\n            Environment environment = sessionFactory.getConfiguration().getEnvironment();\n            if (environment.getTransactionFactory() instanceof SpringManagedTransactionFactory) {\n                LOGGER.debug(() -> {\n                    return \"Registering transaction synchronization for SqlSession [\" + session + \"]\";\n                });\n                SqlSessionHolder holder = new SqlSessionHolder(session, executorType, exceptionTranslator);\n                TransactionSynchronizationManager.bindResource(sessionFactory, holder);\n                TransactionSynchronizationManager.registerSynchronization(new 
SqlSessionUtils.SqlSessionSynchronization(holder, sessionFactory));\n                holder.setSynchronizedWithTransaction(true);\n                holder.requested();\n            }\n            else {\n                if (TransactionSynchronizationManager.getResource(environment.getDataSource()) != null) {\n                    throw new TransientDataAccessResourceException(\"SqlSessionFactory must be using a SpringManagedTransactionFactory in order to use Hydra transaction synchronization\");\n                }\n\n                LOGGER.debug(() -> {\n                    return \"SqlSession [\" + session + \"] was not registered for synchronization because DataSource is not transactional\";\n                });\n            }\n        }\n        else {\n            LOGGER.debug(() -> {\n                return \"SqlSession [\" + session + \"] was not registered for synchronization because synchronization is not active\";\n            });\n        }\n\n    }\n\n    private static SqlSession sessionHolder(ExecutorType executorType, SqlSessionHolder holder) {\n        SqlSession session = null;\n        if ( holder != null && holder.isSynchronizedWithTransaction() ) {\n            if (holder.getExecutorType() != executorType) {\n                throw new TransientDataAccessResourceException(\"Cannot change the ExecutorType when there is an existing transaction\");\n            }\n\n            holder.requested();\n            LOGGER.debug(() -> {\n                return \"Fetched SqlSession [\" + holder.getSqlSession() + \"] from current transaction\";\n            });\n            session = holder.getSqlSession();\n        }\n\n        return session;\n    }\n\n    public static void closeSqlSession(SqlSession session, SqlSessionFactory sessionFactory) {\n        Assert.notNull(session, \"No SqlSession specified\");\n        Assert.notNull(sessionFactory, \"No SqlSessionFactory specified\");\n        SqlSessionHolder holder = 
(SqlSessionHolder)TransactionSynchronizationManager.getResource(sessionFactory);\n        if (holder != null && holder.getSqlSession() == session) {\n            LOGGER.debug(() -> {\n                return \"Releasing transactional SqlSession [\" + session + \"]\";\n            });\n            holder.released();\n        } else {\n            LOGGER.debug(() -> {\n                return \"Closing non transactional SqlSession [\" + session + \"]\";\n            });\n            session.close();\n        }\n\n    }\n\n    public static boolean isSqlSessionTransactional(SqlSession session, SqlSessionFactory sessionFactory) {\n        Assert.notNull(session, \"No SqlSession specified\");\n        Assert.notNull(sessionFactory, \"No SqlSessionFactory specified\");\n        SqlSessionHolder holder = (SqlSessionHolder)TransactionSynchronizationManager.getResource(sessionFactory);\n        return holder != null && holder.getSqlSession() == session;\n    }\n\n    private static final class SqlSessionSynchronization extends TransactionSynchronizationAdapter {\n        private final SqlSessionHolder holder;\n        private final SqlSessionFactory sessionFactory;\n        private boolean holderActive = true;\n\n        public SqlSessionSynchronization(SqlSessionHolder holder, SqlSessionFactory sessionFactory) {\n            Assert.notNull(holder, \"Parameter 'holder' must be not null\");\n            Assert.notNull(sessionFactory, \"Parameter 'sessionFactory' must be not null\");\n            this.holder = holder;\n            this.sessionFactory = sessionFactory;\n        }\n\n        public int getOrder() {\n            return 999;\n        }\n\n        public void suspend() {\n            if (this.holderActive) {\n                SqlSessionUtils.LOGGER.debug(() -> {\n                    return \"Transaction synchronization suspending SqlSession [\" + this.holder.getSqlSession() + \"]\";\n                });\n                
TransactionSynchronizationManager.unbindResource(this.sessionFactory);\n            }\n\n        }\n\n        public void resume() {\n            if (this.holderActive) {\n                SqlSessionUtils.LOGGER.debug(() -> {\n                    return \"Transaction synchronization resuming SqlSession [\" + this.holder.getSqlSession() + \"]\";\n                });\n                TransactionSynchronizationManager.bindResource(this.sessionFactory, this.holder);\n            }\n\n        }\n\n        public void beforeCommit(boolean readOnly) {\n            if (TransactionSynchronizationManager.isActualTransactionActive()) {\n                try {\n                    SqlSessionUtils.LOGGER.debug(() -> {\n                        return \"Transaction synchronization committing SqlSession [\" + this.holder.getSqlSession() + \"]\";\n                    });\n                    this.holder.getSqlSession().commit();\n                } catch (PersistenceException var4) {\n                    if (this.holder.getPersistenceExceptionTranslator() != null) {\n                        DataAccessException translated = this.holder.getPersistenceExceptionTranslator().translateExceptionIfPossible(var4);\n                        if (translated != null) {\n                            throw translated;\n                        }\n                    }\n\n                    throw var4;\n                }\n            }\n\n        }\n\n        public void beforeCompletion() {\n            if (!this.holder.isOpen()) {\n                SqlSessionUtils.LOGGER.debug(() -> {\n                    return \"Transaction synchronization deregistering SqlSession [\" + this.holder.getSqlSession() + \"]\";\n                });\n                TransactionSynchronizationManager.unbindResource(this.sessionFactory);\n                this.holderActive = false;\n                SqlSessionUtils.LOGGER.debug(() -> {\n                    return \"Transaction synchronization closing SqlSession [\" + 
this.holder.getSqlSession() + \"]\";\n                });\n                this.holder.getSqlSession().close();\n            }\n\n        }\n\n        public void afterCompletion(int status) {\n            if (this.holderActive) {\n                SqlSessionUtils.LOGGER.debug(() -> {\n                    return \"Transaction synchronization deregistering SqlSession [\" + this.holder.getSqlSession() + \"]\";\n                });\n                TransactionSynchronizationManager.unbindResourceIfPossible(this.sessionFactory);\n                this.holderActive = false;\n                SqlSessionUtils.LOGGER.debug(() -> {\n                    return \"Transaction synchronization closing SqlSession [\" + this.holder.getSqlSession() + \"]\";\n                });\n                this.holder.getSqlSession().close();\n            }\n\n            this.holder.reset();\n        }\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/memcached/GenericMemcachedManipulator.java",
    "content": "package com.pinecone.slime.jelly.source.memcached;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.slime.jelly.source.NamespacedKey;\nimport com.pinecone.slime.source.GenericResultConverter;\nimport com.pinecone.slime.source.indexable.IndexableTargetScopeMeta;\nimport net.spy.memcached.MemcachedClient;\n\nimport java.io.Serializable;\nimport java.net.SocketAddress;\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Collection;\nimport java.util.concurrent.ExecutionException;\nimport java.util.concurrent.Future;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\n\npublic class GenericMemcachedManipulator<V extends Serializable > implements MemcachedManipulator<String, V > {\n    private final MemcachedClient   mMemClient;\n    private final String            mszNameSeparator;\n    protected int                   mnExpireTime;\n\n    public GenericMemcachedManipulator( MemcachedClient client, String szSeparator, int expire ) {\n        this.mMemClient       = client;\n        this.mszNameSeparator = szSeparator;\n        this.mnExpireTime     = expire;\n    }\n\n    public GenericMemcachedManipulator( MemcachedClient client, String szSeparator ) {\n        this( client, szSeparator, 0 );\n    }\n\n    public GenericMemcachedManipulator( MemcachedClient client ) {\n        this( client, \":\" );\n    }\n\n\n    @Override\n    public MemcachedClient getClient() {\n        return this.mMemClient;\n    }\n\n    private String getFullKey(IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        return NamespacedKey.getFullKey( meta, this.mszNameSeparator, szNamespace, key );\n    }\n\n    @Override\n    public long counts( IndexableTargetScopeMeta meta, String szScopeKey ) {\n        return 
this.countsNS( meta, szScopeKey );\n    }\n\n    @Override\n    public long countsByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        if( this.selectByNS( meta, szNamespace, key ) != null ){\n            return 1;\n        }\n        return 0;\n    }\n\n    @Override\n    public long countsNS( IndexableTargetScopeMeta meta, String szNamespace ) {\n        boolean bEN = StringUtils.isEmpty( szNamespace );\n\n        long count = 0;\n        Map<SocketAddress, Map<String, String > > items = this.mMemClient.getStats( \"items\" );\n\n        for ( Map.Entry<SocketAddress, Map<String, String > > entry : items.entrySet() ) {\n            Map<String, String > itemMap = entry.getValue();\n            for ( String key : itemMap.keySet() ) {\n                if ( key.startsWith(\"items:\") ) {\n                    String[] parts = key.split(\":\");\n                    if ( parts.length > 2 && \"number\".equals(parts[2]) ) {\n                        int slabNumber = Integer.parseInt(parts[1]);\n                        int limit = Integer.parseInt( itemMap.get(key) );\n                        Map<SocketAddress, Map<String, String> > dump = this.mMemClient.getStats( \"cachedump \" + slabNumber + \" \" + limit );\n                        for ( Map<String, String > dumpMap : dump.values() ) {\n                            for( String k : dumpMap.keySet() ){\n                                if( bEN || k.startsWith( szNamespace ) ) {\n                                    ++count;\n                                }\n                            }\n                        }\n                    }\n                }\n            }\n        }\n        return count;\n    }\n\n    @Override\n    public List query( IndexableTargetScopeMeta meta, String szStatement ) {\n        throw new UnsupportedOperationException( \"Query method not supported for GenericMemcachedManipulator.\" );\n    }\n\n    @Override\n    public List<V> queryVal( IndexableTargetScopeMeta 
meta, String szStatement ) {\n        throw new UnsupportedOperationException( \"QueryVal method not supported for GenericMemcachedManipulator.\" );\n    }\n\n    @Override\n    public Object selectAllByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        if( key != null && szNamespace != null ) {\n            return this.mMemClient.get( this.getFullKey( meta, szNamespace, key ) );\n        }\n\n        Map<String, Object > map = new LinkedHashMap<>();\n        if( szNamespace == null ) {\n            Collection<String > keys = this.keys();\n            for( String k : keys ) {\n                map.put( k, this.mMemClient.get( k ) );\n            }\n        }\n        else {\n            Collection<String > keys = this.keys();\n            for( String k : keys ) {\n                if( k.startsWith( szNamespace ) ) {\n                    map.put( k, this.mMemClient.get( k ) );\n                }\n            }\n        }\n\n        return map;\n    }\n\n    @Override\n    public List<V > selectsByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        return List.of( this.selectByNS( meta, szNamespace, key ) );\n    }\n\n    @Override\n    public V selectByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        return selectByKey( meta, this.getFullKey( meta, szNamespace, key ) );\n    }\n\n    @Override\n    public V selectByKey( IndexableTargetScopeMeta meta, Object key ) {\n        if ( meta.getResultConverter() == null ) {\n            meta.setResultConverter( new GenericResultConverter<>( meta.getValueType(), meta.getValueMetaKeys() ) );\n        }\n        return meta.<V >getResultConverter().convert( this.mMemClient.get( key.toString() ) );\n    }\n\n    protected void insert0( IndexableTargetScopeMeta meta, String szKey, V entity ) {\n        Future<Boolean > setFuture = this.mMemClient.set( szKey, this.mnExpireTime, entity );\n        try{\n            if( !setFuture.get( 5, 
TimeUnit.SECONDS ) ){\n                throw new IllegalStateException( \"Unseated key: \" + szKey );\n            }\n        }\n        catch ( TimeoutException | ExecutionException | InterruptedException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public void insert( IndexableTargetScopeMeta meta, String key, V entity, long expireMill ) {\n        int expireSeconds = (int) (expireMill / 1000);\n\n        Future<Boolean> setFuture = this.mMemClient.set(key, expireSeconds, entity);\n        try {\n            if ( !setFuture.get( 5, TimeUnit.SECONDS ) ) {\n                throw new IllegalStateException(\"Failed to insert key: \" + key);\n            }\n        }\n        catch ( TimeoutException | ExecutionException | InterruptedException e ) {\n            throw new ProxyProvokeHandleException(e);\n        }\n    }\n\n    @Override\n    public void insertByNS( IndexableTargetScopeMeta meta, String szNamespace, String key, V entity ) {\n        String scopeKey = this.getFullKey( meta, szNamespace, key );\n        this.insert0( meta, scopeKey, entity );\n    }\n\n    @Override\n    public void insert( IndexableTargetScopeMeta meta, String key, V entity ) {\n        this.insert0( meta, key.toString(), entity );\n    }\n\n    @Override\n    public void updateByNS( IndexableTargetScopeMeta meta, String szNamespace, String key, V entity ) {\n        this.insertByNS( meta, szNamespace, key, entity );\n    }\n\n    @Override\n    public void update( IndexableTargetScopeMeta meta, String key, V entity ) {\n        this.insert( meta, key, entity );\n    }\n\n    @Override\n    public void deleteByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        String scopeKey = this.getFullKey( meta, szNamespace,key );\n        this.deleteByKey( meta, scopeKey );\n    }\n\n    @Override\n    public void deleteByKey( IndexableTargetScopeMeta meta, Object key ) {\n        Future<Boolean > setFuture = 
this.mMemClient.delete( key.toString() );\n\n        try{\n            if( !setFuture.get( 5, TimeUnit.SECONDS ) ){\n                throw new IllegalStateException( \"Deletion compromised, with key: \" + key );\n            }\n        }\n        catch ( TimeoutException | ExecutionException | InterruptedException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public void purge( IndexableTargetScopeMeta meta ) {\n        this.purgeByNS( meta, meta.getScopeNS() );\n    }\n\n    @Override\n    public void purgeByNS( IndexableTargetScopeMeta meta, String szNamespace ) {\n        if( szNamespace != null && !szNamespace.isEmpty() ) {\n            Collection<String > keys = this.keys();\n            for( String k : keys ) {\n                if( k.startsWith( szNamespace ) ) {\n                    this.deleteByKey( meta, k );\n                }\n            }\n        }\n        else {\n            this.mMemClient.flush();\n        }\n    }\n\n    @Override\n    public void commit() {\n        // Memcached operations are atomic, no explicit commit needed.\n    }\n\n    @Override\n    public Iterator<String > keysIterator( IndexableTargetScopeMeta meta ) {\n        return this.keySet().iterator();\n    }\n\n    @Override\n    public Iterator<Map.Entry<String, V > > iterator( IndexableTargetScopeMeta meta ) {\n        return new EntryIterator( meta );\n    }\n\n    protected final class EntryIterator implements Iterator<Map.Entry<String, V > > {\n        Iterator<String > keyIterator;\n        IndexableTargetScopeMeta meta;\n\n        EntryIterator( IndexableTargetScopeMeta meta ) {\n            this.meta = meta;\n            this.keyIterator = GenericMemcachedManipulator.this.keysIterator( meta );\n        }\n\n        @Override\n        public final boolean hasNext() {\n            return this.keyIterator.hasNext();\n        }\n\n        @Override\n        public final Map.Entry<String, V > next() {\n            String k = 
this.keyIterator.next();\n            return new KeyValue<>( k, GenericMemcachedManipulator.this.selectByKey( this.meta, k ) );\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/memcached/MemcachedManipulator.java",
    "content": "package com.pinecone.slime.jelly.source.memcached;\n\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.slime.source.indexable.IndexableIterableManipulator;\nimport net.spy.memcached.MemcachedClient;\n\nimport java.io.Serializable;\nimport java.net.SocketAddress;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.LinkedHashSet;\nimport java.util.Map;\n\npublic interface MemcachedManipulator<K extends String, V extends Serializable> extends IndexableIterableManipulator<K, V > {\n    MemcachedClient getClient();\n\n    default Collection<String > keys( Class<? extends Collection > stereo ) {\n        try{\n            MemcachedClient client = this.getClient();\n\n            Collection<String > allKeys = Units.newInstance( stereo );\n            Map<SocketAddress, Map<String, String > > items = client.getStats( \"items\" );\n\n            for ( Map.Entry<SocketAddress, Map<String, String > > entry : items.entrySet() ) {\n                Map<String, String > itemMap = entry.getValue();\n                for ( String key : itemMap.keySet() ) {\n                    if ( key.startsWith(\"items:\") ) {\n                        String[] parts = key.split(\":\");\n                        if ( parts.length > 2 && \"number\".equals(parts[2]) ) {\n                            int slabNumber = Integer.parseInt(parts[1]);\n                            int limit = Integer.parseInt( itemMap.get(key) );\n                            Map<SocketAddress, Map<String, String> > dump = client.getStats( \"cachedump \" + slabNumber + \" \" + limit );\n                            for  ( Map<String, String > dumpMap : dump.values() ) {\n                                allKeys.addAll( dumpMap.keySet() );\n                            }\n                        }\n                    }\n                }\n            }\n\n            return allKeys;\n        }\n        catch ( IllegalArgumentException e ) {\n            return this.keys();\n    
    }\n    }\n\n    default Collection<String > keys() {\n        return this.keys( ArrayList.class );\n    }\n\n    default Collection<String > keySet() {\n        return this.keys( LinkedHashSet.class );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/GenericRedisHashManipulator.java",
    "content": "package com.pinecone.slime.jelly.source.redis;\n\nimport com.pinecone.slime.source.GenericResultConverter;\nimport com.pinecone.slime.source.indexable.IndexableIterableManipulator;\nimport com.pinecone.slime.source.indexable.IndexableTargetScopeMeta;\n\nimport redis.clients.jedis.Jedis;\nimport redis.clients.jedis.ScanParams;\nimport redis.clients.jedis.ScanResult;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\n\npublic class GenericRedisHashManipulator<K extends String, V> implements IndexableIterableManipulator<K, V > {\n    private final Jedis mJedis;\n\n    public GenericRedisHashManipulator( Jedis jedis ) {\n        this.mJedis           = jedis;\n    }\n\n    private String getScopeKey( IndexableTargetScopeMeta meta, Object namespace ) {\n        if ( namespace != null && !\"\".equals( namespace ) ) {\n            return namespace.toString();\n        }\n        else if ( meta.getIndexKey() != null && !meta.getIndexKey().isEmpty() ) {\n            return meta.getIndexKey();\n        }\n        else {\n            throw new IllegalArgumentException( \"Both namespace and meta's index key are empty.\" );\n        }\n    }\n\n    @Override\n    public long counts( IndexableTargetScopeMeta meta, String szParentIndexKey ) {\n        return this.mJedis.hlen( this.getScopeKey( meta, szParentIndexKey ) );\n    }\n\n    @Override\n    public long countsByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key) {\n        String scopeKey = this.getScopeKey( meta, szParentIndexKey );\n        if ( this.mJedis.hexists( scopeKey, key.toString()) ) {\n            return 1;\n        }\n        return 0;\n    }\n\n    @Override\n    public long countsNS( IndexableTargetScopeMeta meta, String szNamespace ) {\n        long fieldCount = 0;\n        String cursor = ScanParams.SCAN_POINTER_START;\n        ScanParams scanParams = new ScanParams().match( szNamespace + \"*\" ).count( 1000 );\n\n        do {\n            
ScanResult<Map.Entry<String, String>> scanResult = this.mJedis.hscan( meta.getIndexKey(), cursor, scanParams );\n            fieldCount += scanResult.getResult().size();\n            cursor = scanResult.getCursor();\n        }\n        while (!cursor.equals(ScanParams.SCAN_POINTER_START));\n\n        return fieldCount;\n    }\n\n    @Override\n    public List query( IndexableTargetScopeMeta meta, String szStatement ) {\n        throw new UnsupportedOperationException( \"Query method not supported for Redis Hash manipulator.\" );\n    }\n\n    @Override\n    public List<V> queryVal( IndexableTargetScopeMeta meta, String szStatement ) {\n        throw new UnsupportedOperationException( \"QueryVal method not supported for Redis Hash manipulator.\" );\n    }\n\n    @Override\n    public Object selectAllByNS ( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key ) {\n        if( key == null ) {\n            String scopeKey = this.getScopeKey( meta, szParentIndexKey );\n            return this.mJedis.hgetAll( scopeKey );\n        }\n        else  {\n            return this.selectsByNS( meta, szParentIndexKey, key );\n        }\n    }\n\n    @Override\n    public List<V > selectsByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key ) {\n        return List.of( this.selectByNS( meta, szParentIndexKey, key ) );\n    }\n\n    @Override\n    public V selectByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key ) {\n        String scopeKey = this.getScopeKey( meta, szParentIndexKey );\n        if( meta.getResultConverter() == null ) {\n            meta.setResultConverter( new GenericResultConverter<>( meta.getValueType(), meta.getValueMetaKeys() ));\n        }\n        Object val = this.mJedis.hget( scopeKey, key.toString() );\n        if( val == null ) {\n            return null;\n        }\n        return meta.<V >getResultConverter().convert( val ) ;\n    }\n\n    @Override\n    public V selectByKey( 
IndexableTargetScopeMeta meta, Object key ) {\n        return this.selectByNS( meta, meta.getIndexKey(), key );\n    }\n\n    @Override\n    public void insertByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, K key, V entity ) {\n        this.mJedis.hset( this.getScopeKey( meta, szParentIndexKey ), key.toString() , entity.toString() );\n    }\n\n    @Override\n    public void insert( IndexableTargetScopeMeta meta, K key, V entity ) {\n        this.insertByNS( meta, meta.getIndexKey(), key, entity );\n    }\n\n    @Override\n    public void insert( IndexableTargetScopeMeta meta, K key, V entity, long expireMill ) {\n        this.insertByNS( meta, meta.getIndexKey(), key, entity ); // Not supported.\n    }\n\n    @Override\n    public void updateByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, K key, V entity ) {\n        this.insertByNS( meta, szParentIndexKey, key, entity );\n    }\n\n    @Override\n    public void update( IndexableTargetScopeMeta meta, K key, V entity ) {\n        this.insert( meta, key, entity );\n    }\n\n    @Override\n    public void deleteByNS( IndexableTargetScopeMeta meta, String szParentIndexKey, Object key ) {\n        this.mJedis.hdel( this.getScopeKey( meta, szParentIndexKey ), key.toString() );\n    }\n\n    @Override\n    public void deleteByKey( IndexableTargetScopeMeta meta, Object key ) {\n        this.deleteByNS( meta, meta.getIndexKey(), key );\n    }\n\n    @Override\n    public void purge( IndexableTargetScopeMeta meta ) {\n        this.purgeByNS( meta, meta.getIndexKey() );\n    }\n\n    @Override\n    public void purgeByNS( IndexableTargetScopeMeta meta, String szParentIndexKey ) {\n        String ns = this.getScopeKey( meta, szParentIndexKey );\n        this.mJedis.del( ns );\n    }\n\n    @Override\n    public void commit() {\n        // Redis operations are atomic, no explicit commit needed.\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Iterator<K > keysIterator( 
IndexableTargetScopeMeta meta ) {\n        return (Iterator) new RedisKeysIterator( this.mJedis, \"\", new IteratorSourceAdapter() {\n            @Override\n            public ScanResult<Map.Entry<String, String> > scan( String cursor, ScanParams params ) {\n                return GenericRedisHashManipulator.this.mJedis.hscan( meta.getIndexKey(), cursor, params );\n            }\n        });\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Iterator<Map.Entry<K, V > > iterator( IndexableTargetScopeMeta meta ) {\n        return (Iterator) new RedisEntryIterator( this.mJedis, \"\", new IteratorSourceAdapter() {\n            @Override\n            public ScanResult<Map.Entry<String, String> > scan( String cursor, ScanParams params ) {\n                return GenericRedisHashManipulator.this.mJedis.hscan( meta.getIndexKey(), cursor, params );\n            }\n        });\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/GenericRedisMasterManipulator.java",
    "content": "package com.pinecone.slime.jelly.source.redis;\n\nimport com.pinecone.framework.system.prototype.ObjectiveBean;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.slime.jelly.source.NamespacedKey;\nimport com.pinecone.slime.source.GenericResultConverter;\nimport com.pinecone.slime.source.indexable.IndexableIterableManipulator;\nimport com.pinecone.slime.source.indexable.IndexableTargetScopeMeta;\nimport redis.clients.jedis.Jedis;\nimport redis.clients.jedis.ScanParams;\nimport redis.clients.jedis.ScanResult;\nimport redis.clients.jedis.exceptions.JedisException;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.LinkedHashMap;\n\npublic class GenericRedisMasterManipulator<K extends String, V > implements IndexableIterableManipulator<K, V > {\n    private final Jedis  mJedis;\n    private final String mszNameSeparator;\n\n    public GenericRedisMasterManipulator( Jedis jedis, String szSeparator ) {\n        this.mJedis           = jedis;\n        this.mszNameSeparator = szSeparator;\n    }\n\n    public GenericRedisMasterManipulator( Jedis jedis ) {\n        this( jedis, \":\" );\n    }\n\n    private String getFullKey( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        return NamespacedKey.getFullKey( meta, this.mszNameSeparator, szNamespace, key );\n    }\n\n    private String getKeyType( String key ) {\n        try {\n            return mJedis.type( key );\n        }\n        catch ( JedisException e ) {\n            // Handle exception (log, throw, etc.)\n            return null; // Return null or throw exception to indicate failure\n        }\n    }\n\n    @Override\n    public long counts( IndexableTargetScopeMeta meta, String szScopeKey ) {\n        if( szScopeKey == null || szScopeKey.isEmpty() ) {\n            return this.mJedis.dbSize();\n        }\n        try {\n            String type = this.getKeyType( szScopeKey );\n            if 
( \"list\".equals(type) ) {\n                return this.mJedis.llen( szScopeKey );\n            }\n            else if ( \"set\".equals(type) ) {\n                return this.mJedis.scard( szScopeKey );\n            }\n            else if ( \"zset\".equals(type) ) {\n                return this.mJedis.zcard( szScopeKey );\n            }\n            else if ( \"hash\".equals(type) ) {\n                return this.mJedis.hlen( szScopeKey );\n            }\n            else {\n                throw new IllegalArgumentException( \"Unsupported data type[ \" + type + \" ] for counts operation.\" );\n            }\n        }\n        catch ( JedisException e ) {\n            return -1;\n        }\n    }\n\n    @Override\n    public long countsByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        String scopeKey = this.getFullKey( meta, szNamespace, key );\n        if( this.mJedis.exists( scopeKey ) ){\n            return 1;\n        }\n        return 0;\n    }\n\n    @Override\n    public long countsNS( IndexableTargetScopeMeta meta, String szNamespace ) {\n        long count = 0;\n        String cursor = ScanParams.SCAN_POINTER_START;\n        ScanParams scanParams = new ScanParams().match( szNamespace + \"*\" ).count( 1000 );\n\n        do {\n            ScanResult<String > scanResult = this.mJedis.scan( cursor, scanParams );\n            count += scanResult.getResult().size();\n            cursor = scanResult.getCursor();\n        }\n        while ( !cursor.equals(ScanParams.SCAN_POINTER_START) );\n\n        return count;\n    }\n\n    @Override\n    public List query( IndexableTargetScopeMeta meta, String szStatement ) {\n        throw new UnsupportedOperationException(\"Query method not supported for GenericRedisMasterManipulator.\");\n    }\n\n    @Override\n    public List<V> queryVal( IndexableTargetScopeMeta meta, String szStatement ) {\n        throw new UnsupportedOperationException(\"QueryVal method not supported for 
GenericRedisMasterManipulator.\");\n    }\n\n    protected Object selectElementByKey( IndexableTargetScopeMeta meta, Object key ) {\n        String szKey = key.toString();\n        try {\n            String type = this.getKeyType( szKey );\n            if ( \"string\".equals( type ) ) {\n                String value = this.mJedis.get( szKey );\n                if ( value == null ) {\n                    return null;\n                }\n                return value;\n            }\n            else if ( \"hash\".equals( type ) ) {\n                Map<String, String > map = this.mJedis.hgetAll( szKey );\n                if ( map == null ) {\n                    return null;\n                }\n                return map;\n            }\n            else if ( \"list\".equals( type ) ) {\n                List<String > list = this.mJedis.lrange( szKey, 0, -1 );\n                if ( list == null || list.isEmpty() ) {\n                    return null;\n                }\n                return list;\n            }\n            else if ( \"set\".equals( type ) ) {\n                Set<String > set = this.mJedis.smembers( szKey );\n                if ( set == null || set.isEmpty() ) {\n                    return null;\n                }\n                return set;\n            }\n            else if ( \"zset\".equals( type ) ) {\n                Set<String > zset = this.mJedis.zrange( szKey, 0, -1 );\n                if ( zset == null || zset.isEmpty() ) {\n                    return null;\n                }\n                return zset;\n            }\n            else if ( \"none\".equals( type ) ) {\n                return null;\n            }\n            else {\n                throw new IllegalArgumentException( \"Unsupported data type[\" + type + \"] for selectByNS operation.\" );\n            }\n        }\n        catch ( JedisException | ClassCastException e ) {\n            // Handle exceptions (log, throw, etc.)\n            return null;\n        }\n    }\n\n  
  @Override\n    public Object selectAllByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        if( key != null && szNamespace != null ) {\n            return this.selectElementByKey( meta, this.getFullKey( meta, szNamespace, key ) );\n        }\n\n        if( szNamespace == null ) {\n            szNamespace = \"\";\n        }\n\n        String cursor = ScanParams.SCAN_POINTER_START;\n        ScanParams scanParams = new ScanParams().match( szNamespace + \"*\" ).count( 1000 );\n\n        Map<String, Object > map = new LinkedHashMap<>();\n        do {\n            ScanResult<String > scanResult = this.mJedis.scan( cursor, scanParams );\n            for( String k : scanResult.getResult() ) {\n                map.put( k, this.selectElementByKey( meta, this.getFullKey( meta, szNamespace, k ) ) );\n            }\n            cursor = scanResult.getCursor();\n        }\n        while ( !cursor.equals(ScanParams.SCAN_POINTER_START) );\n\n        return map;\n    }\n\n    @Override\n    public List<V > selectsByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        return List.of( this.selectByNS( meta, szNamespace, key ) );\n    }\n\n    @Override\n    public V selectByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        return selectByKey( meta, this.getFullKey( meta, szNamespace, key ) );\n    }\n\n    @Override\n    public V selectByKey( IndexableTargetScopeMeta meta, Object key ) {\n        if ( meta.getResultConverter() == null ) {\n            meta.setResultConverter( new GenericResultConverter<>( meta.getValueType(), meta.getValueMetaKeys() ) );\n        }\n        return meta.<V >getResultConverter().convert(\n                this.selectElementByKey( meta, key )\n        );\n    }\n\n    protected void insert0( IndexableTargetScopeMeta meta, String szKey, V entity, long expireMill ) {\n        try {\n            if( entity instanceof String ) {\n                this.mJedis.set( szKey, 
(String)entity );\n            }\n            else if( entity instanceof Map ) {\n                Map<?, ? > map = (Map) entity;\n                if( map.get( szKey ) instanceof String ) {\n                    this.mJedis.hset( szKey, (Map<String, String>) map ); // Check once.\n                }\n                else {\n                    for( Map.Entry kv : map.entrySet() ) {\n                        this.mJedis.hset( szKey, kv.getKey().toString(), kv.getValue().toString() );\n                    }\n                }\n            }\n            else if( entity instanceof List ) {\n                List<? > list = (List) entity;\n                int i = 0;\n                for( Object e : list ) {\n                    this.mJedis.lset( szKey, i, e.toString() );\n                    ++i;\n                }\n            }\n            else if( entity instanceof Set) {\n                Set<? > list = (Set) entity;\n                for( Object e : list ) {\n                    this.mJedis.sadd( szKey, e.toString() );\n                }\n            }\n            else if( entity != null ){\n                ObjectiveBean bean = new ObjectiveBean( entity );\n                String[] keys = bean.keys();\n                for( String k : keys ) {\n                    this.mJedis.hset( szKey, k, bean.get( k ).toString() );\n                }\n            }\n\n            if( expireMill > 0 ) {\n                this.mJedis.pexpire( szKey, expireMill );\n            }\n        }\n        catch ( JedisException | ClassCastException e ) {\n            // Handle exceptions (log, throw, etc.)\n        }\n    }\n\n    protected void insert0( IndexableTargetScopeMeta meta, String szKey, V entity ) {\n        this.insert0( meta, szKey, entity, -1 );\n    }\n\n    @Override\n    public void insertByNS( IndexableTargetScopeMeta meta, String szNamespace, K key, V entity ) {\n        String scopeKey = this.getFullKey( meta, szNamespace, key );\n        this.insert0( meta, scopeKey, 
entity );\n    }\n\n    @Override\n    public void insert( IndexableTargetScopeMeta meta, K key, V entity ) {\n        this.insert0( meta, key.toString(), entity );\n    }\n\n    @Override\n    public void insert( IndexableTargetScopeMeta meta, K key, V entity, long expireMill ) {\n        this.insert0( meta, key.toString(), entity, expireMill );\n    }\n\n    @Override\n    public void updateByNS( IndexableTargetScopeMeta meta, String szNamespace, K key, V entity ) {\n        this.insertByNS( meta, szNamespace, key, entity );\n    }\n\n    @Override\n    public void update( IndexableTargetScopeMeta meta, K key, V entity ) {\n        this.insert( meta, key, entity );\n    }\n\n    @Override\n    public void deleteByNS( IndexableTargetScopeMeta meta, String szNamespace, Object key ) {\n        String scopeKey = this.getFullKey( meta, szNamespace,key );\n        this.mJedis.unlink( scopeKey );\n    }\n\n    @Override\n    public void deleteByKey( IndexableTargetScopeMeta meta, Object key ) {\n        this.mJedis.unlink( key.toString() );\n    }\n\n    @Override\n    public void purge( IndexableTargetScopeMeta meta ) {\n        this.purgeByNS( meta, meta.getScopeNS() );\n    }\n\n    @Override\n    public void purgeByNS( IndexableTargetScopeMeta meta, String szNamespace ) {\n        this.mJedis.select( Integer.parseInt( szNamespace ) );\n        this.mJedis.flushDB();\n    }\n\n    @Override\n    public void commit() {\n        // Redis operations are atomic, no explicit commit needed.\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Iterator<K > keysIterator( IndexableTargetScopeMeta meta ) {\n        return (Iterator) new RedisKeysIterator( this.mJedis, \"\", new IteratorSourceAdapter() {\n            @Override\n            public ScanResult<String > scan( String cursor, ScanParams params ) {\n                return GenericRedisMasterManipulator.this.mJedis.scan( cursor, params );\n            }\n        });\n    }\n\n    @Override\n    
@SuppressWarnings( \"unchecked\" )\n    public Iterator iterator( IndexableTargetScopeMeta meta ) {\n        return new EntryIterator( meta );\n    }\n\n    protected final class EntryIterator implements Iterator<Map.Entry > {\n        Iterator<K > keyIterator;\n        IndexableTargetScopeMeta meta;\n\n        EntryIterator( IndexableTargetScopeMeta meta ) {\n            this.meta = meta;\n            this.keyIterator = GenericRedisMasterManipulator.this.keysIterator( meta );\n        }\n\n        @Override\n        public final boolean hasNext() {\n            return this.keyIterator.hasNext();\n        }\n\n        @Override\n        public final Map.Entry next() {\n            K k = this.keyIterator.next(); // WARNING, Unchecked.\n            return new KeyValue<>( k, GenericRedisMasterManipulator.this.selectElementByKey( this.meta, k ) );\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/IteratorSourceAdapter.java",
    "content": "package com.pinecone.slime.jelly.source.redis;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport redis.clients.jedis.ScanParams;\nimport redis.clients.jedis.ScanResult;\n\npublic interface IteratorSourceAdapter extends Pinenut {\n    ScanResult<? > scan( String cursor, ScanParams params );\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/RedisEntryIterator.java",
    "content": "package com.pinecone.slime.jelly.source.redis;\n\nimport redis.clients.jedis.Jedis;\n\nimport java.util.Map;\n\npublic class RedisEntryIterator extends RedisIterator {\n    public RedisEntryIterator( Jedis jedis, String namespace, int batchSize, IteratorSourceAdapter adapter ) {\n        super( jedis, namespace, batchSize, adapter );\n    }\n\n    public RedisEntryIterator( Jedis jedis, String namespace, IteratorSourceAdapter adapter ) {\n        this( jedis, namespace, 1000, adapter );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Map.Entry<String, String > next() {\n        return (Map.Entry<String, String >) super.next();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/RedisIterator.java",
    "content": "package com.pinecone.slime.jelly.source.redis;\n\nimport redis.clients.jedis.Jedis;\nimport redis.clients.jedis.ScanParams;\nimport redis.clients.jedis.ScanResult;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.NoSuchElementException;\n\npublic class RedisIterator implements Iterator<Object > {\n    private final Jedis             mJedis;\n    private final ScanParams        mScanParams;\n    private String                  mCursor;\n    private List<? >                mCurrentBatch;\n    private int                     mCurrentIndex;\n    private IteratorSourceAdapter   mSourceAdapter;\n\n    public RedisIterator( Jedis jedis, String namespace, int batchSize, IteratorSourceAdapter adapter ) {\n        this.mJedis          = jedis;\n        this.mScanParams     = new ScanParams().match( namespace + \"*\" ).count( batchSize );\n        this.mCursor         = ScanParams.SCAN_POINTER_START;\n        this.mCurrentBatch   = null;\n        this.mCurrentIndex   = 0;\n        this.mSourceAdapter  = adapter;\n        this.fetchNextBatch();\n    }\n\n    public RedisIterator(Jedis jedis, String namespace, IteratorSourceAdapter adapter ) {\n        this( jedis, namespace, 1000, adapter );\n    }\n\n    private void fetchNextBatch() {\n        ScanResult<? 
> scanResult      = this.mSourceAdapter.scan( this.mCursor, this.mScanParams );\n        this.mCurrentBatch             = scanResult.getResult();\n        this.mCursor                   = scanResult.getCursor();\n        this.mCurrentIndex             = 0;\n    }\n\n    @Override\n    public boolean hasNext() {\n        if ( this.mCurrentBatch == null || this.mCurrentIndex >= this.mCurrentBatch.size() ) {\n            if ( this.mCursor.equals( ScanParams.SCAN_POINTER_START ) ) {\n                return false;\n            }\n            this.fetchNextBatch();\n        }\n        return this.mCurrentIndex < this.mCurrentBatch.size();\n    }\n\n    @Override\n    public Object next() {\n        if ( !this.hasNext() ) {\n            throw new NoSuchElementException();\n        }\n        return this.mCurrentBatch.get( this.mCurrentIndex++ );\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/main/java/com/pinecone/slime/jelly/source/redis/RedisKeysIterator.java",
    "content": "package com.pinecone.slime.jelly.source.redis;\n\nimport redis.clients.jedis.Jedis;\n\nimport java.util.Map;\n\npublic class RedisKeysIterator extends RedisIterator {\n    public RedisKeysIterator( Jedis jedis, String namespace, int batchSize, IteratorSourceAdapter adapter ) {\n        super( jedis, namespace, batchSize, adapter );\n    }\n\n    public RedisKeysIterator( Jedis jedis, String namespace, IteratorSourceAdapter adapter ) {\n        this( jedis, namespace, 1000, adapter );\n    }\n\n    @Override\n    public String next() {\n        Object e = super.next();\n        if( e instanceof String ) {\n            return ( String ) e;\n        }\n        else {\n            Map.Entry entry = (Map.Entry) e;\n            return (String) entry.getKey();\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Jelly/src/test/java/com/TestJelly.java",
    "content": "package com;\n\nimport com.pinecone.Pinecone;\n\npublic class TestJelly {\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>pinecones</artifactId>\n        <groupId>com.pinecones</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone</groupId>\n    <artifactId>pinecone</artifactId>\n    <version>2.5.1</version>\n    <packaging>jar</packaging>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/PineTrial.java",
    "content": "package com.pinecone;\n\nimport java.math.BigDecimal;\n\nimport com.pinecone.framework.util.Debug;\n//import opennlp.tools.ml.maxent.DataStream;\n//import org.glassfish.jersey.server.internal.scanning.FilesScanner;\n//import sun.misc.FloatingDecimal;\n//import sun.nio.ch.WindowsSelectorImpl\n\n\nclass SS implements Runnable {\n    public int i = 0;\n    @Override\n    public void run() {\n        for ( int j = 0; j < 1e4; j++ ) {\n            //++i;\n            Debug.trace( Thread.currentThread().getName(), j );\n        }\n    }\n}\n\n\n\npublic class PineTrial {\n\n    public static boolean test(int n){\n        if (n<2){\n            return false;\n        }\n        int z = (int)Math.sqrt(n);\n        for (int i = 2; i <= z; i++) {\n            if (n%i == 0){\n                return false;\n            }\n        }\n        return true;\n    }\n\n    public static Integer pre(int n) {\n        int temp = 0;\n        while (n > 0) {\n            temp = temp * 10 + (n % 10);\n            n = n / 10;\n        }\n        return temp;\n    }\n\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n//            for (int i = 11; i < 100_000_000_0; i++) {\n//                if (i == pre(i) && test(i) ){\n//                    System.out.println(i);\n//                }\n//            }\n\n\n\n\n\n\n//            MySQLExecutor mysql = new MySQLExecutor( new MySQLHost( \"localhost/predator\", \"root\", \"test\", \"UTF-8\" ) );\n            //JSONArray tables = mysql.fetch( \"SELECT tM.en_word, tM.coca_rank FROM( SELECT tW.en_word, tF.coca_rank FROM predator_mutual_words AS tW LEFT JOIN predator_mutual_words_frequency AS tF ON tW.en_word = tF.en_word WHERE LENGTH(tW.en_word) = 3 ) AS tM WHERE tM.coca_rank IS NOT NULL AND tM.coca_rank <= 20000 AND tM.coca_rank != 0 ORDER BY tM.coca_rank;\" );\n//            JSONArray 
tables = mysql.fetch( \"SELECT tM.en_word, tM.coca_rank FROM( SELECT tW.en_word, tF.coca_rank FROM predator_mutual_words AS tW LEFT JOIN predator_mutual_words_frequency AS tF ON tW.en_word = tF.en_word WHERE LENGTH(tW.en_word) >= 3 AND LENGTH(tW.en_word) <= 5 ) AS tM WHERE tM.coca_rank IS NOT NULL AND tM.coca_rank <= 15000 AND tM.coca_rank != 0 ORDER BY tM.coca_rank;\" );\n//\n//            JSONArray words = new JSONArraytron();\n//            for ( int i = 0; i < tables.size(); i++ ) {\n//                String szWord = tables.optJSONObject(i).optString( \"en_word\" );\n//                //if( szWord.charAt(0) >= 'a' ) {\n//                    words.put( szWord );\n//                //}\n//            }\n//\n//            Debug.trace( words );\n\n\n\n//            SS runnable = new SS();\n//\n//            Thread t1 = new Thread( runnable );\n//            Thread t2 = new Thread( runnable );\n//\n//\n//            t1.start();\n//\n//            t2.start();\n//\n//            Thread.sleep( 100 );\n//\n//            Debug.trace( runnable.i );\n\n\n\n\n\n\n            Debug.redfs((new BigDecimal(\"8031.12\"))\n                    .multiply(new BigDecimal(1024))\n                    .multiply(new BigDecimal(1024))\n                    .multiply(new BigDecimal(1024))\n                    .multiply(new BigDecimal(1024))\n                    .multiply(new BigDecimal(1024))\n                    .longValue());\n\n\n\n\n\n\n\n\n\n\n//            Debug.trace( ( (Framework)Pinecone.sys().getTaskManager().summon(\n//                    Framework.class.getName(),\n//                    new Class<?>[]{ String[].class, PrimeSystem.class },\n//                    (Object[]) new String[0], Pinecone.sys()\n//            ) ).getName() );\n\n\n//            ReentrantLock lock = new ReentrantLock();\n//            Runnable runnable = new Runnable() {\n//                @Override\n//                public void run() {\n////                    for ( int i = 0;  i < 1e6;  i++ ) {\n////    
                    lock.lock();\n////                        Debug.trace( i );\n////                        lock.unlock();\n////                    }\n//                    Thread thread2 = new Thread(()->{\n//                        //ThreadGroup parentThreadGroup = Thread.currentThread().getThreadGroup().getParent();\n//                       // Debug.trace( Thread.currentThread().getId(), parentThreadGroup. )\n//                        Thread.currentThread().getThreadGroup().list();\n//\n//                    });\n//                    thread2.start();\n//                }\n//            };\n//\n//            Thread thread1 = new Thread(runnable);\n//            thread1.start();\n//\n//            Thread thread2 = new Thread(runnable);\n//            thread2.start();\n\n\n\n\n//            LinkedTreeMap<Integer, Integer > linkedTreeMap = new LinkedTreeMap<>();\n//            LinkedHashSet<Integer > linkedTreeSet = new LinkedHashSet<>();\n//            for ( int i = 0; i < 1e6; i++ ) {\n//                int j = new Random().nextInt((int)1e6);\n//                linkedTreeMap.put( j, i );\n//                linkedTreeSet.add(j);\n//            }\n//\n//\n//\n////            for ( Integer i : linkedTreeSet ) {\n////                linkedTreeMap.remove(i);\n////            }\n//            Integer[] arr = linkedTreeSet.toArray( new Integer[0] );\n//\n//            int len = linkedTreeMap.size();\n//            for ( int i = 0; i < len -20; i++ ) {\n//                //linkedTreeMap.remove( arr[i] );\n//                linkedTreeMap.removeFirst();\n//            }\n//\n//\n//            int i = 0;\n//            for ( Object kv : linkedTreeMap.entrySet()  ) {\n//                ++i;\n//            }\n//\n//            Debug.trace( linkedTreeMap.size(), i, linkedTreeMap );\n\n\n\n\n//            Thread.sleep( 100000 );\n\n\n//            for ( Map.Entry<String, String> kv : treeMap ) {\n//                Debug.trace( kv );\n//            }\n\n\n\n//            
Debug.trace( JSON.parse( FileUtils.readAll(\"E:\\\\MyFiles\\\\CodeScript\\\\Project\\\\Hazelnut\\\\Sauron\\\\Saurons\\\\system\\\\setup\\\\PubChem.json5\") ) );\n\n//            String packageName = \"Predator.Wizard.Public.undefined\";\n//            // List<String> classNames = getClassName(packageName);\n//            List<String> classNames = PackageUtils.fetchClassName( packageName );\n//            if (classNames != null) {\n//                for ( String className : classNames ) {\n//                    className = className.substring( className.indexOf(packageName) );\n//                    Class<?> pVoid = Thread.currentThread().getContextClassLoader().loadClass( className );\n//                    Debug.trace(pVoid.getAnnotations());\n//                }\n//            }\n\n\n//            Debug.trace( system.getProperty(\"user.dir\") );\n//\n//            HostMatrix illuminationSystem = new HostMatrix(\"E:/MyFiles/CodeScript/Project/Hazelnut/Predator/Predator/src/Resources/\",\"config.json5\");\n//\n//            //Debug.trace( illuminationSystem.getSystemConfig() );\n\n//            String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n//            JSONObject jsonShit = new JSONMaptron(szJson);\n\n\n            ///predator_en_w_etymon_derived_linguae\n            /*MySQLExecutor mysql = new MySQLExecutor( new MySQLHost( \"localhost/predator\", \"root\", \"test\", \"UTF-8\" ) );\n            JSONArray tables = mysql.fetch( \"SELECT * FROM predator_en_w_etymon_derived_linguae\" );\n\n\n            for ( Object obj : tables) {\n                JSONObject row = (JSONObject)obj;\n\n                row.put( \"nation\", new JSONArraytron() );\n            }\n\n            FileWriter fileWriter = new FileWriter( \"M:/etymon_derived_linguae.json\" );\n            tables.write( fileWriter );\n            fileWriter.close();*/\n\n\n/*            MySQLExecutor mysql = new MySQLExecutor( new MySQLHost( \"localhost/predator\", \"root\", \"test\", 
\"UTF-8\" ) );\n            JSONArray tables = mysql.fetch( \"SELECT * FROM predator_mutual_words_frequency\" );\n\n            JSONObject jMap  = new JSONMaptron();\n            for ( Object obj : tables) {\n                JSONObject row = (JSONObject)obj;\n\n                String szWord = row.optString( \"en_word\" );\n\n                jMap.affirmArray( szWord ).put( row );\n            }\n\n            FileWriter fileWriter = new FileWriter( \"M:/dv/mutual_words_frequency.json\" );\n            tables.write( fileWriter );\n            fileWriter.close();*/\n\n\n\n\n\n\n\n//            ArrayList<String > arrayList = new ArrayList<>();\n//            for ( int i = 0; i < 1e7; i++ ) {\n//                arrayList.add( new String( new char[4] ) );\n//            }\n//\n//            system.out.println( arrayList.size() );\n//\n//            system.gc();\n//            system.gc();\n//            system.gc();\n//            system.gc();\n//\n\n//            long nMem = (long)( (double)1 * 1024 * 1024 * 1024 );\n//            byte[][] magnChars = new byte[8][];\n//            magnChars[0]  = new byte[ (int) nMem  ];\n//            magnChars[1] = new byte[ (int) nMem  ];\n//            magnChars[2] = new byte[ (int) nMem  ];\n//            magnChars[3] = new byte[ (int) nMem  ];\n//\n//\n//            Debug.trace( nMem,\"Done\", framework.getRunTime() );\n//\n//            Thread.sleep(1000000);\n\n\n\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/Pinecone.java",
    "content": "package com.pinecone;\n\n\nimport com.pinecone.framework.system.Framework;\nimport com.pinecone.framework.system.functions.Function;\nimport com.pinecone.framework.util.io.Tracer;\n\nimport java.io.InputStream;\nimport java.io.PrintStream;\n\n/**\n *  Pinecone Framework For Java (Bean Nuts Pinecone Ursus for Java)\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  Open Source licensed under the GPL.\n *  *****************************************************************************************\n *  Other information about this framework, such as papers, patents, etc -> http://www.rednest.cn\n *  Warning: This source code is protected by copyright law and international treaties.\n *  *****************************************************************************************\n *  www.nutgit.com/ www.xbean.net / www.rednest.cn\n *  Include Almond, C/CPP, JAVA, PHP, Python, JavaScript, ActionScript, GoLang\n *  *****************************************************************************************\n *  ;) Hope you enjoy this | Dragon King, the undefined\n */\npublic class Pinecone {\n    public static final long        VER_PINE               =  202506L;\n    public static final String      VERSION                = \"2.5.1\";\n    public static final String      RELEASE_DATE           = \"2025/06/06\";\n    public static final String      ROOT_SERVER            = \"http://www.rednest.cn/\";\n    public static final String      MY_PROGRAM_NAME        = \"Pinecone\";\n    public static final String      CONTACT_INFO           = \"E-Mail:arb#rednest.cn\";\n    public static final boolean     S_DEBUG_MODE           = true;\n    public static final int         FLOAT_ACCURACY         = 32;\n    public static final int         COMMON_ACCURACY_LIMIT  = 10000;\n\n\n    public static final Framework   PRIME_SYSTEM           = new Framework();\n\n    public static int init ( Function fnInlet, 
Object...args ) throws Exception {\n        return Pinecone.PRIME_SYSTEM.init( fnInlet, args );\n    }\n\n    public static Framework sys(){\n        return Pinecone.PRIME_SYSTEM;\n    }\n\n    public static Tracer console() {\n        return Pinecone.sys().console();\n    }\n\n    public static PrintStream out() {\n        return Pinecone.console().getOut();\n    }\n\n    public static PrintStream err() {\n        return Pinecone.console().getOut();\n    }\n\n    public static InputStream in() {\n        return Pinecone.sys().in();\n    }\n\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/NamedInheritableThreadLocal.java",
    "content": "package com.pinecone.framework.lang;\n\nimport com.pinecone.framework.util.Assert;\n\npublic class NamedInheritableThreadLocal<T > extends InheritableThreadLocal<T > {\n    private final String name;\n\n    public NamedInheritableThreadLocal( String name ) {\n        Assert.hasText( name, \"Name must not be empty\" );\n        this.name = name;\n    }\n\n    @Override\n    public String toString() {\n        return this.name;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/NamedThreadLocal.java",
    "content": "package com.pinecone.framework.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.Assert;\n\npublic class NamedThreadLocal<T> extends ThreadLocal<T> implements Pinenut {\n    private final String name;\n\n    public NamedThreadLocal( String name ) {\n        Assert.hasText(name, \"Name must not be empty\");\n        this.name = name;\n    }\n\n    @Override\n    public String toString() {\n        return this.name;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/DataStructureEntity.java",
    "content": "package com.pinecone.framework.lang.field;\n\npublic interface DataStructureEntity extends SegmentEntity {\n    String StructureNameKey = \"__NAME__\";\n\n    int getStartOffset();\n\n    int getTextOffset();\n\n    int getDataOffset();\n\n    void setTextOffset( int offset );\n\n    void setDataOffset( int offset );\n\n    boolean isEmpty();\n\n    int size();\n\n    int capacity();\n\n    void resize( int newSize );\n\n    FieldEntity[] getFields();\n\n    FieldEntity[] getSegments();\n\n    void setTextField( int index, FieldEntity field ) ;\n\n    void setDataField( int index, FieldEntity field ) ;\n\n    void setTextField( int index, String key, Object val );\n\n    void setDataField( int index, String key, Object val );\n\n    void setDataField( int index, String key, Object val, String genericLabel );\n\n    void setTextField( int index, String key, Class<?> type );\n\n    void setDataField( int index, String key, Class<?> type );\n\n    void setDataField( int index, String key, Class<?> type, String genericLabel );\n\n    FieldEntity getTextField( int index );\n\n    FieldEntity getDataField( int index );\n\n    FieldEntity findTextField( String key );\n\n    FieldEntity findDataField( String key );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/FieldEntity.java",
    "content": "package com.pinecone.framework.lang.field;\n\nimport java.util.Arrays;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.ReflectionUtils;\n\npublic interface FieldEntity extends Pinenut {\n    String getName();\n\n    Class<?> getType();\n\n    Object getValue();\n\n    String getGenericTypeLabel();\n\n    default String[] getGenericTypeNames() {\n        return ReflectionUtils.extractGenericClassNames( this.getGenericTypeLabel() );\n    }\n\n    void applyGenericTypeLabel( String genericTypeLabel );\n\n    default boolean hasDeclaredGenericType() {\n        return this.getGenericTypeLabel() != null && this.getGenericTypeLabel().contains( \"<\" ) && this.getGenericTypeLabel().contains( \">\" );\n    }\n\n    void setValue( Object value );\n\n    static FieldEntity[] typeFrom( Map map ) {\n        FieldEntity[] entities = new FieldEntity[ map.size() ];\n        int i = 0;\n        for( Object em : map.entrySet() ) {\n            Map.Entry kv = (Map.Entry) em;\n\n            entities[ i ] = new GenericFieldEntity( kv.getKey().toString(), kv.getValue().getClass() );\n            ++i;\n        }\n\n        return entities;\n    }\n\n    static FieldEntity[] from( Map map ) {\n        FieldEntity[] entities = new FieldEntity[ map.size() ];\n        int i = 0;\n        for( Object em : map.entrySet() ) {\n            Map.Entry kv = (Map.Entry) em;\n\n            entities[ i ] = new GenericFieldEntity( kv.getKey().toString(), kv.getValue() );\n            ++i;\n        }\n\n        return entities;\n    }\n\n    static FieldEntity[] from( Class<? >[] parameters ) {\n        FieldEntity[] entities = new FieldEntity[ parameters.length ];\n        int i = 0;\n        for( Class<? 
> parameter : parameters ) {\n            entities[ i ] = new GenericFieldEntity(\n                    parameter.getName().replace( \".\", \"_\" ) + \"_\" + i, parameter.getComponentType()\n            );\n            ++i;\n        }\n\n        return entities;\n    }\n\n    default FieldEntity[] copy( FieldEntity[] that ) {\n        return Arrays.copyOf( that, that.length );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/GenericFieldEntity.java",
    "content": "package com.pinecone.framework.lang.field;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSON;\n\npublic class GenericFieldEntity implements FieldEntity {\n    protected String mszName;\n\n    protected Class<?> mType;\n\n    protected String mszGenericTypeLabel;\n\n    protected Object mValue;\n\n    public GenericFieldEntity( String szName, Object value, Class<?> type, String genericTypeLabel ) {\n        this.mszName                     = szName;\n        this.mType                       = type;\n        this.mValue                      = value;\n        this.mszGenericTypeLabel = genericTypeLabel;\n    }\n\n    public GenericFieldEntity( String szName, Object value, Class<?> type ) {\n        this( szName, value, type, null );\n    }\n\n    public GenericFieldEntity( String szName, Object value ) {\n        this( szName, value, value.getClass() );\n    }\n\n    public GenericFieldEntity( String szName, Class<?> type ) {\n        this( szName, null, type );\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public Class<?> getType() {\n        return this.mType;\n    }\n\n    @Override\n    public String getGenericTypeLabel() {\n        return this.mszGenericTypeLabel;\n    }\n\n    @Override\n    public void applyGenericTypeLabel( String genericTypeLabel ) {\n        this.mszGenericTypeLabel = genericTypeLabel;\n    }\n\n    @Override\n    public Object getValue() {\n        return this.mValue;\n    }\n\n    @Override\n    public void setValue( Object value ) {\n        this.mValue = value;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"{\" + StringUtils.jsonQuote( this.mszName.toString() ) + \":\" + JSON.stringify( this.mValue ) + \"}\";\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/GenericStructure.java",
    "content": "package com.pinecone.framework.lang.field;\n\nimport java.util.Arrays;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSON;\n\npublic class GenericStructure implements DataStructureEntity {\n    protected FieldEntity[] mSegments;\n\n    protected int mnTextOffset;\n\n    protected int mnDataOffset;\n\n    public GenericStructure( String szName, int nTextOffset, int nDataOffset ,int nElements ) {\n        if ( nDataOffset < 1 || nTextOffset >= nDataOffset ) {\n            throw new IllegalArgumentException( \"DataOffset must be greater than 1.\" );\n        }\n\n        this.mSegments       = new FieldEntity[ nDataOffset - nTextOffset + nElements ];\n        this.mSegments[ 0 ]  = new GenericFieldEntity( DataStructureEntity.StructureNameKey, szName, String.class );\n        this.mnTextOffset    = nTextOffset;\n        this.mnDataOffset    = nDataOffset;\n    }\n\n    public GenericStructure( String szName ,int nElements ) {\n        this( szName, 0, 1 , nElements );\n    }\n\n    public GenericStructure( FieldEntity[] segments, int nTextOffset, int nDataOffset ) {\n        this.mSegments    = segments;\n        this.mnTextOffset = nTextOffset;\n        this.mnDataOffset = nDataOffset;\n    }\n\n    @Override\n    public String getName() {\n        return (String) this.mSegments[ 0 ].getValue();\n    }\n\n    @Override\n    public String getSimpleName() {\n        String sz = this.getName();\n        String[] debris = sz.split( \"\\\\.|\\\\/\" );\n        if( debris.length > 1 ) {\n            return debris [ 1 ];\n        }\n        return sz;\n    }\n\n    @Override\n    public int getStartOffset() {\n        return 0;\n    }\n\n    @Override\n    public int getTextOffset() {\n        return this.mnTextOffset;\n    }\n\n    @Override\n    public int getDataOffset() {\n        return this.mnDataOffset;\n    }\n\n    @Override\n    public void setTextOffset( int offset ) {\n        if( offset < 0 ) {\n      
      return;\n        }\n\n        if ( offset > this.mnTextOffset ) {\n            int legacySize = this.mSegments.length - this.mnTextOffset;\n            this.resize( offset + this.mSegments.length );\n            System.arraycopy( this.mSegments, this.mnTextOffset, this.mSegments, offset, legacySize );\n            for( int i = 0; i < offset; i++ ){\n                this.mSegments[ i ] = null;\n            }\n        }\n\n        if( offset < this.mnTextOffset ) {\n            int length = this.mnDataOffset - this.mnTextOffset;\n            System.arraycopy( this.mSegments, this.mnTextOffset, this.mSegments, offset, length );\n            for( int i = offset + length; i < this.mnDataOffset; i++ ){\n                this.mSegments[ i ] = null;\n            }\n        }\n\n        this.mnDataOffset = offset - this.mnTextOffset + this.mnDataOffset;\n        this.mnTextOffset = offset;\n    }\n\n    @Override\n    public void setDataOffset( int offset ) {\n        if( offset <= 1 ) {\n            return;\n        }\n\n        if ( offset > this.mnDataOffset ) {\n            this.resize( this.size() + offset - this.mnDataOffset  );\n            System.arraycopy( this.mSegments, this.mnDataOffset, this.mSegments, offset, this.mSegments.length - offset );\n            for ( int i = this.mnDataOffset ; i < offset; ++i ) {\n                this.mSegments[ i ] = null;\n            }\n        }\n\n        if( offset < this.mnDataOffset ){\n            this.trimResize( this.mSegments.length - ( this.mnDataOffset - offset ), offset );\n        }\n\n        this.mnDataOffset = offset;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mnDataOffset <= 0;\n    }\n\n    @Override\n    public int size() {\n        return this.mSegments.length - this.mnDataOffset;\n    }\n\n    @Override\n    public int capacity() {\n        return this.mSegments.length;\n    }\n\n    @Override\n    public void resize( int newSize ) {\n        if ( newSize + 
this.mnDataOffset <= this.mSegments.length ) {\n            throw new IllegalArgumentException( \"New size must be greater than current size.\" );\n        }\n\n        FieldEntity[] newSegments = new FieldEntity[ newSize + this.mnDataOffset ];\n        System.arraycopy( this.mSegments, 0, newSegments, 0, this.mSegments.length );\n        this.mSegments = newSegments;\n    }\n\n\n    @Override\n    public FieldEntity[] getFields() {\n        return Arrays.copyOfRange( this.mSegments, this.mnDataOffset, this.mSegments.length );\n    }\n\n    @Override\n    public FieldEntity[] getSegments() {\n        return this.mSegments;\n    }\n\n    @Override\n    public void setTextField( int index, FieldEntity field ) {\n        if ( index < this.mnTextOffset || index >= this.mnDataOffset ) {\n            throw new IndexOutOfBoundsException( \"Text segment index out of bounds.\" );\n        }\n        this.mSegments[ this.mnTextOffset + index ] = field;\n    }\n\n    @Override\n    public void setDataField( int index, FieldEntity field ) {\n        int dataEnd   = this.mSegments.length;\n        if ( index >= dataEnd - this.mnDataOffset ) {\n            throw new IndexOutOfBoundsException( \"Data segment index out of bounds.\" );\n        }\n        this.mSegments[ this.mnDataOffset + index ] = field;\n    }\n\n    @Override\n    public void setTextField( int index, String key, Object val ) {\n        FieldEntity legacy = this.getTextField( index );\n        FieldEntity neo    = null;\n        if( legacy != null ) {\n            if( legacy.getName().equals( key ) ) {\n                legacy.setValue( val );\n                return;\n            }\n        }\n        neo = new GenericFieldEntity( key, val );\n        this.setTextField( index, neo );\n    }\n\n    @Override\n    public void setDataField( int index, String key, Object val ) {\n        this.setDataField( index, key, val, null );\n    }\n\n    @Override\n    public void setDataField( int index, String key, Object 
val, String genericLabel ) {\n        FieldEntity legacy = this.getDataField( index );\n        FieldEntity neo    = null;\n        if( legacy != null ) {\n            if( key.equals( legacy.getName() ) ) {\n                legacy.setValue( val );\n                return;\n            }\n        }\n\n        if ( genericLabel == null ) {\n            neo = new GenericFieldEntity( key, val );\n        }\n        else {\n            neo = new GenericFieldEntity( key, val, val.getClass(), genericLabel );\n        }\n        this.setDataField( index, neo );\n    }\n\n    @Override\n    public void setTextField( int index, String key, Class<?> type ) {\n        this.setTextField( index, new GenericFieldEntity( key, type ) );\n    }\n\n    @Override\n    public void setDataField( int index, String key, Class<?> type ) {\n        this.setDataField( index, new GenericFieldEntity( key, type ) );\n    }\n\n    @Override\n    public void setDataField( int index, String key, Class<?> type, String genericLabel ) {\n        this.setDataField( index, new GenericFieldEntity( key, null, type, genericLabel ) );\n    }\n\n    @Override\n    public FieldEntity getDataField( int index ) {\n        return this.mSegments[ this.mnDataOffset + index ];\n    }\n\n    @Override\n    public FieldEntity getTextField( int index ) {\n        return this.mSegments[ this.mnTextOffset + index ];\n    }\n\n    @Override\n    public FieldEntity findTextField( String key ) {\n        return this.findField( key, this.mnTextOffset );\n    }\n\n    @Override\n    public FieldEntity findDataField( String key ) {\n        return this.findField( key, this.mnDataOffset );\n    }\n\n    protected FieldEntity findField( String key, int offset ) {\n        for ( int i = offset; i < this.mSegments.length; ++i ) {\n            FieldEntity entity = this.mSegments[ i ];\n            if( entity.getName() == (Object)key ) {\n                return entity;\n            }\n            if( entity.getName() != null && 
entity.getName().equals( key ) ) {\n                return entity;\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public String toJSONString() {\n        StringBuilder sb = new StringBuilder();\n        sb.append( '{' );\n\n        for( int i = this.mnDataOffset; i < this.mSegments.length; ++i ) {\n            FieldEntity entity = this.mSegments[ i ];\n            if( entity != null ) {\n                sb.append( StringUtils.jsonQuote( entity.getName() ) );\n                sb.append( ':' );\n                sb.append( JSON.stringify( entity.getValue() ) );\n                sb.append( ',' );\n            }\n        }\n\n        if( sb.charAt( sb.length() - 1 ) == ',' ) {\n            sb.deleteCharAt( sb.length() - 1 );\n        }\n\n        sb.append( '}' );\n        return sb.toString();\n    }\n\n\n    protected void trimResize( int newSize, int newDataOffset ){\n        FieldEntity[] newSegments = new FieldEntity[newSize];\n        System.arraycopy( this.mSegments, this.mnDataOffset, newSegments, newDataOffset, this.mSegments.length - this.mnDataOffset );\n        System.arraycopy( this.mSegments, this.mnTextOffset, newSegments, this.mnTextOffset, newDataOffset - this.mnTextOffset  );\n        this.mSegments = newSegments;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/lang/field/SegmentEntity.java",
    "content": "package com.pinecone.framework.lang.field;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface SegmentEntity extends Pinenut {\n    String getName();\n\n    String getSimpleName();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ApoptosisRejectSignalException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class ApoptosisRejectSignalException extends PineRuntimeException {\n    public ApoptosisRejectSignalException() {\n        super();\n    }\n\n    public ApoptosisRejectSignalException( String message ) {\n        super( message );\n    }\n\n    public ApoptosisRejectSignalException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    public ApoptosisRejectSignalException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/AssertionRuntimeException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class AssertionRuntimeException extends PineRuntimeException {\n    public AssertionRuntimeException() {\n        super();\n    }\n\n    public AssertionRuntimeException( String message ) {\n        super( message );\n    }\n\n    public AssertionRuntimeException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    public AssertionRuntimeException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/AsynSystem.java",
    "content": "package com.pinecone.framework.system;\n\npublic interface AsynSystem extends RuntimeSystem {\n\n    void handleAsynLiveException( Exception e ) throws ProvokeHandleException;\n\n    void handleAsynKillException( Exception e ) throws ProvokeHandleException;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/CascadeSystem.java",
    "content": "package com.pinecone.framework.system;\n\npublic interface CascadeSystem extends RuntimeSystem {\n    CascadeSystem rootSystem();\n\n    CascadeSystem getParent();\n\n    default long getPrimaryId() {\n        return 0;\n    }\n\n    default boolean isPrimarySystem() {\n        return this.getPrimaryId() == this.getSystemId();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ConformitySystem.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.util.config.SysConfigson;\n\npublic interface ConformitySystem extends RuntimeSystem {\n    SysConfigson getGlobalConfig() ;\n\n    SysConfigson  getSystemConfig() ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ErrorStrings.java",
    "content": "package com.pinecone.framework.system;\n\npublic abstract class ErrorStrings {\n    public static final String E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED = \"Compromised attempts, Included path and its parent context are all invalid.\";\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Experimental.java",
    "content": "package com.pinecone.framework.system;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.TYPE, ElementType.METHOD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Experimental {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Framework.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.functions.Function;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.config.JSONSystemConfig;\nimport com.pinecone.framework.util.config.StartupCommandParser;\nimport com.pinecone.framework.util.io.Tracer;\nimport com.pinecone.framework.util.io.Tracerson;\nimport com.pinecone.framework.util.json.JSONException;\n\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.io.PrintStream;\nimport java.nio.file.Path;\nimport java.util.Map;\nimport java.util.Set;\n\npublic class Framework extends ArchProcessum implements Pinecore {\n    public static final String  DEFAULT_MAIN_CONFIG_FILE_NAME = \"config\";\n\n    // System properties\n    protected JSONSystemConfig       mjoGlobalConfig           ;\n    protected JSONSystemConfig       mjoSystemConfig           ;\n\n    // Startup & Environment properties\n    protected String                 mszMajorPackagePath       ; // The path of this class file.\n    protected String                 mszRuntimeContextPath     ; // System startup command, the 'user.dir'.\n    protected String                 mszRuntimePath            ; // System real runtime path.\n    protected Map<String, String[]>  mStartupCommandMap        ;\n    protected Map<String, String[]>  mEnvironmentVars          ;\n    protected String[]               mStartupCommand           ;\n    protected Thread                 mMainThread               ;\n    protected InputStream            mIn = System.in           ;\n    protected OutputStream           mOut = System.out         ;\n    protected Tracer                 mConsole = new Tracerson();\n\n    private   long                   mnBootTime                ;\n    private   Function               mfnAfterGlobalExpCaught   = new Function() {\n        @Override\n        
public Object invoke( Object... obj ) throws Exception {\n            Framework.this.console().cerr( \"Unhandled exception in \\\"\" + Framework.this.getAffiliateThread().getName() + \"\\\" : \\n\" );\n            ( ( Throwable ) obj[0] ).printStackTrace();\n            return null;\n        }\n    };\n\n    private   ClassLoader            mGlobalClassLoader        ;\n\n\n    protected void setStartupCommand( String[] args ) {\n        if( args == null ) {\n            args = new String[0];\n        }\n        this.mStartupCommand    = args;\n        this.mStartupCommandMap = StartupCommandParser.DefaultParser.parse( args );\n    }\n\n    protected void dispatchStartupCommand() {\n\n    }\n\n    protected Thread searchMainThread() {\n        Set<Thread > all = this.fetchAllProcessThreads();\n        Thread main = null;\n        Thread tid1 = null;\n        for( Thread thread : all ) {\n            if(\n                    // The thread name can be modified, so it is hard to believe all those conditions are mismatched, Jesus! 
Who would ever do that...\n                    thread.getName().equals( \"main\" ) && !thread.isDaemon() &&\n                    Thread.currentThread().getThreadGroup().getName().equals( \"main\" ) &&\n                    Thread.currentThread().getThreadGroup().getParent().getName().equals(\"system\")\n            ){\n                main = thread;\n            }\n\n            if( thread.getId() == 1 ) {\n                tid1 = thread;\n            }\n        }\n        if( main == null ) {\n            this.console().warn( \"[PineconeLifecycle] [WARN] System will use thread[id=1] as main thread.\" );\n            main = tid1;\n        }\n        return main;\n    }\n\n    private File findDefaultConfigFile() {\n        String szDefaultConfMajorPath  = Path.of( this.getRuntimePath(), Framework.DEFAULT_MAIN_CONFIG_FILE_NAME ).toString();\n        String szDefaultConfFilePath   = szDefaultConfMajorPath + \".json5\";\n        File f = new File( szDefaultConfFilePath );\n        if( f.exists() ) {\n            return f;\n        }\n\n        szDefaultConfFilePath   = szDefaultConfMajorPath + \".json\";\n        f = new File( szDefaultConfFilePath );\n        if( f.exists() ) {\n            return f;\n        }\n\n        return null;\n    }\n\n    protected void loadConfig() {\n        if( this.mjoGlobalConfig == null ) {\n            File f = this.findDefaultConfigFile();\n            if( f != null ) {\n                try{\n                    this.mjoGlobalConfig   = new JSONSystemConfig( this );\n                    this.mjoGlobalConfig.apply( f );\n                    this.mjoSystemConfig   = this.mjoGlobalConfig.getChild( \"System\" );\n                }\n                catch ( IOException | JSONException e ) {\n                    this.handleIgnoreException( e );\n                }\n            }\n\n            this.mjoGlobalConfig = new JSONSystemConfig( this );\n            this.mjoSystemConfig = new JSONSystemConfig( this );\n            
this.mjoGlobalConfig.put( \"System\", this.mjoSystemConfig );\n        }\n\n        this.mExceptionRestartTime = this.getSystemConfig().optInt( \"ExceptionRestartTime\", 0 );\n    }\n\n    protected void onlyLoadTaskManager() {\n        this.mTaskManager          = new GenericMasterTaskManager( this );\n    }\n\n    protected void init() {\n        this.traceWelcomeInfo();\n\n        this.mszMajorPackagePath   = this.getClass().getProtectionDomain().getCodeSource().getLocation().getPath();\n        this.mszRuntimeContextPath = System.getProperty(\"user.dir\");\n        this.mszRuntimePath        = this.mszRuntimeContextPath;\n        this.mMainThread           = this.searchMainThread();\n        this.mGlobalClassLoader    = this.mMainThread.getContextClassLoader();\n        this.mEnvironmentVars      = StartupCommandParser.DefaultParser.parse( System.getenv() );\n\n        this.setThreadAffinity( Thread.currentThread() );\n        this.loadConfig();\n\n        this.onlyLoadTaskManager();\n    }\n\n    @Override\n    public Map<String, String[] > getStartupCommandMap() {\n        return this.mStartupCommandMap;\n    }\n\n    @Override\n    public Map<String, String[] > getEnvironmentVars() {\n        return this.mEnvironmentVars;\n    }\n\n    public Framework(){\n        this( new String[0], null, null );\n    }\n\n    public Framework( String[] args ){\n        this( args, null, null );\n    }\n\n    public Framework( String[] args, String szName ){\n        this( args, szName, null );\n    }\n\n    public Framework( String[] args, CascadeSystem parent ){\n        this( args, null, parent );\n    }\n\n    public Framework( String[] args, String szName, CascadeSystem parent ){\n        this( szName, parent );\n        this.setStartupCommand( args );\n        this.init();\n    }\n\n    public Framework( String szName, CascadeSystem parent ) {\n        super( szName, parent );\n    }\n\n\n    public void registerPineExpCatcher( Function fn ){\n        
this.mfnAfterGlobalExpCaught = fn;\n    }\n\n    public long getBootTime(){\n        return this.mnBootTime;\n    }\n\n    public long getRunTime(){ //This function is used to calculate program run time\n        return System.currentTimeMillis() - this.mnBootTime;\n    }\n\n    public void traceRunTime() {\n        System.out.print( String.format(\n                \"\\n%s Runtime : %d /ms !\\n\", Pinecone.MY_PROGRAM_NAME, this.getRunTime()\n        ) );\n    }\n\n    private void initCommit() throws Throwable {\n        this.mnBootTime = System.currentTimeMillis();\n    }\n\n    protected Object invokeInitHandle(  Function fnInlet, Object...args  ) throws Exception {\n        this.setStartupCommand( (String[]) (Object[])args );\n        this.dispatchStartupCommand();\n        int nRetNum = 0;\n\n        try {\n            this.initCommit();\n            nRetNum = (int) fnInlet.invoke( args );\n\n            if( Pinecone.S_DEBUG_MODE ){\n                this.traceRunTime();\n            }\n        }\n        catch ( Throwable throwable ){\n            try{\n                this.handleRootKillException( throwable );\n            }\n            catch ( RestartSignalException e ) {\n                this.handleIgnoreException( e );\n                return e;\n            }\n            nRetNum = -1;\n        }\n\n        return nRetNum;\n    }\n\n    public int init ( Function fnInlet, Object...args ) throws Exception {\n        Object ret = null;\n        while ( true ) {\n            ret = this.invokeInitHandle( fnInlet, args );\n\n            if( ! 
(ret instanceof RestartSignalException ) ) {\n                return (int) ret;\n            }\n            else {\n                RestartSignalException e = (RestartSignalException) ret;\n                this.console().warn( String.format(\n                        \"[PineconeLifecycle] [WARN] System restart [Time: %s] [What:<%s>:%s]\",\n                        this.mExceptionRestartCount, e.getCause().getClass().getSimpleName(), e.getCause().getMessage()\n                ));\n                e.getCause().printStackTrace();\n            }\n        }\n\n    }\n\n    @Override\n    public ClassLoader getGlobalClassLoader() {\n        return this.mGlobalClassLoader;\n    }\n\n    @Override\n    public void setGlobalClassLoader( ClassLoader classLoader ) {\n        this.mGlobalClassLoader = classLoader;\n    }\n\n    public InputStream in(){\n        return this.mIn;\n    }\n\n    public InputStream inSync(){\n        this.mResourceLock.readLock().lock();\n        try{\n            return this.in();\n        }\n        finally {\n            this.mResourceLock.readLock().unlock();\n        }\n    }\n\n    public OutputStream out(){\n        return this.mOut;\n    }\n\n    public PrintStream  pout(){\n        try{\n            return (PrintStream) this.mOut;\n        }\n        catch ( ClassCastException e ) {\n            return new PrintStream( this.mOut );\n        }\n    }\n\n    public OutputStream outSync(){\n        this.mResourceLock.readLock().lock();\n        try{\n            return this.out();\n        }\n        finally {\n            this.mResourceLock.readLock().unlock();\n        }\n    }\n\n\n    @Override\n    public Tracer console() {\n        return this.mConsole;\n    }\n\n    public Tracer consoleSync() {\n        this.mResourceLock.readLock().lock();\n        Tracer tracer = this.console();\n        this.mResourceLock.readLock().unlock();\n        return tracer;\n    }\n\n    public Tracer setConsole( Tracer tracer ) {\n        
this.mResourceLock.writeLock().lock();\n        this.mConsole = tracer;\n        this.mResourceLock.writeLock().unlock();\n        return this.mConsole;\n    }\n\n    public InputStream setIn( InputStream in ) {\n        this.mResourceLock.writeLock().lock();\n        this.mIn = in;\n        this.mResourceLock.writeLock().unlock();\n        return this.mIn;\n    }\n\n    public OutputStream setOut( OutputStream out ) {\n        this.mResourceLock.writeLock().lock();\n        this.mOut = out;\n        this.mResourceLock.writeLock().unlock();\n        return this.mOut;\n    }\n\n    protected void traceWelcomeInfo() { }\n\n    @Override\n    public CascadeSystem parentExecutum(){\n        return (CascadeSystem)super.parentExecutum();\n    }\n\n    @Override\n    public String[] getStartupCommand(){\n        return this.mStartupCommand;\n    }\n\n    @Override\n    public String getMajorPackagePath() {\n        return this.mszMajorPackagePath;\n    }\n\n    @Override\n    public String getRuntimeContextPath() {\n        return this.mszRuntimeContextPath;\n    }\n\n    @Override\n    public String getRuntimePath() {\n        return this.mszRuntimePath;\n    }\n\n    @Override\n    public void   setRuntimePath( String szRealRuntimePath ){\n        this.mszRuntimePath = szRealRuntimePath;\n    }\n\n    @Override\n    public JSONSystemConfig getGlobalConfig() {\n        return this.mjoGlobalConfig;\n    }\n\n    @Override\n    public JSONSystemConfig getSystemConfig() {\n        return this.mjoSystemConfig;\n    }\n\n    @Override\n    public CascadeSystem rootSystem(){\n        CascadeSystem system = this.getParent();\n        CascadeSystem root   = system;\n        while ( true ) {\n            if( system != null ){\n                root   = system;\n                system = system.getParent();\n            }\n            else {\n                break;\n            }\n        }\n        return root;\n    }\n\n    @Override\n    public long getPrimaryId() {\n        if( 
this.getSystemId() == 0 ) {\n            return this.getSystemId();\n        }\n\n        CascadeSystem root   = this.rootSystem();\n\n        if( root == null ) {\n            this.console().warn( \"[PineconeLifecycle] [WARN] Id of primary system should be always 0.\" );\n            return this.getSystemId();\n        }\n\n        return root.getPrimaryId();\n    }\n\n    @Override\n    public CascadeSystem getParent(){\n        return (CascadeSystem)this.mParentSystem;\n    }\n\n    @Override\n    public Thread getProcessMainThread() {\n        return this.mMainThread;\n    }\n\n    @Override\n    public void handleLiveException( Exception e ) throws ProvokeHandleException {\n        this.console().warn( e.toString() );\n    }\n\n    @Override\n    public void handleAsynLiveException( Exception e ) throws ProvokeHandleException {\n\n    }\n\n    @Override\n    public void handleAsynKillException( Exception e ) throws ProvokeHandleException {\n\n    }\n\n    // Lifecycle\n    protected void handleRootKillException( Throwable e ) throws RestartSignalException {\n        try{\n            this.mfnAfterGlobalExpCaught.invoke( e );\n        }\n        catch ( Exception e1 ) {\n            e = e1;\n        }\n\n        if( e instanceof InstantKillError ) {\n            this.kill();\n        }\n        if( e instanceof Error ) {\n            this.kill();\n        }\n\n        if( e instanceof Exception ) {\n            if( this.mExceptionRestartCount < this.mExceptionRestartTime ) {\n                ++this.mExceptionRestartCount;\n                throw new RestartSignalException( e );\n            }\n            else {\n                this.kill();\n            }\n        }\n\n    }\n\n    protected void beforeReluctantDeath() {\n\n    }\n\n    @Override\n    public void  entreatLive() {\n        this.beforeReluctantDeath();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/GenericMasterTaskManager.java",
    "content": "package com.pinecone.framework.system;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.Map;\nimport java.util.concurrent.BlockingDeque;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.LinkedBlockingDeque;\nimport java.util.concurrent.Phaser;\nimport java.util.concurrent.TimeUnit;\n\nimport com.pinecone.framework.system.executum.EventedTaskManager;\nimport com.pinecone.framework.system.executum.ExclusiveProcessum;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.executum.VitalResource;\n\npublic class GenericMasterTaskManager implements EventedTaskManager {\n    protected Processum                               mParentProcessum     ;\n    protected ClassLoader                             mClassLoader         ;\n    protected RuntimeSystem                           mSystem;\n    protected Map<Long, Executum>                     mExecutumPool        = new ConcurrentHashMap<>();\n    protected Map<String, ExclusiveProcessum >        mExclusiveTasks      = new ConcurrentHashMap<>();\n    protected Map<Integer, VitalResource>             mVitalResourcePool   = new ConcurrentHashMap<>();\n    protected long                                    mnVitalizeCount      = 0;\n    protected long                                    mnFatalityCount      = 0;\n    protected long                                    mnMaxWaitApoptosis   = 5000;\n    protected final Object                            mTerminationLock     = new Object();\n    protected BlockingDeque<Executum >                mSyncApoptosisQueue  = new LinkedBlockingDeque<>();\n    protected Phaser                                  mFinishingPhaser     = new Phaser( 1 );\n\n    public GenericMasterTaskManager( Processum parent, ClassLoader classLoader ) {\n        this.mParentProcessum = parent;\n        if( parent 
instanceof RuntimeSystem ) {\n            this.mSystem = (RuntimeSystem) parent;\n        }\n        else {\n            this.mSystem = parent.parentSystem();\n        }\n\n        this.mClassLoader = classLoader;\n    }\n\n    public GenericMasterTaskManager( Processum parent ) {\n        this( parent, null );\n\n        if( this.mSystem != null ) {\n            this.mClassLoader = this.mSystem.getGlobalClassLoader();\n        }\n        else {\n            this.mClassLoader = Thread.currentThread().getContextClassLoader();\n        }\n    }\n\n\n    protected BlockingDeque<Executum  >    getSyncApoptosisQueue(){\n        return this.mSyncApoptosisQueue;\n    }\n\n    public Map<Long, Executum > getExecutumPool() {\n        return this.mExecutumPool;\n    }\n\n    public Map<String, ExclusiveProcessum > getExclusiveTasks() {\n        return this.mExclusiveTasks;\n    }\n\n    @Override\n    public Processum getParentProcessum () {\n        return this.mParentProcessum;\n    }\n\n    @Override\n    public RuntimeSystem getSystem() {\n        return this.mSystem;\n    }\n\n    @Override\n    public ClassLoader getClassLoader() {\n        return this.mClassLoader;\n    }\n\n    @Override\n    public Map<Integer, VitalResource > getVitalResources() {\n        return this.mVitalResourcePool;\n    }\n\n    @Override\n    public void executeZionSequence() {\n        Map<Integer, VitalResource   > map = this.getVitalResources();\n        for ( Map.Entry<Integer, VitalResource > kv : map.entrySet() ) {\n            VitalResource resource = kv.getValue();\n            try{\n                resource.store();\n            }\n            catch ( Throwable e ) {\n                System.err.println(\n                        String.format( \"[FatesCriticalWarn] [VitalResource: %s, Id: %d] [StoreFailed]\", resource.getName(), resource.getId() )\n                );\n            }\n        }\n    }\n\n    @Override\n    public void sendApoptosisSignal() {\n        for ( 
Map.Entry<Long, Executum > kv : this.getExecutumPool().entrySet() ) {\n            kv.getValue().apoptosis();\n        }\n    }\n\n    protected void killAll() {\n        if( !this.isTerminated() ) {\n            for ( Map.Entry<Long, Executum > kv : this.getExecutumPool().entrySet() ) {\n                kv.getValue().kill();\n            }\n\n            if( !this.mFinishingPhaser.isTerminated() ) {\n                this.mFinishingPhaser.forceTermination();\n            }\n        }\n    }\n\n    @Override\n    public void terminate() {\n        this.executeZionSequence();\n        this.killAll();\n    }\n\n    @Override\n    public void  suspendAll() {\n        for ( Map.Entry<Long, Executum > kv : this.getExecutumPool().entrySet() ) {\n            kv.getValue().suspend();\n        }\n    }\n\n    @Override\n    public void  resumeAll() {\n        for ( Map.Entry<Long, Executum > kv : this.getExecutumPool().entrySet() ) {\n            kv.getValue().resume();\n        }\n    }\n\n    @Override\n    public int     size(){\n        return this.getExecutumPool().size();\n    }\n\n    @Override\n    public boolean isPooled(){\n        return true;\n    }\n\n    @Override\n    public long    getVitalizeCount() {\n        return this.mnVitalizeCount;\n    }\n\n    @Override\n    public long    getFatalityCount() {\n        return this.mnFatalityCount;\n    }\n\n    @Override\n    public Executum add( Executum that ){\n        this.getExecutumPool().put( that.getExecutumId(), that );\n        if( that instanceof ExclusiveProcessum ) {\n            this.getExclusiveTasks().put( that.getName(), (ExclusiveProcessum) that );\n        }\n        return that;\n    }\n\n    @Override\n    public void erase( Executum that ){\n        if( this.autopsy( that ) ) {\n            this.getExecutumPool().remove( that.getExecutumId() );\n            this.getExclusiveTasks().remove( that.getName() );\n            ++this.mnFatalityCount;\n        }\n        else {\n            throw new 
IllegalStateException( \"Executum is still alive.\" );\n        }\n    }\n\n    @Override\n    public void purge() {\n        this.terminate();\n        this.getExecutumPool().clear();\n        this.getVitalResources().clear();\n        this.getExclusiveTasks().clear();\n    }\n\n    @Override\n    public boolean isTerminated(){\n        boolean b = true;\n        for ( Map.Entry<Long, Executum > kv : this.getExecutumPool().entrySet() ) {\n            Thread primaryAffiliateThread = kv.getValue().getAffiliateThread();\n            if( primaryAffiliateThread != null ) { // null is uninitialized thread.\n                if( !primaryAffiliateThread.isDaemon() ) {\n                    b &= kv.getValue().isTerminated();\n                }\n            }\n        }\n        return b;\n    }\n\n    @Override\n    public void syncWaitingTerminated() throws Exception {\n        this.mFinishingPhaser.arriveAndAwaitAdvance();\n\n        if( !this.isTerminated() ){\n            while ( true ) {\n                if( this.isTerminated() ) {\n                    break;\n                }\n\n                synchronized ( this.mTerminationLock ) {\n                    this.mTerminationLock.wait( 30 );\n                }\n            }\n        }\n    }\n\n    protected Executum spawn     ( String szClassPath, Object... 
args ) {\n        Executum obj = null;\n        try {\n            Class<?>[] paramTypes;\n            if( args.length > 0 && args[0] instanceof Class<?>[] ) {\n                paramTypes = (Class<?>[]) args[0];\n                Object[] neoArgs = new Object[ args.length - 1 ];\n                for ( int i = 0; i < neoArgs.length; i++ ) {\n                    neoArgs[i] = args[i+1];\n                }\n                args = neoArgs;\n            }\n            else {\n                paramTypes = new Class<?>[ args.length ];\n                for ( int i = 0; i < args.length; i++ ) {\n                    paramTypes[i] = args[i].getClass();\n                }\n            }\n\n            Class<?> pVoid = this.getClassLoader().loadClass( szClassPath );\n            try{\n                Constructor<?> constructor = pVoid.getConstructor( paramTypes );\n                obj = (Processum) constructor.newInstance( args );\n            }\n            catch ( NoSuchMethodException | InvocationTargetException e1 ){\n                this.getSystem().handleLiveException( e1 );\n            }\n        }\n        catch ( ClassNotFoundException | IllegalAccessException | InstantiationException e ){\n            this.getSystem().handleLiveException( e );\n        }\n        return obj;\n    }\n\n    @Override\n    public Executum summon       ( String szClassPath, Object... 
args ) throws Exception {\n        String[] debris   = szClassPath.split( \"\\\\.\" );\n        String szTaskName = debris[ debris.length - 1 ];\n        Executum obj = (Executum) this.getExclusiveTasks().get( szTaskName );\n        if( obj != null ) {\n            return obj;\n        }\n\n        obj = this.spawn( szClassPath, args );\n        this.add( obj );\n\n        ++this.mnVitalizeCount;\n        return obj;\n    }\n\n    @Override\n    public void     kill          ( Executum that ) {\n        that.kill();\n        this.erase( that );\n    }\n\n    protected boolean isApproveLifeRenewal( ApoptosisRejectSignalException e ) {\n        return true; // TODO\n    }\n\n    @Override\n    public void     apoptosis     ( Executum that ) {\n        try{\n            that.apoptosis();\n        }\n        catch ( ApoptosisRejectSignalException e ) {\n            if( this.isApproveLifeRenewal( e ) ) {\n                return;\n            }\n            else {\n                try {\n                    that.apoptosis();\n                }\n                catch ( ApoptosisRejectSignalException e1 ) { // No more wait, just going to die.\n                    System.err.println(\n                            String.format( \"[FatesCriticalWarn] [Executum: %d] [ForceApoptosis]\", that.hashCode() )\n                    );\n                }\n            }\n        }\n\n        try{\n            Executum suspect = this.getSyncApoptosisQueue().poll( this.mnMaxWaitApoptosis, TimeUnit.MILLISECONDS );\n            if( suspect == that ) {\n                this.kill( that );\n            }\n        }\n        catch ( InterruptedException e ) {\n            this.kill( that );\n        }\n    }\n\n    @Override\n    public void     commitSuicide ( Executum that ){\n        this.getSyncApoptosisQueue().add( that );\n    }\n\n    @Override\n    public boolean  autopsy       ( Executum that ) {\n        return true; //TODO\n    }\n\n    @Override\n    public String   nomenclature    ( 
Thread that   ) {\n        return String.format(\n                \"proc-%s-%s\",this.getParentProcessum().getName(), that.getName()\n        ).toLowerCase();\n    }\n\n    @Override\n    public void notifyFinished    ( Executum that ){\n        this.mFinishingPhaser.arriveAndDeregister();\n    }\n\n    @Override\n    public void notifyExecuting   ( Executum that ){\n        this.mFinishingPhaser.register();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/InstantKillError.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class InstantKillError extends Error implements Pinenut {\n    public InstantKillError() {\n        super();\n    }\n\n    public InstantKillError( String message ) {\n        super(message);\n    }\n\n    public InstantKillError( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public InstantKillError( Throwable cause ) {\n        super(cause);\n    }\n\n    protected InstantKillError( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/IntegratedSubsystem.java",
    "content": "package com.pinecone.framework.system;\n\npublic interface IntegratedSubsystem extends Subsystem {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/IrrationalProvokedException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class IrrationalProvokedException extends PineRuntimeException {\n    protected IrrationalProvokedType irrationalProvokedType;\n\n    public IrrationalProvokedException() {\n        this( IrrationalProvokedType.Aberration );\n    }\n\n    public IrrationalProvokedException( IrrationalProvokedType type ) {\n        super();\n\n        this.irrationalProvokedType = type;\n    }\n\n    public IrrationalProvokedException( String message, IrrationalProvokedType type ) {\n        super( message );\n\n        this.irrationalProvokedType = type;\n    }\n\n    public IrrationalProvokedException( String message ) {\n        this( message, IrrationalProvokedType.Aberration );\n    }\n\n    public IrrationalProvokedException( String message, Throwable cause, IrrationalProvokedType type ) {\n        super( message, cause );\n\n        this.irrationalProvokedType = type;\n    }\n\n    public IrrationalProvokedException( String message, Throwable cause ) {\n        this( message, cause, IrrationalProvokedType.Aberration );\n    }\n\n    public IrrationalProvokedException( Throwable cause, IrrationalProvokedType type ) {\n        super(cause);\n\n        this.irrationalProvokedType = type;\n    }\n\n    public IrrationalProvokedException( Throwable cause ) {\n        this( cause, IrrationalProvokedType.Aberration );\n    }\n\n    public IrrationalProvokedType getIrrationalProvokedType() {\n        return this.irrationalProvokedType;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/IrrationalProvokedType.java",
    "content": "package com.pinecone.framework.system;\n\npublic enum IrrationalProvokedType {\n    Aberration     ( \"Aberration\"      ), // Should never happen, but happened.\n    Expected       ( \"Expected\"        ), // Programmatic designed exception.\n    Architectural  ( \"Architectural\"   ), // Architecturally critical errors.\n    Destructive    ( \"Destructive\"     ); // Structural breach trigger.\n\n    private final String name;\n\n    IrrationalProvokedType( String name ){\n        this.name  = name;\n    }\n\n    public String getName(){\n        return this.name;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ModularizedSubsystem.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.util.config.PatriarchalConfig;\n\npublic interface ModularizedSubsystem extends Subsystem {\n\n    RuntimeSystem parentSystem();\n\n    void vitalize();\n\n    void terminate();\n\n    void release();\n\n    PatriarchalConfig getSubsystemConfig();\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NestedCheckedException.java",
    "content": "package com.pinecone.framework.system;\n\npublic abstract class NestedCheckedException extends Exception {\n    private static final long serialVersionUID = 7100714597678207546L;\n\n    public NestedCheckedException( String msg ) {\n        super(msg);\n    }\n\n    public NestedCheckedException( @Nullable String msg, @Nullable Throwable cause ) {\n        super(msg, cause);\n    }\n\n    @Nullable\n    @Override\n    public String getMessage() {\n        return NestedExceptionUtils.buildMessage(super.getMessage(), this.getCause());\n    }\n\n    @Nullable\n    public Throwable getRootCause() {\n        return NestedExceptionUtils.getRootCause(this);\n    }\n\n    public Throwable getMostSpecificCause() {\n        Throwable rootCause = this.getRootCause();\n        return (Throwable)(rootCause != null ? rootCause : this);\n    }\n\n    public boolean contains(@Nullable Class<?> exType) {\n        if ( exType == null ) {\n            return false;\n        }\n        else if ( exType.isInstance(this) ) {\n            return true;\n        }\n        else {\n            Throwable cause = this.getCause();\n            if ( cause == this ) {\n                return false;\n            }\n            else if ( cause instanceof NestedCheckedException ) {\n                return ((NestedCheckedException)cause).contains(exType);\n            }\n            else {\n                while(cause != null) {\n                    if (exType.isInstance(cause)) {\n                        return true;\n                    }\n\n                    if (cause.getCause() == cause) {\n                        break;\n                    }\n\n                    cause = cause.getCause();\n                }\n\n                return false;\n            }\n        }\n    }\n\n    static {\n        NestedExceptionUtils.class.getName();\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NestedExceptionUtils.java",
    "content": "package com.pinecone.framework.system;\n\npublic final class NestedExceptionUtils {\n    public NestedExceptionUtils() {\n\n    }\n\n    @Nullable\n    public static String buildMessage( @Nullable String message, @Nullable Throwable cause ) {\n        if (cause == null) {\n            return message;\n        }\n        else {\n            StringBuilder sb = new StringBuilder(64);\n            if ( message != null ) {\n                sb.append(message).append(\"; \");\n            }\n\n            sb.append( \"nested exception is \" ).append( cause );\n            return sb.toString();\n        }\n    }\n\n    @Nullable\n    public static Throwable getRootCause( @Nullable Throwable original ) {\n        if ( original == null ) {\n            return null;\n        }\n        else {\n            Throwable rootCause = null;\n\n            for( Throwable cause = original.getCause(); cause != null && cause != rootCause; cause = cause.getCause() ) {\n                rootCause = cause;\n            }\n\n            return rootCause;\n        }\n    }\n\n    public static Throwable getMostSpecificCause( Throwable original ) {\n        Throwable rootCause = NestedExceptionUtils.getRootCause(original);\n        return rootCause != null ? rootCause : original;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NestedRuntimeException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class NestedRuntimeException extends PineRuntimeException {\n    private static final long serialVersionUID = 1312001337874041913L;\n\n    public NestedRuntimeException( String msg ) {\n        super( msg );\n    }\n\n    public NestedRuntimeException( @Nullable String msg, @Nullable Throwable cause ) {\n        super(msg, cause);\n    }\n\n    @Nullable\n    @Override\n    public String getMessage() {\n        return NestedExceptionUtils.buildMessage(super.getMessage(), this.getCause());\n    }\n\n    @Nullable\n    public Throwable getRootCause() {\n        return NestedExceptionUtils.getRootCause(this);\n    }\n\n    public Throwable getMostSpecificCause() {\n        Throwable rootCause = this.getRootCause();\n        return (Throwable)(rootCause != null ? rootCause : this);\n    }\n\n    public boolean contains( @Nullable Class<?> exType ) {\n        if ( exType == null ) {\n            return false;\n        }\n        else if ( exType.isInstance(this) ) {\n            return true;\n        }\n        else {\n            Throwable cause = this.getCause();\n            if ( cause == this ) {\n                return false;\n            }\n            else if ( cause instanceof NestedRuntimeException ) {\n                return ((NestedRuntimeException)cause).contains(exType);\n            }\n            else {\n                while( cause != null ) {\n                    if ( exType.isInstance(cause) ) {\n                        return true;\n                    }\n\n                    if ( cause.getCause() == cause ) {\n                        break;\n                    }\n\n                    cause = cause.getCause();\n                }\n\n                return false;\n            }\n        }\n    }\n\n    static {\n        NestedExceptionUtils.class.getName();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NoSuchProviderException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class NoSuchProviderException extends Exception {\n    public NoSuchProviderException() {\n        super();\n    }\n\n    public NoSuchProviderException( String message ) {\n        super( message );\n    }\n\n    public NoSuchProviderException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    public NoSuchProviderException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Noexcept.java",
    "content": "package com.pinecone.framework.system;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.METHOD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Noexcept {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NonNull.java",
    "content": "package com.pinecone.framework.system;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.METHOD, ElementType.PARAMETER, ElementType.FIELD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface NonNull {\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/NotImplementedException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class NotImplementedException extends PineRuntimeException {\n    public NotImplementedException() {\n        super();\n    }\n\n    public NotImplementedException( String message ) {\n        super( message );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Nullable.java",
    "content": "package com.pinecone.framework.system;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.METHOD, ElementType.PARAMETER, ElementType.FIELD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Nullable {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ParseException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class ParseException extends PineRuntimeException {\n    protected int errorOffset;\n\n    public int getErrorOffset () {\n        return errorOffset;\n    }\n\n    public ParseException    ( String what ) {\n        this( what, -1 );\n    }\n\n    public ParseException    ( String what, int errorOffset ) {\n        super( what );\n        this.errorOffset = errorOffset;\n    }\n\n    public ParseException    ( String message, int errorOffset, Throwable cause ) {\n        super( message, cause );\n        this.errorOffset = errorOffset;\n    }\n\n    public ParseException    ( Throwable cause, int errorOffset ) {\n        super(cause.getMessage(), cause);\n        this.errorOffset = errorOffset;\n    }\n\n    public ParseException    ( Throwable cause ) {\n        this( cause, -1 );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/PieceworkManager.java",
    "content": "package com.pinecone.framework.system;\n\npublic class PieceworkManager {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/PineRuntimeException.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class PineRuntimeException extends RuntimeException implements Pinenut {\n    public PineRuntimeException    () {\n        super();\n    }\n\n    public PineRuntimeException    ( String message ) {\n        super(message);\n    }\n\n    public PineRuntimeException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public PineRuntimeException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected PineRuntimeException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Pinecore.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.util.io.Tracer;\n\npublic interface Pinecore extends AsynSystem, CascadeSystem, ConformitySystem, RuntimeSystem {\n    Tracer console();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/PrimarySystem.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.executum.ExclusiveProcessum;\n\npublic interface PrimarySystem extends RuntimeSystem, ExclusiveProcessum {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ProvokeHandleException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class ProvokeHandleException extends PineRuntimeException {\n    public ProvokeHandleException() {\n        super();\n    }\n\n    public ProvokeHandleException( String message ) {\n        super( message );\n    }\n\n    public ProvokeHandleException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    public ProvokeHandleException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/ProxyProvokeHandleException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class ProxyProvokeHandleException extends PineRuntimeException {\n    public ProxyProvokeHandleException() {\n        super();\n    }\n\n    public ProxyProvokeHandleException( String message ) {\n        super( message );\n    }\n\n    public ProxyProvokeHandleException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    public ProxyProvokeHandleException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RedirectRuntimeException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class RedirectRuntimeException extends PineRuntimeException {\n    public RedirectRuntimeException    () {\n        super();\n    }\n\n    public RedirectRuntimeException    ( String message ) {\n        super(message);\n    }\n\n    public RedirectRuntimeException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RedirectRuntimeException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected RedirectRuntimeException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RestartSignalException.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class RestartSignalException extends Exception implements Pinenut {\n    public RestartSignalException    () {\n        super();\n    }\n\n    public RestartSignalException    ( String message ) {\n        super(message);\n    }\n\n    public RestartSignalException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RestartSignalException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected RestartSignalException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RuntimeConstructionException.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class RuntimeConstructionException extends PineRuntimeException implements Pinenut {\n    public RuntimeConstructionException    () {\n        super();\n    }\n\n    public RuntimeConstructionException    ( String message ) {\n        super(message);\n    }\n\n    public RuntimeConstructionException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RuntimeConstructionException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected RuntimeConstructionException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RuntimeInstantiationException.java",
    "content": "package com.pinecone.framework.system;\n\npublic class RuntimeInstantiationException extends RuntimeConstructionException {\n    public RuntimeInstantiationException() {\n        super();\n    }\n\n    public RuntimeInstantiationException( String message ) {\n        super( message );\n    }\n\n    public RuntimeInstantiationException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    public RuntimeInstantiationException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/RuntimeSystem.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.executum.Lifecycle;\nimport com.pinecone.framework.system.executum.Systemum;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.config.SystemConfig;\n\nimport java.util.Map;\n\npublic interface RuntimeSystem extends Pinenut, Systemum, Lifecycle {\n    String[]                   getStartupCommand();\n\n    Map<String, String[] >     getStartupCommandMap();\n\n    Map<String, String[] >     getEnvironmentVars();\n\n    SystemConfig               getGlobalConfig();\n\n    String                     getMajorPackagePath();\n\n    String                     getRuntimeContextPath();\n\n    String                     getRuntimePath();\n\n    void                       setRuntimePath( String szRealRuntimePath );\n\n    ClassLoader                getGlobalClassLoader();\n\n    void                       setGlobalClassLoader( ClassLoader classLoader );\n\n    void                       handleLiveException( Exception e ) throws ProvokeHandleException;\n\n    default void               handleKillException( Exception e ) throws ProvokeHandleException {\n        throw new ProvokeHandleException( e );\n    }\n\n    default void               handleIgnoreException( Exception e ) throws ProvokeHandleException {\n        // Just ignore them.\n    }\n\n    /**\n     * Those Exceptions should never happened.\n     */\n    default void               handleDummyException( Exception e ) throws ProvokeHandleException {\n        throw new ProvokeHandleException( e );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Subsystem.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.executum.Systema;\n\npublic interface Subsystem extends Systema {\n\n\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/SynergicSystem.java",
    "content": "package com.pinecone.framework.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface SynergicSystem extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/Unsafe.java",
    "content": "package com.pinecone.framework.system;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.METHOD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Unsafe {\n    String value() default \"\";\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/aop/InfrastructureProxy.java",
    "content": "package com.pinecone.framework.system.aop;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface InfrastructureProxy extends Pinenut {\n    Object getWrappedObject();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/aop/RawTargetAccess.java",
    "content": "package com.pinecone.framework.system.aop;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface RawTargetAccess extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/aop/ScopedObject.java",
    "content": "package com.pinecone.framework.system.aop;\n\npublic interface ScopedObject extends RawTargetAccess {\n    Object getTargetObject();\n\n    void removeFromScope();\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ArchCascadeComponent.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.framework.util.name.UniNamespace;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\npublic abstract class ArchCascadeComponent extends ArchComponent implements CascadeComponent {\n    private CascadeComponent                mParent;\n    private Namespace                       mName;\n    private Map<String, Component >         mChildren;\n\n    protected ArchCascadeComponent( Namespace name, CascadeComponentManager manager, CascadeComponent parent ) {\n        super( manager );\n        this.mName     = name;\n        this.mChildren = new LinkedTreeMap<>();\n\n        if( name == null ) {\n            this.setTargetingName( this.className() );\n        }\n        this.setParent( parent );\n    }\n\n    @Override\n    public CascadeComponent parent() {\n        return this.mParent;\n    }\n\n    @Override\n    public void setParent( CascadeComponent parent ) {\n        this.mParent = parent;\n        if( parent != null ) {\n            this.mName.setParent( parent.getTargetingName() );\n        }\n    }\n\n    @Override\n    public Namespace getTargetingName() {\n        return this.mName;\n    }\n\n    @Override\n    public void setTargetingName( Namespace name ) {\n        this.mName = name;\n    }\n\n    @Override\n    public void setTargetingName( String name ) {\n        CascadeComponent.super.setTargetingName( name );\n    }\n\n    @Override\n    public Collection<Component > children() {\n        return this.mChildren.values();\n    }\n\n    protected Map<String, Component > getChildren() {\n        return this.mChildren;\n    }\n\n    @Override\n    public CascadeComponentManager getComponentManager() {\n        return (CascadeComponentManager) 
super.getComponentManager();\n    }\n\n    @Override\n    public void addChildComponent( CascadeComponent child ) {\n        child.setParent( this );\n        this.referChildComponent( child );\n        this.getComponentManager().addComponent( child );\n    }\n\n    @Override\n    public void referChildComponent    ( Component child ) {\n        this.mChildren.put( child.getFullName(), child );\n    }\n\n    @Override\n    public void detachChildComponent( String fullName ) {\n        this.mChildren.remove( fullName );\n    }\n\n    public void removeChildComponent  ( @Nullable Component child, String fullName ) {\n        if( child == null ) {\n            child = this.getChildComponentByFullName( fullName );\n        }\n\n        if( child != null ) {\n            this.detachChildComponent( fullName );\n            if( child instanceof CascadeComponent && this.ownedChild( (CascadeComponent)child ) ) {\n                this.getComponentManager().removeComponent( child );\n            }\n        }\n    }\n\n    @Override\n    public void removeChildComponent  ( Component child ) {\n        this.removeChildComponent( child, child.getFullName() );\n    }\n\n    @Override\n    public void removeChildComponent  ( String fullName ) {\n        this.removeChildComponent( null, fullName );\n    }\n\n    @Override\n    public void clear() {\n        this.mChildren.clear();\n    }\n\n    @Override\n    public void independent( String newName ) {\n        if( this.mParent != null ) {\n            this.mParent = null;\n\n            this.getComponentManager().detachComponent( this );\n\n            this.mName.setName( newName );\n            this.mName.setParent( null );\n\n            this.getComponentManager().addComponent( this );\n        }\n    }\n\n    @Override\n    public void purge() {\n        this.purgeChildren();\n\n        String                szFN = this.getTargetingName().getFullName();\n        if ( this.mParent != null ) {\n            
this.mParent.removeChildComponent( szFN );\n        }\n        this.getComponentManager().removeComponent( szFN );\n    }\n\n    @Override\n    public void purgeChildren() {\n        List<Component > purgeList = new ArrayList<>( this.mChildren.values() );\n        for ( Component child : purgeList ) {\n            this.mChildren.remove( child.getFullName() );\n\n            if( child instanceof CascadeComponent ) {\n                if( this.ownedChild( (CascadeComponent)child ) ) { // Purge owned child,\n                    this.getComponentManager().removeComponent( child.getFullName() );\n                }\n            }\n        }\n    }\n\n    @Override\n    public boolean hasOwnChild( CascadeComponent child ) {\n        Component component = this.getChildComponentByFullName( child.getFullName() );\n        if( component instanceof CascadeComponent && component == child ) {\n            return this.ownedChild( child );\n        }\n        return false;\n    }\n\n    @Override\n    public boolean hasReferredChild( Component child ) {\n        return this.mChildren.containsKey( child.getFullName() );\n    }\n\n    @Override\n    public Component getChildComponentByFullName( String fullName ) {\n        return this.mChildren.get( fullName );\n    }\n\n    @Override\n    public int childSize() {\n        return this.mChildren.size();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ArchCascadeComponentManager.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\n\nimport java.util.Collection;\nimport java.util.Map;\n\n/**\n * The Omega Device\n * CascadeComponentManager can cascade control all node and its reference.\n * If one node owned its child, and others referred it:\n * 1. Mark-Sweep cascading effacement.\n * 1.1. Remove a node, and effaces itself and its own children will be erased from the whole scope.\n * 2. Cascading add.\n * 2.1 Add a new node, and will automatic marks and registers in its parent manager.\n * 3. Reference add.\n * 3.1 Refer a new node, and will only refers its instance without ownership.\n */\npublic abstract class ArchCascadeComponentManager extends ArchComponentManager implements CascadeComponentManager {\n    private Map<String, Component >  mComponentListMap;\n\n    protected ArchCascadeComponentManager( Map<String, Component > rootComponents, Map<String, Component > componentsList ) {\n        super( rootComponents );\n\n        this.mComponentListMap = componentsList;\n    }\n\n    protected ArchCascadeComponentManager() {\n        super();\n\n        this.mComponentListMap = new LinkedTreeMap<>();\n    }\n\n    protected Component onlyAdd( Component component ) {\n        Component v = this.mComponentListMap.put( component.getFullName(), component );\n\n        if( component instanceof CascadeComponent ) {\n            if( ((CascadeComponent) component).parent() != null ) {\n                return v;\n            }\n        }\n        this.rootComponents().put( component.getFullName(), component );\n\n        return v;\n    }\n\n    protected Component onlyRemove( String fullName ) {\n        Component v = this.mComponentListMap.remove( fullName );\n\n        if( v instanceof CascadeComponent ) {\n            if( ((CascadeComponent) v).parent() != null ) {\n                return v;\n            }\n        }\n        this.rootComponents().remove( fullName );\n        
return v;\n    }\n\n    protected Map<String, Component > getComponentListMap() {\n        return this.mComponentListMap;\n    }\n\n\n    @Override\n    public void addComponent   ( Component component ) {\n        this.onlyAdd( component );\n    }\n\n    @Override\n    public void detachComponent( Component component ) {\n        this.onlyRemove( component.getFullName() );\n    }\n\n    @Override\n    public void removeComponent( String fullName ) {\n        Component v = this.mComponentListMap.get( fullName );\n        if( v != null ) {\n            if( v instanceof CascadeComponent ){\n                for( Component c : this.mComponentListMap.values() ) {\n                    if( c instanceof CascadeComponent ) {\n                        ((CascadeComponent) c).detachChildComponent( fullName );\n                    }\n                }\n            }\n\n            this.onlyRemove( fullName );\n            if( v instanceof CascadeComponent ) {\n                CascadeComponent component = (CascadeComponent) v;\n                component.purge();\n            }\n        }\n    }\n\n    @Override\n    public Component getComponentByFullName( String fullName ) {\n        return this.mComponentListMap.get( fullName );\n    }\n\n    @Override\n    public Component getRootComponentByFullName( String fullName ) {\n        return super.getComponentByFullName( fullName );\n    }\n\n    @Override\n    public Collection<Component> getComponentsRegisterList() {\n        return this.mComponentListMap.values();\n    }\n\n    @Override\n    public int componentScopeSize() {\n        return this.getComponentListMap().size();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ArchComponent.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic abstract class ArchComponent implements Component {\n    private ComponentManager  mComponentManager;\n\n    protected ArchComponent( ComponentManager manager ) {\n        this.mComponentManager = manager;\n    }\n\n    @Override\n    public ComponentManager getComponentManager() {\n        return this.mComponentManager;\n    }\n\n    @Override\n    public void setComponentManager( ComponentManager componentManager ) {\n        this.mComponentManager = componentManager;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"class\", this.className() ),\n                new KeyValue<>( \"name\", this.getSimpleName() ),\n                new KeyValue<>( \"fullName\", this.getFullName() )\n        } );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ArchComponentManager.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\n\nimport java.util.Collection;\nimport java.util.Map;\n\npublic abstract class ArchComponentManager implements ComponentManager {\n    private Map<String, Component > mRootComponents;\n\n    protected ArchComponentManager( Map<String, Component > components ) {\n        this.mRootComponents = components;\n    }\n\n    protected ArchComponentManager() {\n        this( new LinkedTreeMap<>() );\n    }\n\n    protected Map<String, Component > rootComponents() {\n        return this.mRootComponents;\n    }\n\n    @Override\n    public void addComponent ( Component component ) {\n        this.mRootComponents.put( component.getFullName(), component );\n    }\n\n    @Override\n    public void removeComponent ( String fullName ){\n        this.mRootComponents.remove( fullName );\n    }\n\n    @Override\n    public Component getComponentByFullName ( String fullName ) {\n        return this.mRootComponents.get( fullName );\n    }\n\n    @Override\n    public int componentSize() {\n        return this.mRootComponents.size();\n    }\n\n    @Override\n    public Collection<Component > getComponents() {\n        return this.rootComponents().values();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/CascadeComponent.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport com.pinecone.framework.system.regimentation.CascadeNodus;\nimport com.pinecone.framework.util.name.Namespace;\n\nimport java.util.Collection;\n\npublic interface CascadeComponent extends Component, CascadeNodus {\n    @Override\n    CascadeComponent parent();\n\n    void setParent( CascadeComponent parent );\n\n    @Override\n    default boolean isRoot() {\n        return this.parent() == null;\n    }\n\n    @Override\n    default CascadeComponent root() {\n        return (CascadeComponent) CascadeNodus.super.root();\n    }\n\n    Collection<Component > children();\n\n    @Override\n    Namespace getTargetingName();\n\n    @Override\n    void setTargetingName( Namespace name );\n\n    @Override\n    default void setTargetingName( String name ) {\n        CascadeNodus.super.setTargetingName( name );\n    }\n\n    @Override\n    default String getSimpleName() {\n        return this.getTargetingName().getSimpleName();\n    }\n\n    @Override\n    default String getFullName() {\n        return this.getTargetingName().getFullName();\n    }\n\n    CascadeComponentManager getComponentManager();\n\n    void addChildComponent      ( CascadeComponent child ) ;\n\n\n    void detachChildComponent   ( String fullName );\n\n    void referChildComponent    ( Component child ) ;\n\n    void removeChildComponent   ( Component child );\n\n    void removeChildComponent   ( String fullName ) ;\n\n    default boolean ownedChild  ( CascadeComponent child ) {\n        return child.parent() == this;\n    }\n\n    boolean hasOwnChild         ( CascadeComponent child ) ;\n\n    boolean hasReferredChild    ( Component child ) ;\n\n    Component getChildComponentByFullName( String fullName ) ;\n\n\n    // Only clear all children reference.\n    void clear() ;\n\n    // if this has parent, mark it as null, and elevated to a root node.\n    void independent( String newName );\n\n    // Purge itself and its own 
children\n    void purge();\n\n    // Purge its own children\n    void purgeChildren();\n\n    int childSize() ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/CascadeComponentManager.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport java.util.Collection;\n\npublic interface CascadeComponentManager extends ComponentManager {\n    int componentScopeSize() ;\n\n    Component getRootComponentByFullName ( String fullName );\n\n    Collection<Component > getComponentsRegisterList();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/Component.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Component extends Pinenut {\n    String getSimpleName();\n\n    String getFullName();\n\n    ComponentManager getComponentManager();\n\n    void setComponentManager( ComponentManager componentManager );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/ComponentManager.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\n\nimport java.util.Collection;\n\npublic interface ComponentManager extends Manager {\n    void addComponent                ( Component component ) ;\n\n    void detachComponent             ( Component component ) ;\n\n    default void removeComponent     ( Component component ){\n        this.removeComponent( component.getFullName() );\n    }\n\n    void removeComponent             ( String fullName );\n\n    Component getComponentByFullName ( String fullName );\n\n    int componentSize() ;\n\n    Collection<Component > getComponents();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/SystemComponent.java",
    "content": "package com.pinecone.framework.system.architecture;\n\nimport com.pinecone.framework.system.RuntimeSystem;\n\npublic interface SystemComponent extends Component {\n    RuntimeSystem getSystem();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/architecture/SystemComponentManager.java",
    "content": "package com.pinecone.framework.system.architecture;\n\npublic interface SystemComponentManager extends ComponentManager {\n    @Override\n    SystemComponent getComponentByFullName( String fullName );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/DynamicInstancePool.java",
    "content": "package com.pinecone.framework.system.construction;\n\npublic interface DynamicInstancePool<T > extends InstancePool<T > {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/DynamicStructure.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport java.lang.annotation.Repeatable;\n\n@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD})\n@Retention(RetentionPolicy.RUNTIME)\npublic @interface DynamicStructure {\n    String name() default \"\";\n\n    String lookup() default \"\";\n\n    // If true, will allowed this object member fields assignment likes the C/C++/Go struct.\n    boolean directlyStruct() default false;\n\n    Class<? > type() default Object.class;\n\n    ReuseCycle cycle() default ReuseCycle.Singleton;\n\n    DynamicStructure.AuthenticationType authenticationType() default DynamicStructure.AuthenticationType.CONTAINER;\n\n    boolean shareable() default true;\n\n    String mappedName() default \"\";\n\n    String description() default \"\";\n\n    // Instancing handle\n    // TODO\n    Class<?> provider() default void.class;\n\n    String providerMethod() default \"\";\n\n    enum AuthenticationType {\n        CONTAINER,\n        APPLICATION;\n\n        AuthenticationType() {\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/GenericDynamicInstancePool.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.lang.DynamicFactory;\n\nimport java.util.concurrent.BlockingQueue;\nimport java.util.concurrent.LinkedBlockingQueue;\n\npublic class GenericDynamicInstancePool<T > implements DynamicInstancePool<T > {\n    private BlockingQueue<T > mPool;\n    private DynamicFactory    mFactory;\n    private Class<T >         mClassType;\n    private int               mCapacity;\n    private int               mFreeSize;\n    private int               mPreAllocate;\n\n    public GenericDynamicInstancePool(DynamicFactory factory, Class<T > classType ) {\n        this( factory, 0, 0, classType );\n    }\n\n    public GenericDynamicInstancePool(DynamicFactory factory, int preAllocate, Class<T > classType ) {\n        this( factory, 0, preAllocate, classType );\n    }\n\n    public GenericDynamicInstancePool(DynamicFactory factory, int capacity, int preAllocate, Class<T > classType ) {\n        this.mPool        = new LinkedBlockingQueue<>();\n        this.mFactory     = factory;\n        this.mCapacity    = capacity > 0 ? 
capacity : Integer.MAX_VALUE;\n        this.mClassType   = classType;\n        this.mPreAllocate = preAllocate;\n        this.mFreeSize    = this.mCapacity;\n\n        this.preAllocate( preAllocate );\n    }\n\n    protected T newInstance() {\n        try{\n            return this.mClassType.cast( this.mFactory.newInstance( this.mClassType, null, null ) );\n        }\n        catch ( Exception e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    @Override\n    public T allocate() {\n        T obj = this.mPool.poll();\n        if ( obj == null ) {\n            int availableCapacity = this.freeSize();\n            if ( availableCapacity > 0 ) {\n                int allocateCount = 1;\n                if( this.mPreAllocate > 0 ) {\n                    allocateCount = Math.min( availableCapacity, this.mPreAllocate );\n                }\n                this.preAllocate( allocateCount );\n                obj = this.mPool.poll();\n                if ( obj == null ) {\n                    throw new InternalError( \"Unable to allocate instance.\" );\n                }\n            }\n            else {\n                throw new IllegalStateException( \"Out of capacity, too many instances[\" + this.mCapacity + \"].\" );\n            }\n        }\n\n        --this.mFreeSize;\n        return obj;\n    }\n\n    @Override\n    public void free( T obj ) {\n        if ( obj != null ) {\n            this.mPool.offer( obj );\n            ++this.mFreeSize;\n        }\n    }\n\n    @Override\n    public int freeSize() {\n        return this.mFreeSize;\n    }\n\n    @Override\n    public int pooledSize() {\n        return this.mPool.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.freeSize() == 0;\n    }\n\n    @Override\n    public void preAllocate( int count ) {\n        for ( int i = 0; i < count; ++i) {\n            this.mPool.offer( this.newInstance() );\n        }\n    }\n\n    @Override\n    public void 
setCapacity( int capacity ) {\n        if ( capacity < this.mCapacity - this.mFreeSize ) {\n            throw new IllegalArgumentException( \"New capacity cannot be less than current capacity minus free size.\" );\n        }\n        if ( capacity > this.mCapacity ) {\n            int availableCapacity = this.freeSize();\n            if ( availableCapacity > 0 ) {\n                if( this.mPreAllocate > 0 ) {\n                    this.preAllocate( Math.min( availableCapacity, this.mPreAllocate ) );\n                }\n            }\n        }\n        this.mCapacity = capacity;\n    }\n\n    @Override\n    public int getCapacity() {\n        return this.mCapacity;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/GenericStructureDefinition.java",
    "content": "package com.pinecone.framework.system.construction;\n\npublic class GenericStructureDefinition implements StructureDefinition {\n    private String     mLookup         = \"\";\n    private Class<?>   mType           = Object.class;\n    private ReuseCycle mCycle          = ReuseCycle.Singleton;\n    private boolean    mShareable      = true;\n    private String     mDescription    = \"\";\n    private Class<?>   mProvider       =  void.class;\n    private String     mProviderMethod = \"\";\n\n    private Structure.AuthenticationType mAuthenticationType = Structure.AuthenticationType.CONTAINER;\n\n    public GenericStructureDefinition( Class<?> type ) {\n        this.mType = type;\n    }\n\n\n    public GenericStructureDefinition( Structure structure ) {\n        this( structure.type() );\n        this.mCycle              = structure.cycle();\n        this.mLookup             = structure.lookup();\n        this.mProvider           = structure.provider();\n        this.mShareable          = structure.shareable();\n        this.mDescription        = structure.description();\n        this.mProviderMethod     = structure.providerMethod();\n        this.mAuthenticationType = structure.authenticationType();\n    }\n\n\n    @Override\n    public String getLookup() {\n        return this.mLookup;\n    }\n\n    @Override\n    public void setLookup( String lookup ) {\n        this.mLookup = lookup; //TODO\n    }\n\n    @Override\n    public Class<? > getType() {\n        return this.mType;\n    }\n\n    @Override\n    public void setType( Class<? 
> type ) {\n        this.mType = type;\n    }\n\n    @Override\n    public ReuseCycle getCycle() {\n        return this.mCycle;\n    }\n\n    @Override\n    public void setCycle( ReuseCycle cycle ) {\n        this.mCycle = cycle;\n    }\n\n    @Override\n    public Structure.AuthenticationType getAuthenticationType() {\n        return this.mAuthenticationType;\n    }\n\n    @Override\n    public void setAuthenticationType( Structure.AuthenticationType authenticationType ) {\n        this.mAuthenticationType = authenticationType;\n    }\n\n    @Override\n    public boolean isShareable() {\n        return this.mShareable;\n    }\n\n    @Override\n    public void setShareable( boolean shareable ) {\n        this.mShareable = shareable;\n    }\n\n    @Override\n    public String getDescription() {\n        return this.mDescription;\n    }\n\n    @Override\n    public void setDescription(String description) {\n        this.mDescription = description;\n    }\n\n    @Override\n    public Class<?> getProvide() {\n        return this.mProvider;\n    }\n\n    @Override\n    public String getProvideMethod() {\n        return this.mProviderMethod;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/InstanceDispenser.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface InstanceDispenser extends Pinenut {\n    InstanceDispenser register( Class<?> type ) ;\n\n    boolean  hasRegistered( Class<? > type );\n\n    <T > T allotInstance( Class<T> type ) ;\n\n    void free( Class<?> type, Object instance ) ;\n\n    void free( Object instance );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/InstanceManufacturer.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport java.util.List;\n\npublic interface InstanceManufacturer extends InstanceDispenser {\n    @Override\n    InstanceManufacturer register( Class<?> type ) ;\n\n    void onlyRegister( Class<?> type ) ;\n\n    InstanceManufacturer registers( List<Class<?> > types );\n\n    List<Class<?>> fetchRegistered();\n\n    String[] fetchRegisteredNames();\n\n    default InstanceManufacturer registerInstancing( Class<?> type ) {\n        return this.registerInstancing( type, null );\n    }\n\n    InstanceManufacturer registerInstancing( Class<?> type, Object instance ) ;\n\n    Object allotInstance( String type );\n\n    Object autowire( Object that );\n\n    void close();\n\n    void refresh();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/InstancePool.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface InstancePool<T > extends Pinenut {\n\n    T allocate() ;\n\n    void free( T obj ) ;\n\n    int freeSize() ;\n\n    int pooledSize();\n\n    boolean isEmpty() ;\n\n    void preAllocate( int count ) ;\n\n    void setCapacity( int capacity ) ;\n\n    int getCapacity() ;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/ObjectBasicTraits.java",
    "content": "package com.pinecone.framework.system.construction;\n\npublic class ObjectBasicTraits implements ObjectTraits {\n    private boolean   mIsBean             = false;\n    private boolean   mIsDirectlyStruct   = false;\n    private String    mName               = \"\";\n    private String    mMappedKey          = \"\";\n    private Object    mTargetAnnotation   = null;\n    private Class<?>  mDeclaredType       = null;\n    private Class<?>  mAffiliatedType     = null;\n\n    public ObjectBasicTraits() {\n    }\n\n    public ObjectBasicTraits( Structure structure ) {\n        this.fromStructure( structure );\n    }\n\n    public ObjectTraits fromStructure( Structure structure ) {\n        this.setBean( true );\n        this.setDirectlyStruct( false );\n        this.setName( structure.name() );\n        this.setTargetAnnotation( structure );\n        this.setDeclaredType( structure.type() );\n        this.setMappedKey( structure.mappedName() );\n        return this;\n    }\n\n\n    @Override\n    public boolean isDirectlyStruct() {\n        return this.mIsDirectlyStruct;\n    }\n\n    @Override\n    public void setDirectlyStruct( boolean isDirectlyStruct ) {\n        this.mIsDirectlyStruct = isDirectlyStruct;\n    }\n\n    @Override\n    public boolean isBean() {\n        return this.mIsBean;\n    }\n\n    @Override\n    public void setBean( boolean isBean ) {\n        this.mIsBean = isBean;\n    }\n\n    @Override\n    public String getName() {\n        return this.mName;\n    }\n\n    @Override\n    public void setName( String name ) {\n        this.mName = name;\n    }\n\n    @Override\n    public String getMappedKey() {\n        return this.mMappedKey;\n    }\n\n    @Override\n    public void setMappedKey( String mappedKey ) {\n        this.mMappedKey = mappedKey;\n    }\n\n    @Override\n    public Object getTargetAnnotation() {\n        return this.mTargetAnnotation;\n    }\n\n    @Override\n    public void setTargetAnnotation( Object 
targetAnnotation ) {\n        this.mTargetAnnotation = targetAnnotation;\n    }\n\n    @Override\n    public Class<?> getDeclaredType() {\n        return this.mDeclaredType;\n    }\n\n    @Override\n    public void setDeclaredType( Class<?> declaredType ) {\n        this.mDeclaredType = declaredType;\n    }\n\n    @Override\n    public Class<?> getAffiliatedType() {\n        return this.mAffiliatedType;\n    }\n\n    @Override\n    public void setAffiliatedType( Class<?> affiliatedType ) {\n        this.mAffiliatedType = affiliatedType;\n    }\n\n    @Override\n    public boolean isStructure() {\n        return this.getTargetAnnotation() instanceof Structure;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/ObjectTraits.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.StringUtils;\n\npublic interface ObjectTraits extends Pinenut {\n    boolean isBean();\n\n    void setBean( boolean isBean );\n\n    boolean isDirectlyStruct();\n\n    void setDirectlyStruct( boolean isDirectlyStruct );\n\n    String getName();\n\n    void setName( String name );\n\n    String getMappedKey();\n\n    void setMappedKey( String mappedKey );\n\n    Object getTargetAnnotation();\n\n    void setTargetAnnotation( Object targetAnnotation );\n\n    Class<? > getDeclaredType();\n\n    void setDeclaredType( Class<?> declaredType );\n\n    Class<? > getAffiliatedType();\n\n    void setAffiliatedType( Class<?> affiliatedType );\n\n    default boolean isStructure() {\n        return this.getTargetAnnotation() instanceof Structure;\n    }\n\n    default boolean isAnonymous() {\n        return StringUtils.isEmpty( this.getMappedKey() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/Postpone.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.METHOD, ElementType.PARAMETER, ElementType.FIELD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Postpone {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/ReuseCycle.java",
    "content": "package com.pinecone.framework.system.construction;\n\npublic enum ReuseCycle {\n    // Passively(Lazy) loading and instancing, and only have the one static singleton in the whole program scope.\n    Singleton     ( \"Singleton\"     ),\n\n    // Passively(Lazy) loading and instancing, and with instancing without pooling, free lifecycle.\n    Disposable    ( \"Disposable\"    ),\n\n    // Passively(Lazy) loading and instancing, and pooled the instanced object. Opt. Allocate / Free.\n    Recyclable    ( \"Recyclable\"    ),\n\n    // Positively loading and instancing if found, and only have the one static singleton in the whole program scope.\n    PreSingleton  ( \"PreSingleton\"  ),\n\n    // Positively loading and instancing if found, and pooled the instanced object. Opt. Allocate / Free.\n    PreRecyclable ( \"PreRecyclable\" );\n\n    private final String value;\n\n    ReuseCycle( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n\n    public boolean isSingleton() {\n        return this == ReuseCycle.Singleton || this == ReuseCycle.PreSingleton;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/Structure.java",
    "content": "package com.pinecone.framework.system.construction;\n\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport java.lang.annotation.Repeatable;\n\n@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD})\n@Retention(RetentionPolicy.RUNTIME)\n@Repeatable( Structures.class )\npublic @interface Structure {\n    String name() default \"\";\n\n    String lookup() default \"\";\n\n    // If true, will allowed this object member fields assignment likes the C/C++/Go struct.\n    boolean directlyStruct() default false;\n\n    Class<? > type() default Object.class;\n\n    ReuseCycle cycle() default ReuseCycle.Singleton;\n\n    Structure.AuthenticationType authenticationType() default Structure.AuthenticationType.CONTAINER;\n\n    boolean shareable() default true;\n\n    String mappedName() default \"\";\n\n    String description() default \"\";\n\n    // Instancing handle\n    // TODO\n    Class<?> provider() default void.class;\n\n    String providerMethod() default \"\";\n\n    enum AuthenticationType {\n        CONTAINER,\n        APPLICATION;\n\n        AuthenticationType() {\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/StructureDefinition.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface StructureDefinition extends Pinenut {\n    String getLookup();\n    void setLookup( String lookup );\n\n    Class<?> getType();\n    void setType( Class<?> type );\n\n    ReuseCycle getCycle();\n    void setCycle( ReuseCycle cycle );\n\n    Structure.AuthenticationType getAuthenticationType();\n    void setAuthenticationType( Structure.AuthenticationType authenticationType );\n\n    boolean isShareable();\n    void setShareable( boolean shareable );\n\n    String getDescription();\n    void setDescription( String description );\n\n    Class<?> getProvide();\n    String getProvideMethod();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/StructureInstanceDispenser.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.lang.DynamicFactory;\n\npublic interface StructureInstanceDispenser extends InstanceDispenser {\n    StructureDefinition update ( Class<?> type, StructureDefinition definition ) ;\n\n    InstanceDispenser register( Class<?> type, StructureDefinition definition, @Nullable InstancePool<? > pool ) ;\n\n    InstanceDispenser register( Class<?> type, StructureDefinition definition ) ;\n\n    InstanceDispenser register( Class<?> type, Structure structure, @Nullable InstancePool<? > pool ) ;\n\n    InstanceDispenser register( Class<?> type, Structure structure ) ;\n\n    InstanceDispenser register( Class<?> type, @Nullable InstancePool<? > pool ) ;\n\n    InstanceDispenser register( StructureDefinition definition ) ;\n\n    InstanceDispenser registerByImplicitFirstFound( Class<?> type, @Nullable Structure structure, @Nullable InstancePool<? > pool ) ;\n\n    InstanceDispenser registerByImplicitFirstFound( Class<?> type, @Nullable Structure structure ) ;\n\n    InstanceDispenser registerByImplicitFirstFound( Class<?> type ) ;\n\n    <T > T allotInstance( Class<T> type, @Nullable Structure instanceStructure ) ;\n\n    StructureDefinition getStructureDefinition( Class<?> type );\n\n    InstancePool<? > getInstancePool( Class<?> type );\n\n    DynamicFactory getCentralFactory();\n\n\n\n    Object registerInstance( String name, Object instance );\n\n    Object getRegisteredInstance( String name );\n\n    Object removeRegisteredInstance( String name );\n\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/Structures.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Documented\n@Retention(RetentionPolicy.RUNTIME)\n@Target({ElementType.TYPE})\npublic @interface Structures {\n    Structure[] value();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/UnifyCentralInstanceDispenser.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\n\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\npublic class UnifyCentralInstanceDispenser implements StructureInstanceDispenser {\n    protected final Map<Class<?>, Object >                 mSingletonObjects   = new ConcurrentHashMap<>();\n    protected final Map<Class<?>, StructureDefinition >    mObjectDefinitions  = new ConcurrentHashMap<>();\n    protected final Map<Class<?>, InstancePool<? > >       mObjectInstancer    = new ConcurrentHashMap<>(); // Pool is immutable.\n    protected final Map<String, Object >                   mObjectRegister     = new ConcurrentHashMap<>();\n    protected final DynamicFactory                         mCentralFactory     ;\n\n    public UnifyCentralInstanceDispenser( DynamicFactory factory ) {\n        this.mCentralFactory = factory;\n    }\n\n    public UnifyCentralInstanceDispenser() {\n        this( new GenericDynamicFactory() );\n    }\n\n    /**\n     * update\n     * The pool is immutable.\n     * @param type the object`s type\n     * @param definition the object`s definition\n     * @return null for nonsexist or definition which just inserted.\n     */\n    @Override\n    public StructureDefinition update( Class<?> type, StructureDefinition definition ) {\n        if( this.mObjectDefinitions.containsKey( type ) ) {\n            return this.mObjectDefinitions.put( type, definition );\n        }\n        return null;\n    }\n\n    @Override\n    public InstanceDispenser register( Class<?> type, StructureDefinition definition, 
InstancePool<? > pool ) {\n        if( pool == null ) {\n            pool = this.defaultInstancePool( type, definition );\n        }\n        this.mObjectDefinitions.putIfAbsent( type, definition );\n        this.mObjectInstancer.putIfAbsent( type, pool );\n        return this;\n    }\n\n    @Override\n    public InstanceDispenser register( Class<?> type, StructureDefinition definition ) {\n        return this.register( type, definition, this.defaultInstancePool( type, definition ) );\n    }\n\n    protected StructureDefinition defaultDefinition( Class<?> type, Structure structure ) {\n        StructureDefinition definition = new GenericStructureDefinition( structure );\n        if( definition.getType() == Object.class && type != Object.class ) {\n            definition.setType( type );\n        }\n\n        return definition;\n    }\n\n    @Override\n    public InstanceDispenser register( Class<?> type, Structure structure ) {\n        return this.register( type, structure, null );\n    }\n\n    @Override\n    public InstanceDispenser register( Class<?> type, Structure structure, @Nullable InstancePool<?> pool ) {\n        StructureDefinition definition = this.defaultDefinition( type, structure );\n        if( pool == null ) {\n            pool = this.defaultInstancePool( type, definition );\n        }\n        return this.register( type, definition, pool );\n    }\n\n    @Override\n    public InstanceDispenser register( Class<?> type ) {\n        return this.register( type, (InstancePool<?>) null );\n    }\n\n    protected Structure foundClassDeclaredStructure( Class<?> type ) {\n        Annotation[] annotations = type.getAnnotations();\n        for( Annotation annotation : annotations ) {\n            if( annotation instanceof Structure ) {\n                return (Structure)annotation;\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public InstanceDispenser register( Class<?> type, @Nullable InstancePool<?> pool ) {\n        
Structure target = this.foundClassDeclaredStructure( type );\n        if( target != null ) {\n            return this.register( type, target, pool );\n        }\n\n        StructureDefinition definition = new GenericStructureDefinition( type );\n        return this.register( type, definition, pool );\n    }\n\n    protected InstancePool<? > defaultInstancePool( Class<?> type, StructureDefinition definition ) {\n        if( definition.getProvide() != void.class && definition.getProvide() != Object.class ) {\n            Object o = this.tryInstancingFromProvider( type, definition, null );\n            if( o instanceof InstancePool ) {\n                return (InstancePool)o;\n            }\n        }\n\n        if( definition.getCycle() == ReuseCycle.Disposable || definition.getCycle().isSingleton() ) {\n            return new GenericDynamicInstancePool<>( this.mCentralFactory, 0, type );\n        }\n        return new GenericDynamicInstancePool<>( this.mCentralFactory, 4, type );\n    }\n\n    @Override\n    public InstanceDispenser register( StructureDefinition definition ) {\n        return this.register( definition.getType(), definition );\n    }\n\n    @Override\n    public InstanceDispenser registerByImplicitFirstFound( Class<?> type, @Nullable Structure structure ) {\n        return this.registerByImplicitFirstFound( type, structure, null );\n    }\n\n    @Override\n    public InstanceDispenser registerByImplicitFirstFound( Class<?> type, @Nullable Structure structure, @Nullable InstancePool<?> pool ) {\n        Structure target = this.foundClassDeclaredStructure( type );\n        if( target == null ) {\n            target = structure;\n        }\n\n        if( target == null ) {\n            StructureDefinition definition = new GenericStructureDefinition( type );\n            return this.register( type, definition, pool );\n        }\n        return this.register( type, target, pool );\n    }\n\n    @Override\n    public InstanceDispenser 
registerByImplicitFirstFound( Class<?> type ) {\n        return this.registerByImplicitFirstFound( type, null, null );\n    }\n\n    @Override\n    public boolean hasRegistered( Class<? > type ) {\n        return this.mObjectDefinitions.containsKey( type );\n    }\n\n    protected Object invokeInstancingProvider( Class<? > provider, String szMethodName ) {\n        Object provide = this.mCentralFactory.optNewInstance( provider, null );\n        Method pm;\n        try{\n            pm = provide.getClass().getMethod( szMethodName );\n        }\n        catch ( NoSuchMethodException nme ) {\n            return null;\n        }\n\n        try {\n            return ReflectionUtils.tryAccessibleInvoke( pm, provide );\n        }\n        catch ( InvocationTargetException | IllegalArgumentException e ) {\n            return null;\n        }\n    }\n\n    protected Object tryInstancingFromProvider( Class<?> type, StructureDefinition definition, @Nullable Structure instanceStructure ) {\n        if( instanceStructure != null ) {\n            Class<? 
> provider = instanceStructure.provider();\n            if( DynamicInstancePool.class.isAssignableFrom( provider ) ) {\n                if( instanceStructure.cycle() == ReuseCycle.Disposable || instanceStructure.cycle().isSingleton() ) {\n                    return (InstancePool<?>) this.mCentralFactory.optNewInstance( provider, new Object[]{ this.mCentralFactory, 0, type } );\n                }\n                return (InstancePool<?>) this.mCentralFactory.optNewInstance( provider, new Object[]{ this.mCentralFactory, 4, type } );\n            }\n            else if( InstancePool.class.isAssignableFrom( provider ) ) {\n                return (InstancePool<?>) this.mCentralFactory.optNewInstance( provider, null );\n            }\n            else if( instanceStructure.type() != void.class && instanceStructure.type() != Object.class && !instanceStructure.providerMethod().isEmpty() ) {\n                Object ret = this.invokeInstancingProvider( provider, instanceStructure.providerMethod() );\n                if( ret != null ) {\n                    return ret;\n                }\n            }\n        }\n\n        Class<? 
> provider = definition.getProvide();\n        if( DynamicInstancePool.class.isAssignableFrom( definition.getProvide() ) ) {\n            if( definition.getCycle() == ReuseCycle.Disposable || definition.getCycle().isSingleton() ) {\n                return (InstancePool<?>) this.mCentralFactory.optNewInstance( provider, new Object[]{ this.mCentralFactory, 0, type } );\n            }\n            return (InstancePool<?>) this.mCentralFactory.optNewInstance( provider, new Object[]{ this.mCentralFactory, 4, type } );\n        }\n        else if( InstancePool.class.isAssignableFrom( definition.getProvide() ) ) {\n            return (InstancePool<?>) this.mCentralFactory.optNewInstance( provider, null );\n        }\n        else if( definition.getType() != void.class && definition.getType() != Object.class && !definition.getProvideMethod().isEmpty() ) {\n            Object ret = this.invokeInstancingProvider( provider, definition.getProvideMethod() );\n            if( ret != null ) {\n                return ret;\n            }\n        }\n        return null;\n    }\n\n\n    @Override\n    public <T> T allotInstance( Class<T> type, @Nullable Structure instanceStructure ) {\n        StructureDefinition definition = this.mObjectDefinitions.get( type );\n        if( definition == null ) {\n            return null; // Unregistered.\n        }\n        Class<? 
> innerType = definition.getType();\n        if( innerType == Object.class ) {\n            innerType = type;\n        }\n\n        Object t = this.tryInstancingFromProvider( type, definition, instanceStructure );\n        if( t != null ) {\n            return type.cast( t );\n        }\n\n        Object b = this.mSingletonObjects.get( type );\n        if ( b != null ) {\n            if( instanceStructure != null && !instanceStructure.cycle().isSingleton() ) {\n                return type.cast( this.mObjectInstancer.get( innerType ).allocate() );\n            }\n            return type.cast( b );\n        }\n\n        InstancePool<? > pool = this.mObjectInstancer.get( innerType );\n        if ( pool != null ) {\n            if(\n                    definition.getCycle() == ReuseCycle.Disposable ||\n                    ( instanceStructure != null && instanceStructure.cycle() == ReuseCycle.Disposable )\n            ) {\n                return type.cast( pool.allocate() );\n            }\n\n            T obj = type.cast( pool.allocate() );\n            if ( definition.getCycle().isSingleton() ) {\n                this.mSingletonObjects.put( innerType, obj );\n            }\n            return obj;\n        }\n\n        String name = instanceStructure.name();\n        if ( StringUtils.isEmpty(name) ) {\n            name = type.getSimpleName();\n            name = Character.toLowerCase( name.charAt(0) ) + name.substring(1);\n        }\n\n        if ( StringUtils.isNoneEmpty(name) ) {\n            Object o = this.getRegisteredInstance( name );\n            if( o != null && type.isAssignableFrom( o.getClass() ) ) {\n                return type.cast( o );\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public <T > T allotInstance( Class<T> type ) {\n        return this.allotInstance( type, null );\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    public void free( Class<?> type, Object instance ) {\n        InstancePool pool 
= this.mObjectInstancer.get( type );\n        if( pool != null ) {\n            pool.free( instance );\n        }\n        else {\n            throw new IllegalArgumentException( type.getName() + \" is not owned instance.\" );\n        }\n    }\n\n    @Override\n    public void free( Object instance ) {\n        this.free( instance.getClass(), instance );\n    }\n\n    @Override\n    public StructureDefinition getStructureDefinition( Class<?> type ) {\n        return this.mObjectDefinitions.get( type );\n    }\n\n    @Override\n    public InstancePool<? > getInstancePool( Class<?> type ) {\n        return this.mObjectInstancer.get( type );\n    }\n\n    @Override\n    public DynamicFactory getCentralFactory() {\n        return this.mCentralFactory;\n    }\n\n\n    @Override\n    public Object registerInstance( String name, Object instance ) {\n        return this.mObjectRegister.put( name, instance );\n    }\n\n    @Override\n    public Object getRegisteredInstance( String name ) {\n        return this.mObjectRegister.get( name );\n    }\n\n    @Override\n    public Object removeRegisteredInstance( String name ) {\n        return this.mObjectRegister.remove( name );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/construction/UnifyStructureInjector.java",
    "content": "package com.pinecone.framework.system.construction;\n\nimport com.pinecone.framework.system.functions.Executable;\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.framework.util.json.homotype.MapStructure;\nimport com.pinecone.framework.util.json.homotype.ObjectInjector;\n\nimport java.beans.JavaBean;\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Array;\nimport java.lang.reflect.Field;\nimport java.util.Collection;\nimport java.util.Map;\n\npublic class UnifyStructureInjector extends ObjectInjector {\n    protected StructureInstanceDispenser mInstanceDispenser;\n\n    public UnifyStructureInjector( Class type, StructureInstanceDispenser instanceDispenser ) {\n        super( type );\n        this.mInstanceDispenser = instanceDispenser;\n    }\n\n    public UnifyStructureInjector( Class type ) {\n        this( type, null );\n    }\n\n\n    public ObjectTraits getObjectTraits( Field field ) {\n        String szKey = null;\n\n        ObjectBasicTraits traits = new ObjectBasicTraits();\n        Annotation[] annotations = field.getAnnotations();\n        for ( Annotation a : annotations ) {\n            if( a instanceof JSONGet ) {\n                szKey = ( (JSONGet) a ).value();\n                traits.setMappedKey( szKey );\n                traits.setTargetAnnotation( a );\n                break;\n            }\n            else if( a instanceof MapStructure ) {\n                szKey = ( (MapStructure) a ).value();\n                traits.setMappedKey( szKey );\n                traits.setTargetAnnotation( a );\n                break;\n            }\n            else if( a instanceof Structure ) {\n                Structure structure = (Structure) a;\n                traits.fromStructure( structure );\n                szKey = structure.mappedName();\n                if( szKey.isEmpty() ) {\n                 
   szKey = structure.name();\n                }\n                traits.setMappedKey( szKey );\n                traits.setTargetAnnotation( a );\n                break;\n            }\n            else if( a instanceof JavaBean ) {\n                JavaBean javaBean = (JavaBean) a;\n                szKey = javaBean.defaultProperty();\n                traits.setMappedKey( szKey );\n                traits.setBean( true );\n                traits.setTargetAnnotation( a );\n                break;\n            }\n        }\n\n        if( szKey == null ) {\n            return null;\n        }\n        traits.setAffiliatedType( field.getType() );\n        return traits;\n    }\n\n\n    protected Object getFromMapStructure    ( Object mapLiked, String key ) {\n        return Units.getFromMapStructure( mapLiked, key, true, true );\n    }\n\n    protected Object injectMapLinked     ( Object mapLiked, Class<?> type, Object instance ) {\n        Field[] fields = type.getDeclaredFields();\n        for ( Field field : fields ) {\n            ReflectionUtils.makeAccessible( field );\n            try {\n                ObjectTraits traits = this.getObjectTraits( field );\n                String szMappedKey;\n                if( traits == null ) {\n                    continue;\n                }\n                else if( traits.getMappedKey().isEmpty() ) {\n                    szMappedKey = field.getName();\n                    traits.setMappedKey( szMappedKey );\n                }\n                else {\n                    szMappedKey = traits.getMappedKey();\n                }\n\n                if( traits.getName().isEmpty() ) {\n                    traits.setName( field.getName() );\n                }\n\n\n                Object val = this.getFromMapStructure( mapLiked, this.getFieldName( szMappedKey ) );\n                if( val == null ){\n                    val = this.getFromMapStructure( mapLiked, szMappedKey );\n                }\n                if( val == null && 
szMappedKey.contains( \".\" ) ){\n                    val = this.getValueFromMapRecursively( mapLiked, szMappedKey );\n                }\n\n                try {\n                    Object j;\n                    Class<? > insType   = traits.getDeclaredType();\n                    Class<? > fieldType = field.getType();\n                    Object ann          = traits.getTargetAnnotation();\n                    if( ann instanceof Structure ) {\n                        if( insType == Object.class || insType == null ) {\n                            j = this.inject( val, fieldType );\n                        }\n                        else {\n                            j = this.instancingUnitWithSpecificType( traits, val, field );\n                        }\n                    }\n                    else {\n                        j = this.inject( val, fieldType );\n                    }\n\n                    if( j == null ) {\n                        j = this.instancingAndInject( traits, val, field );\n                    }\n                    field.set( instance, j );\n                }\n                catch ( IllegalArgumentException e ){\n                    //e.printStackTrace();\n                    field = null;\n                }\n            }\n            catch ( IllegalAccessException e ){\n                throw new IllegalStateException(e); // This should never be happened.\n            }\n        }\n\n        return instance;\n    }\n\n    protected void ensureRegistered( ObjectTraits traits, Class<? 
> insType ) {\n        if( !this.mInstanceDispenser.hasRegistered( insType ) ) {\n            Object ann = traits.getTargetAnnotation();\n            if( ann instanceof Structure ) {\n                this.mInstanceDispenser.registerByImplicitFirstFound( insType, (Structure)ann );\n            }\n            else {\n                this.mInstanceDispenser.registerByImplicitFirstFound( insType );\n            }\n        }\n    }\n\n\n    protected Object instancingAndInject( ObjectTraits traits, Object val, Field field ) {\n        Class<? > insType = traits.getDeclaredType();\n        if( this.mInstanceDispenser != null ) {\n            if( insType == null || (insType == Object.class && field.getType() != Object.class) ) {\n                insType = field.getType();\n            }\n\n            this.ensureRegistered( traits, insType );\n            Object neoMember = this.mInstanceDispenser.allotInstance( insType );\n            try{\n                this.inject( val, insType, neoMember );\n            }\n            catch ( Exception e ) {\n                throw new IllegalArgumentException( e );\n            }\n            return neoMember;\n        }\n\n        return null;\n    }\n\n    protected Object instancingUnitWithSpecificType( ObjectTraits traits, Object val, Field field ) {\n        if( this.mInstanceDispenser == null || val == null ) {\n            return null;\n        }\n\n        Class<? > fieldType = field.getType();\n        Class<? 
> insType   = traits.getDeclaredType();\n        this.ensureRegistered( traits, insType );\n        if( fieldType.isAssignableFrom( val.getClass() ) ) {\n            if( val instanceof Map ) {\n                Map<Object, Object> cm = Units.newInstance( val.getClass() );\n                for( Object v : ((Map) val).entrySet() ) {\n                    Map.Entry kv = (Map.Entry) v;\n                    Object neo = this.mInstanceDispenser.allotInstance( insType );\n                    neo = this.injectMapLinked( kv.getValue(), insType, neo );\n                    cm.put( kv.getKey(), neo );\n                }\n                return cm;\n            }\n            else if( val.getClass().isArray() ) {\n                Object[] vals = new Object[ Array.getLength( val ) ];\n                for ( int i = 0; i < vals.length; ++i ) {\n                    Object neo = this.mInstanceDispenser.allotInstance( insType );\n                    neo = this.injectMapLinked( Array.get( val, i ), insType, neo );\n                    vals[i] = neo;\n                }\n                return vals;\n            }\n            else if( val instanceof Collection ) {\n                Collection<Object> ib = Units.newInstance( val.getClass() );\n                for( Object o : (Collection) val ) {\n                    Object neo = this.mInstanceDispenser.allotInstance( insType );\n                    neo = this.injectMapLinked( o, insType, neo );\n                    ib.add( neo );\n                }\n                return ib;\n            }\n        }\n        return val;\n    }\n\n    @Override\n    public    Object inject      ( Map that, Class<?> type, Object instance ) {\n        return this.injectMapLinked( that, type, instance );\n    }\n\n    protected Object getValueFromMapRecursively( Object mapLiked, String key ) {\n        String[] keys = key.split(\"\\\\.|\\\\/\");\n        Object value = mapLiked;\n        for ( String k : keys ) {\n            value = 
this.getFromMapStructure( value, k );\n        }\n        return value;\n    }\n\n    @Override\n    public Object inject              ( Object that, Class<?> type, Object instance ) throws Exception {\n        if ( ObjectInjector.trialHomogeneity( that ) ){\n            return that;\n        }\n        else if( type == Object.class ){\n            return that;\n        }\n        else if( that instanceof Executable){\n            return this.inject( (Executable) that );\n        }\n        else if ( that instanceof Collection){\n            return this.inject( (Collection) that, type, instance );\n        }\n        else if ( that instanceof Map ){\n            return this.inject( (Map) that, type, instance );\n        }\n        else {\n            return this.injectMapLinked( that, type, instance );\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ArchExecutum.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.GenericMasterTaskManager;\n\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\npublic abstract class ArchExecutum implements Executum {\n    private long              mnId                      ;\n    protected String          mszName                   ;\n    protected RuntimeSystem   mParentSystem             ;\n    protected Processum       mParentProcessum          ;\n\n    protected Thread          mAffiliateThread          ;\n\n    protected int             mExceptionRestartCount    = 0;\n    protected int             mExceptionRestartTime     ; // < 0 Force always keep alive, like the cancer.\n\n    // Mutex & Lock\n    protected ReentrantReadWriteLock mResourceLock     = new ReentrantReadWriteLock();\n\n    protected ArchExecutum ( String szName, Processum parent, Thread affiliateThread ) {\n        this.mAffiliateThread = affiliateThread;\n        this.mszName          = szName;\n        this.mParentProcessum = parent;\n        if( this.mParentProcessum == null ) {\n            this.mParentSystem = null;\n        }\n        else if( this.mParentProcessum instanceof RuntimeSystem ) {\n            this.mParentSystem = (RuntimeSystem) this.mParentProcessum;\n        }\n        else {\n            this.mParentSystem = this.mParentProcessum.parentSystem();\n        }\n\n        this.makeNameAndId();\n    }\n\n    protected ArchExecutum ( String szName, Processum parent ) {\n        this( szName, parent, null );\n    }\n\n    protected ArchExecutum ( Processum parent, Thread affiliateThread ) {\n        this( affiliateThread.getName(), parent, affiliateThread );\n    }\n\n    protected void makeNameAndId() {\n        this.mnId          = Executum.nextAutoIncrementId();\n        if( this.mszName == null ) {\n            this.mszName = 
this.className();\n            long id = this.getExecutumId();\n            if( this.mParentProcessum != null ) {\n                this.mszName = this.mszName + \"-Executum-\" + id;\n            }\n        }\n    }\n\n\n    @Override\n    public int getExceptionRestartTime() {\n        return this.mExceptionRestartTime;\n    }\n\n    @Override\n    public ArchExecutum applyExceptionRestartTime( int time ){\n        this.mResourceLock.writeLock().lock();\n        this.mExceptionRestartTime = time;\n        this.mResourceLock.writeLock().unlock();\n        return this;\n    }\n\n    @Override\n    public String getName(){\n        return this.mszName;\n    }\n\n    @Override\n    public void setName( String szName ) {\n        this.mszName = szName;\n    }\n\n    @Override\n    public long getExecutumId() {\n        return this.mnId;\n    }\n\n    @Override\n    public ArchExecutum setThreadAffinity( Thread affinity ) {\n        this.mAffiliateThread = affinity;\n        return this;\n    }\n\n    @Override\n    public Thread getAffiliateThread() {\n        return this.mAffiliateThread;\n    }\n\n    @Override\n    public Thread.State getState() {\n        return this.getAffiliateThread().getState();\n    }\n\n    @Override\n    public RuntimeSystem  parentSystem() {\n        return this.mParentSystem;\n    }\n\n    @Override\n    public RuntimeSystem revealNearestSystem() {\n        if ( this instanceof RuntimeSystem ) {\n            return (RuntimeSystem) this;\n        }\n        return parentSystem();\n    }\n\n    @Override\n    public Processum parentExecutum() {\n        return this.mParentProcessum;\n    }\n\n    @Override\n    public boolean isTerminated(){\n        return this.getState() == Thread.State.TERMINATED;\n    }\n\n    @Override\n    public void start() {\n        if ( this.mAffiliateThread != null ) {\n            this.mAffiliateThread.start();\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ArchProcessum.java",
    "content": "package com.pinecone.framework.system.executum;\n\n\nimport com.pinecone.framework.system.ApoptosisRejectSignalException;\nimport com.pinecone.framework.system.GenericMasterTaskManager;\n\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\npublic abstract class ArchProcessum extends ArchExecutum implements Processum {\n    protected GenericMasterTaskManager mTaskManager              ;\n\n    protected LocalDateTime            mCreateTime;\n    protected LocalDateTime            mStartTime;\n\n    public ArchProcessum ( String szName, Processum parent ) {\n        super( szName, parent );\n\n        this.mCreateTime = LocalDateTime.now();\n    }\n\n\n    @Override\n    public ArchProcessum applyExceptionRestartTime( int time ){\n        return (ArchProcessum) super.applyExceptionRestartTime( time );\n    }\n\n    @Override\n    public ArchProcessum setThreadAffinity( Thread affinity ) {\n        return (ArchProcessum) super.setThreadAffinity( affinity );\n    }\n\n    @Override\n    public void  apoptosis() {\n        this.interrupt();\n    }\n\n    @Override\n    public void interrupt() {\n        if( this.getAffiliateThread() != null ) {\n            this.getAffiliateThread().interrupt();\n        }\n    }\n\n    @Override\n    public void  kill() {\n        this.getTaskManager().terminate();\n        if( this.getAffiliateThread() != null ) {\n            this.getAffiliateThread().stop();\n        }\n    }\n\n    @Override\n    public void  suspend() {\n        this.getAffiliateThread().suspend();\n    }\n\n    @Override\n    public void  resume() {\n        this.getAffiliateThread().resume();\n    }\n\n    @Override\n    public void  entreatLive() {\n        throw new ApoptosisRejectSignalException();\n    }\n\n    @Override\n    public GenericMasterTaskManager getTaskManager() {\n        return this.mTaskManager;\n    }\n\n    @Override\n    public Map<Long, Executum > getOwnThreadGroup() {\n        return 
this.getTaskManager().getExecutumPool();\n    }\n\n    @Override\n    public void start() {\n        super.start();\n        this.mStartTime  = LocalDateTime.now();\n    }\n\n    @Override\n    public LocalDateTime getCreateTime() {\n        return this.mCreateTime;\n    }\n\n    @Override\n    public LocalDateTime getStartTime() {\n        return this.mStartTime;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ArchThreadum.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.ApoptosisRejectSignalException;\n\npublic abstract class ArchThreadum extends ArchExecutum implements Executum {\n\n    protected ArchThreadum ( String szName, Processum parent, Thread affiliateThread ) {\n        super( szName, parent, affiliateThread );\n    }\n\n    protected ArchThreadum ( String szName, Processum parent ) {\n        this( szName, parent, null );\n    }\n\n    protected ArchThreadum ( Processum parent, Thread affiliateThread ) {\n        this( affiliateThread.getName(), parent, affiliateThread );\n    }\n\n    @Override\n    public void setName( String szName ) {\n        super.setName( szName );\n        if( this.getAffiliateThread() != null ) {\n            this.getAffiliateThread().setName( szName );\n        }\n    }\n\n    @Override\n    public void  apoptosis() {\n        this.interrupt();\n    }\n\n    @Override\n    public void  kill() {\n        this.getAffiliateThread().stop();\n    }\n\n    @Override\n    public void interrupt() {\n        if( this.getAffiliateThread() != null ) {\n            this.getAffiliateThread().interrupt();\n        }\n    }\n\n    @Override\n    @SuppressWarnings( \"deprecation\" )\n    public void  suspend() {\n        this.getAffiliateThread().suspend();\n    }\n\n    @Override\n    @SuppressWarnings( \"deprecation\" )\n    public void  resume() {\n        this.getAffiliateThread().resume();\n    }\n\n    @Override\n    public void  entreatLive() {\n        throw new ApoptosisRejectSignalException();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Chronum.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.concurrent.TimeUnit;\n\npublic interface Chronum extends Pinenut {\n    long getStartNano();\n\n    default long getStartTime( TimeUnit unit ) {\n        return unit.convert( this.getStartNano(), TimeUnit.NANOSECONDS );\n    }\n\n    default long getStartMillis() {\n        return this.getStartTime( TimeUnit.MILLISECONDS );\n    }\n\n    default long getExecutedNano() {\n        return System.nanoTime() - this.getStartNano();\n    }\n\n    default long getExecutedTime( TimeUnit unit) {\n        long executedNano = this.getExecutedNano();\n        return unit.convert(executedNano, TimeUnit.NANOSECONDS);\n    }\n\n    default long getExecutedMillis() {\n        return this.getExecutedTime(TimeUnit.MILLISECONDS);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/EventedTaskManager.java",
    "content": "package com.pinecone.framework.system.executum;\n\npublic interface EventedTaskManager extends TaskManager {\n    void notifyFinished  ( Executum that );\n\n    void notifyExecuting ( Executum that );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ExclusiveProcessum.java",
    "content": "package com.pinecone.framework.system.executum;\n\npublic interface ExclusiveProcessum extends Processum {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/ExecutableSummoner.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.prototype.Summoner;\n\npublic interface ExecutableSummoner extends Summoner {\n    void executeAfterSummonSequence() throws Exception ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Executum.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.functions.Executable;\n\nimport java.util.concurrent.atomic.AtomicLong;\n\n/**\n *  Pinecone Ursus For Java Executum\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Executum vs Executor\n *  1.  Executor is just a function, that ignores the specific thread it executing on.\n *  2.  Executum is a thread based executable object, that has its own specific execute threads.\n *  2.1 Executum can own its thread group, which just like a process [Processum].\n *  2.2 Executum is a sophisticated task, which is focus on specific task-group or scheme.\n *  *****************************************************************************************\n */\npublic interface Executum extends Executable, Lifecycle {\n    String              getName();\n\n    void                setName( String szName );\n\n    long                getExecutumId();\n\n    RuntimeSystem       parentSystem();\n\n    RuntimeSystem       revealNearestSystem();\n\n    Executum            parentExecutum();\n\n    Executum            setThreadAffinity( Thread affinity );\n\n    Thread              getAffiliateThread();\n\n    default boolean     isSystemExecutum() {\n        return this instanceof Systemum;\n    }\n\n    default boolean     isMainThreadExecutum() {\n        return this.getAffiliateThread() == this.parentSystem().getProcessMainThread();\n    }\n\n    boolean             isTerminated();\n\n    void                start();\n\n    AtomicLong AutoIncrementId     = new AtomicLong( 0 );\n\n    static long nextAutoIncrementId() {\n        return Executum.AutoIncrementId.getAndIncrement();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/JobCompromisedException.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class JobCompromisedException extends Exception implements Pinenut {\n    public JobCompromisedException    () {\n        super();\n    }\n\n    public JobCompromisedException    ( String message ) {\n        super(message);\n    }\n\n    public JobCompromisedException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public JobCompromisedException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected JobCompromisedException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/LifeDaemon.java",
    "content": "package com.pinecone.framework.system.executum;\n\npublic interface LifeDaemon {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Lifecycle.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.ApoptosisRejectSignalException;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Lifecycle extends Pinenut {\n    void          apoptosis() throws ApoptosisRejectSignalException; // Notify you should die, but you can choose to be the cancer that refuses to die.\n\n    void          kill(); // Just kill you, the darkness comes...\n\n    void          interrupt();\n\n    void          suspend();\n\n    void          resume();\n\n    void          entreatLive(); // Before you die.\n\n    Thread.State  getState();\n\n    int           getExceptionRestartTime();\n\n    Lifecycle     applyExceptionRestartTime( int time );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Processum.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport java.time.LocalDateTime;\nimport java.util.Map;\n\npublic interface Processum extends Executum {\n    Map<Long, Executum >   getOwnThreadGroup();\n\n    default Thread         rootThread() {\n        return this.getAffiliateThread();\n    }\n\n    default boolean        isOnMainThread() {\n        return this.rootThread() == null || this.rootThread() == this.parentSystem().getProcessMainThread();\n    }\n\n    TaskManager            getTaskManager();\n\n\n    LocalDateTime          getCreateTime() ;\n\n    LocalDateTime          getStartTime() ;\n\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/StageCompromisedException.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class StageCompromisedException extends Exception implements Pinenut {\n    public StageCompromisedException    () {\n        super();\n    }\n\n    public StageCompromisedException    ( String message ) {\n        super(message);\n    }\n\n    public StageCompromisedException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public StageCompromisedException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected StageCompromisedException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Systema.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n// Systema [Latin, System]\npublic interface Systema extends Pinenut {\n    String       getName();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/Systemum.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport java.util.Map;\nimport java.util.Set;\n\npublic interface Systemum extends Processum, Systema {\n    default long getSystemId() {\n        return this.getExecutumId();\n    }\n\n    Thread       getAffiliateThread();\n\n    Thread       getProcessMainThread() ;\n\n    default boolean  isMainThreadSystem() {\n        return this.getAffiliateThread() == this.getProcessMainThread();\n    }\n\n    default Set<Thread > fetchAllProcessThreads() {\n        Map<Thread, StackTraceElement[]> allThreads = Thread.getAllStackTraces();\n        return allThreads.keySet();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/TaskCompromisedException.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class TaskCompromisedException extends Exception implements Pinenut {\n    public TaskCompromisedException    () {\n        super();\n    }\n\n    public TaskCompromisedException    ( String message ) {\n        super(message);\n    }\n\n    public TaskCompromisedException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public TaskCompromisedException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected TaskCompromisedException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/TaskManager.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.system.prototype.Summoner;\n\nimport java.util.Map;\n\npublic interface TaskManager extends Pinenut, Summoner, Manager {\n    Processum     getParentProcessum();\n\n    RuntimeSystem getSystem();\n\n    ClassLoader   getClassLoader();\n\n    Map<Integer, VitalResource > getVitalResources();\n\n    void     executeZionSequence(); // No exception\n\n    void     sendApoptosisSignal();\n\n    void     terminate();            // Instantly kill all subordinate executums, with no negotiation.\n\n    void     suspendAll();\n\n    void     resumeAll();\n\n    int      size();\n\n    boolean  isPooled();\n\n    long     getVitalizeCount();\n\n    long     getFatalityCount();\n\n    Executum add  ( Executum that );\n\n    void     erase( Executum that );\n\n    void     purge();\n\n    boolean isTerminated();\n\n    // Synchronizes the current thread, waiting for all tasks to be terminated.\n    void syncWaitingTerminated() throws Exception;\n\n    Executum summon         ( String szClassPath, Object... args ) throws Exception ;\n\n    void     kill           ( Executum that );\n\n    void     apoptosis      ( Executum that );\n\n    void     commitSuicide  ( Executum that );\n\n    boolean  autopsy        ( Executum that ); // Check if it is dead.\n\n    String   nomenclature   ( Thread   that );\n\n    // Object clearance rate, helps load balancing and dispatch. [e.g. Using priority queue.]\n    default double getClearanceRate() {\n        double nFatality = this.getFatalityCount();\n        double nVitalize = this.getVitalizeCount();\n        return nFatality / ( nVitalize + nFatality );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/executum/VitalResource.java",
    "content": "package com.pinecone.framework.system.executum;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface VitalResource extends Pinenut {\n    String      getName();\n\n    long        getId();\n\n    Object      nativeResource();\n\n    void        store(); // No exception\n\n    TaskManager parentFates();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/ChosenDispatcher.java",
    "content": "package com.pinecone.framework.system.functions;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TreeSet;\n\npublic class ChosenDispatcher implements SteerableSegment {\n    public static Set<String > S_RESERVED = new TreeSet<String >() { { add( \"default\" ); } };\n    private Map<String, Executable > mInnerFunctions;\n    private Map<String, Object >     mDynamicData;\n    private String                   mszCurrentChosen ;\n    private Invoker                  mInvoker;\n\n    public ChosenDispatcher( Map<String, Executable > innerFns ){\n        this( null, innerFns, null );\n    }\n\n    public ChosenDispatcher( Map<String, Object > dynamicData, Map<String, Executable > innerFns ){\n        this( dynamicData, innerFns, null );\n    }\n\n    public ChosenDispatcher( Map<String, Object > dynamicData, Map<String, Executable > innerFns, Invoker invoker ){\n        this.mInnerFunctions = innerFns;\n        this.mDynamicData    =  dynamicData != null ? dynamicData : new LinkedHashMap<>();\n        this.mInvoker        = invoker != null ? invoker : new SystemInvoker();\n    }\n\n    @Override\n    public Map<String, Object > data(){\n        return this.mDynamicData;\n    }\n\n    @Override\n    public String name() {\n        return this.mszCurrentChosen;\n    }\n\n    @Override\n    public Object invoke( String fnName, Object...args ) throws Exception {\n        String szLastName = this.mszCurrentChosen;\n        this.mszCurrentChosen = fnName;\n        Object ret = this.mInvoker.invoke( this.mInnerFunctions.get( this.mszCurrentChosen ), args ) ;\n        this.mszCurrentChosen = szLastName;\n        return ret;\n    }\n\n    @Override\n    public void dispatch( Object... 
args ) throws Exception {\n        if( args.length <= 0 ){\n            throw new IllegalArgumentException( \"No chosen be committed.\" );\n        }\n\n        this.mszCurrentChosen = (String) args[0];\n        Executable fn = this.mInnerFunctions.get( this.mszCurrentChosen );\n        if( fn == null ){\n            fn = this.mInnerFunctions.get( \"default\" );\n        }\n        this.mInvoker.invoke( fn, this ) ;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Executable.java",
    "content": "package com.pinecone.framework.system.functions;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Executable extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Executor.java",
    "content": "package com.pinecone.framework.system.functions;\n\npublic interface Executor extends Executable {\n    void execute() throws Exception;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Function.java",
    "content": "package com.pinecone.framework.system.functions;\n\npublic interface Function extends Executable, Invokable {\n    @Override\n    Object invoke( Object...obj ) throws Exception;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/FunctionTraits.java",
    "content": "package com.pinecone.framework.system.functions;\n\nimport com.pinecone.framework.util.ReflectionUtils;\n\nimport java.lang.reflect.Method;\n\npublic abstract class FunctionTraits {\n    public static String thisName(){\n        return FunctionTraits.thatName( 3 );\n    }\n\n    public static String thatName( int level ){\n        return Thread.currentThread().getStackTrace()[ level ].getMethodName();\n    }\n\n    public static Object invoke ( Invokable fn, Object... obj ) throws Exception {\n        return fn.invoke( obj );\n    }\n\n    public static Object invoke ( Executable fn, Object... obj ) throws Exception {\n        if( fn instanceof Function ){\n            return FunctionTraits.invoke( (Invokable) fn , obj );\n        }\n        else if ( fn instanceof Executor ){\n            ( (Executor) fn ).execute();\n            return null;\n        }\n        throw new IllegalArgumentException( \"Not executable.\" );\n    }\n\n    public static Object invoke ( Object that, Method fn, Object...obj ) throws Exception {\n        ReflectionUtils.makeAccessible( fn );\n        try {\n            return fn.invoke( that, obj );\n        }\n        catch ( IllegalArgumentException e ){\n            return fn.invoke( that, new Object[]{ obj } );\n        }\n    }\n\n    public static Object invoke ( Object that, String szFnName, Object... 
obj ) throws Exception {\n        try { //Most likely...\n            Method fn = that.getClass().getDeclaredMethod( szFnName, Object[].class );\n            return FunctionTraits.invoke( that, fn, obj );\n        }\n        catch ( NoSuchMethodException nsm ){ // Try this...\n            Class[] protoArgs = new Class[ obj.length ];\n            int i = 0;\n            for ( Object arg : obj ) {\n                protoArgs [ i++ ] = arg.getClass();\n            }\n\n            try {\n                Method fn = that.getClass().getDeclaredMethod( szFnName, protoArgs );\n                return FunctionTraits.invoke( that, fn, obj );\n            }\n            catch ( NoSuchMethodException e ){ // Let's do savage way...\n                Method[] fns = that.getClass().getDeclaredMethods();\n                for( Method fn : fns ){\n                    if( fn.getName().equals(szFnName) ){\n                        try{\n                            return FunctionTraits.invoke( that, fn, obj );\n                        }\n                        catch ( NoSuchMethodException | IllegalArgumentException againAndAgain ){ }\n                    }\n                }\n                throw new NoSuchMethodException( \"Exhaustively tried, but no matching method was found.\" );\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Invokable.java",
    "content": "package com.pinecone.framework.system.functions;\n\npublic interface Invokable {\n    Object invoke( Object...obj ) throws Exception;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/Invoker.java",
    "content": "package com.pinecone.framework.system.functions;\nimport java.lang.reflect.Method;\n\npublic interface Invoker {\n    Object invoke ( Invokable  fn, Object...obj ) throws Exception ;\n\n    Object invoke ( Executable fn, Object...obj ) throws Exception ;\n\n    Object invoke ( Object that, Method fn, Object...obj ) throws Exception ;\n\n    Object invoke ( Object that, String szFnName, Object...obj ) throws Exception ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/LinearDispatcher.java",
    "content": "package com.pinecone.framework.system.functions;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TreeSet;\n\npublic class LinearDispatcher implements SteerableSegment {\n    public static Set<String > S_RESERVED = new TreeSet<String >() { { add( \"init\" ); add( \"final\" ); } };\n    private Map<String, Executable > mInnerFunctions;\n    private Map<String, Object >     mDynamicData;\n    private String                   mszCurrentChosen ;\n    private Invoker                  mInvoker;\n\n    public LinearDispatcher( Map<String, Executable > innerFns ){\n        this( null, innerFns, null );\n    }\n\n    public LinearDispatcher( Map<String, Object > dynamicData, Map<String, Executable > innerFns ){\n        this( dynamicData, innerFns, null );\n    }\n\n    public LinearDispatcher( Map<String, Object > dynamicData, Map<String, Executable > innerFns, Invoker invoker ){\n        this.mInnerFunctions = innerFns;\n        this.mDynamicData    =  dynamicData != null ? dynamicData : new LinkedHashMap<>();\n        this.mInvoker        = invoker != null ? invoker : new SystemInvoker();\n    }\n\n    @Override\n    public Map<String, Object > data(){\n        return this.mDynamicData;\n    }\n\n    @Override\n    public String name() {\n        return this.mszCurrentChosen;\n    }\n\n    @Override\n    public Object invoke( String fnName, Object...args ) throws Exception {\n        String szLastName = this.mszCurrentChosen;\n        this.mszCurrentChosen = fnName;\n        Object ret = this.mInvoker.invoke( this.mInnerFunctions.get( fnName ), args ) ;\n        this.mszCurrentChosen = szLastName;\n        return ret;\n    }\n\n    @Override\n    public void dispatch( Object... 
args ) throws Exception {\n        boolean bNotIgnoreExp = args.length > 0 && (boolean) args[0];\n\n        try{\n            this.mszCurrentChosen = \"init\";\n            Executable fnInit = this.mInnerFunctions.get( this.mszCurrentChosen );\n\n            if( fnInit instanceof Function ){\n                Object ret = ( ( Function ) fnInit ).invoke( this );\n                if( ret instanceof Boolean && !(boolean) ret ){\n                    return;\n                }\n            }\n            else {\n                this.mInvoker.invoke( this.mInnerFunctions.get( this.mszCurrentChosen ), this ) ;\n            }\n        }\n        catch ( Exception e ){\n            if( bNotIgnoreExp ){\n                throw e;\n            }\n        }\n\n\n        for( Object each : this.mInnerFunctions.entrySet() ){\n            Map.Entry kv = ( Map.Entry ) each;\n            this.mszCurrentChosen = (String) kv.getKey();\n            if( LinearDispatcher.S_RESERVED.contains( this.mszCurrentChosen ) ){\n                continue;\n            }\n\n            try {\n                this.mInvoker.invoke( (Executable) kv.getValue(), this );\n            }\n            catch ( Exception e ){\n                if( bNotIgnoreExp ){\n                    throw e;\n                }\n            }\n        }\n\n\n        try {\n            this.mszCurrentChosen = \"final\";\n            this.mInvoker.invoke( this.mInnerFunctions.get( this.mszCurrentChosen ), this ) ;\n        }\n        catch ( Exception e ){\n            if( bNotIgnoreExp ){\n                throw e;\n            }\n        }\n\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/SteerableSegment.java",
    "content": "package com.pinecone.framework.system.functions;\n\nimport java.util.Map;\n\npublic interface SteerableSegment {\n    Map<String, Object > data();\n\n    String name();\n\n    Object invoke( String fnName, Object...args ) throws Exception;\n\n    void dispatch( Object...args ) throws Exception;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/functions/SystemInvoker.java",
    "content": "package com.pinecone.framework.system.functions;\nimport java.lang.reflect.Method;\n\npublic class SystemInvoker implements Invoker {\n    @Override\n    public Object invoke( Invokable  fn, Object... obj ) throws Exception {\n        return FunctionTraits.invoke( fn, obj );\n    }\n\n    @Override\n    public Object invoke( Executable fn, Object... obj ) throws Exception {\n        return FunctionTraits.invoke( fn, obj );\n    }\n\n    @Override\n    public Object invoke( Object that, Method fn, Object...obj ) throws Exception {\n        return FunctionTraits.invoke( that, fn, obj );\n    }\n\n    @Override\n    public Object invoke( Object that, String szFnName, Object... obj ) throws Exception {\n        return FunctionTraits.invoke( that, szFnName, obj );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/Assimilable.java",
    "content": "package com.pinecone.framework.system.homotype;\n\npublic interface Assimilable {\n    Object assimilate( Object that );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/HomoInjector.java",
    "content": "package com.pinecone.framework.system.homotype;\n\npublic interface HomoInjector extends Injector {\n    boolean  isHomogeneity( Object that );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/Homotypic.java",
    "content": "package com.pinecone.framework.system.homotype;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Homotypic extends Pinenut {\n    boolean  isHomogeneity( Object that );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/Injector.java",
    "content": "package com.pinecone.framework.system.homotype;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Injector extends Pinenut {\n    Object inject ( Object that ) throws Exception ;\n\n    default Object inject ( Object that, Object instance ) throws Exception {\n        return this.inject( that, that.getClass(), instance );\n    }\n\n    Object inject ( Object that, Class<?> stereotype, Object instance ) throws Exception ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/homotype/StereotypicInjector.java",
    "content": "package com.pinecone.framework.system.homotype;\n\npublic interface StereotypicInjector extends Injector {\n    Class<?> getStereotype();\n\n    void     setStereotype( Class<?> stereotype );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Ally.java",
    "content": "package com.pinecone.framework.system.prototype;\n\npublic interface Ally {\n    void beforeSummon() throws Exception ;\n\n    void summoning()     throws Exception ;\n\n    void afterSummon()  throws Exception ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Factory.java",
    "content": "package com.pinecone.framework.system.prototype;\n\npublic interface Factory extends Pinenut {\n    ClassLoader           getClassLoader();\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/FamilyContext.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport java.nio.file.Path;\nimport java.util.List;\n\npublic interface FamilyContext extends Pinenut {\n    List<Object > getGlobalScopes();\n\n    FamilyContext setGlobalScopes( List<Object > globalScopes );\n\n    Object parent();\n\n    Object thisScope();\n\n    Object root();\n\n    Path[] getParentPaths();\n\n    FamilyContext setParent( Object parent );\n\n    FamilyContext setThisScope( Object thisScope );\n\n    FamilyContext setRoot( Object root );\n\n    FamilyContext setParentPaths( Path[] parentPaths );\n\n    FamilyContext addParentPath( Path newPath );\n\n    FamilyContext addGlobalScope( Object scope );\n\n    default FamilyContext asProgenitor  ( Object root ) {\n        this.setThisScope( root );\n        this.setParent   ( root );\n        this.setRoot     ( root );\n\n        return this;\n    }\n\n    default boolean isFamilyAffinity ( FamilyContext otherContext ) {\n        return this.root().equals( otherContext.root() );\n    }\n\n    default boolean isParentAffinity ( FamilyContext otherContext ) {\n        return this.parent().equals( otherContext.parent() );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/MapStructuresEvaluator.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport java.lang.reflect.Field;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.stereotype.JavaBeans;\nimport com.pinecone.framework.util.ClassUtils;\nimport com.pinecone.framework.util.json.JSONArray;\n\npublic class MapStructuresEvaluator implements ObjectiveEvaluator {\n    @Override\n    public Object beanGet( Object that, String key ) {\n        try {\n            return this.beanGetExp( that, key );\n        }\n        catch ( NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public Object beanGetExp( Object that, String key ) throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {\n        if ( that == null || key == null ) {\n            return null;\n        }\n        String getterName = JavaBeans.MethodMajorKeyGet + Character.toUpperCase( key.charAt( 0 ) ) + key.substring( 1 );\n        Method getter = that.getClass().getMethod( getterName );\n        getter.setAccessible( true );\n        return getter.invoke( that );\n    }\n\n    @Override\n    public void beanSet( Object that, String key, Object val ) {\n        try {\n            this.beanSetExp( that, key, val );\n        }\n        catch ( NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ignored ) {\n            // Do nothing.\n        }\n    }\n\n    @Override\n    public void beanSetExp( Object that, String key, Object val ) throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {\n        if ( that == null || key == null ) {\n            return;\n       
 }\n        String setterName = JavaBeans.MethodMajorKeySet + Character.toUpperCase( key.charAt( 0 ) ) + key.substring( 1 );\n        Method setter;\n        if( val == null ) {\n            setter = that.getClass().getMethod( setterName );\n        }\n        else {\n            try{\n                setter = that.getClass().getMethod( setterName, val.getClass() );\n            }\n            catch ( NoSuchMethodException e ) {\n                setter = null;\n                Method[] candidates = that.getClass().getMethods();\n                for( Method candidate : candidates ) {\n                    Class<?>[] pars = candidate.getParameterTypes();\n                    if( candidate.getName().equals( setterName ) && pars.length == 1 ) {\n                        if( ClassUtils.isAssignable( pars[0], val.getClass() ) ){\n                            setter = candidate;\n                            break;\n                        }\n                    }\n                }\n\n                if( setter == null ) {\n                    throw e;\n                }\n            }\n        }\n\n        setter.setAccessible( true );\n        setter.invoke( that, val );\n    }\n\n    @Override\n    public Object structGet( Object that, String key ) {\n        try {\n            return this.structGetExp( that, key );\n        }\n        catch ( NoSuchFieldException | SecurityException | IllegalAccessException | IllegalArgumentException e ) {\n            return null;\n        }\n    }\n\n    public Object structGetExp( Object that, String key ) throws NoSuchFieldException, SecurityException, IllegalAccessException, IllegalArgumentException {\n        if ( that == null || key == null ) {\n            return null;\n        }\n        Field field = that.getClass().getField( key );\n        field.setAccessible( true );\n        return field.get( that );\n    }\n\n    @Override\n    public void structSet( Object that, String key, Object val ) {\n        try {\n            
this.structSetExp( that, key, val );\n        }\n        catch ( NoSuchFieldException | SecurityException | IllegalAccessException | IllegalArgumentException ignored ) {\n            // Do nothing.\n        }\n    }\n\n    public void structSetExp( Object that, String key, Object val ) throws NoSuchFieldException, SecurityException, IllegalAccessException, IllegalArgumentException {\n        if ( that == null || key == null ) {\n            return;\n        }\n        Field field = that.getClass().getField( key );\n        field.setAccessible( true );\n        field.set( that, val );\n    }\n\n    @Override\n    public void classSet( Object that, String key, Object val ) {\n        try {\n            this.beanSetExp( that, key, val );\n        }\n        catch ( NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            this.structSet( that, key, val );\n        }\n    }\n\n    @Override\n    public Object classGet( Object that, String key ) {\n        Object value = this.beanGet( that, key );\n        if( value == null ) {\n            return this.structGet( that, key );\n        }\n        return value;\n    }\n\n    @Override\n    public Object get( Object that, String key ) {\n        if ( that == null ) {\n            return null;\n        }\n\n        if ( that instanceof Map ) {\n            return ((Map<?, ?>) that).get(key);\n        }\n        else if ( that instanceof List ) {\n            try {\n                int index = Integer.parseInt( key );\n                return ((List<?>) that).get(index);\n            }\n            catch ( NumberFormatException | IndexOutOfBoundsException e ) {\n                return null;\n            }\n        }\n        else if ( that.getClass().isArray() ) {\n            try {\n                int index = Integer.parseInt( key );\n                return ((Object[]) that)[ index ];\n            }\n            catch ( NumberFormatException 
| ArrayIndexOutOfBoundsException e ) {\n                return null;\n            }\n        }\n        else if( that.getClass().isPrimitive() ) {\n            return null;\n        }\n        else if( that.getClass().isEnum() ) {\n            return null;\n        }\n        else if( that instanceof Number ) {\n            return null;\n        }\n        else if( that instanceof String ) {\n            return null;\n        }\n        else {\n            return this.classGet( that, key );\n        }\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public void set( Object that, String key, Object val ) {\n        if ( that == null ) {\n            return;\n        }\n\n        if ( that instanceof Map ) {\n            ((Map<String, Object>) that).put( key, val );\n        }\n        else if ( that instanceof List ) {\n            try {\n                int index = Integer.parseInt( key );\n                ((List<Object>) that).set( index, val );\n            }\n            catch ( NumberFormatException | IndexOutOfBoundsException e ) {\n                // Do nothing.\n            }\n        }\n        else if ( that.getClass().isArray() ) {\n            try {\n                int index = Integer.parseInt( key );\n                ((Object[]) that)[ index ] = val;\n            }\n            catch ( NumberFormatException | ArrayIndexOutOfBoundsException e ) {\n                // Do nothing.\n            }\n        }\n\n        if( that.getClass().isPrimitive() ) {\n            return;\n        }\n        else if( that.getClass().isEnum() ) {\n            return;\n        }\n        else if( that instanceof Number ) {\n            return;\n        }\n        else if( that instanceof String ) {\n            return;\n        }\n\n        this.classSet( that, key, val );\n    }\n\n\n\n\n    @Override\n    public Class<?> beanGetType( Object that, String key ) {\n        try {\n            return this.beanGetTypeExp( that, key );\n        }\n        
catch ( NoSuchMethodException | SecurityException | IllegalArgumentException e ) {\n            return null;\n        }\n    }\n\n    public Class<?> beanGetTypeExp( Object that, String key ) throws NoSuchMethodException, SecurityException, IllegalArgumentException {\n        if ( that == null ) {\n            return null;\n        }\n        if( key == null ) {\n            return null;\n        }\n\n        String getterName = JavaBeans.MethodMajorKeyGet + Character.toUpperCase( key.charAt(0) ) + key.substring( 1 );\n        return that.getClass().getMethod( getterName ).getReturnType();\n    }\n\n    @Override\n    public Class<?> structGetType( Object that, String key ) {\n        try {\n            return this.structGetTypeWithException( that, key );\n        }\n        catch ( NoSuchFieldException | SecurityException e ) {\n            return null;\n        }\n    }\n\n    public Class<?> structGetTypeWithException( Object that, String key ) throws NoSuchFieldException, SecurityException {\n        if ( that == null || key == null ) {\n            return null;\n        }\n        Field field = that.getClass().getField( key );\n        return field.getType();\n    }\n\n    @Override\n    public Class<?> classGetType( Object that, String key ) {\n        Class<?> type = this.beanGetType( that, key );\n        if ( type == null ) {\n            type = this.structGetType( that, key );\n        }\n        return type;\n    }\n\n    @Override\n    public Class<?> getType( Object that, String key ) {\n        if ( that == null ) {\n            return null;\n        }\n        if ( that instanceof Map ) {\n            Object value = ((Map<?, ?>) that).get( key );\n            return value != null ? 
value.getClass() : Object.class;\n        }\n        else if ( that instanceof List ) {\n            try {\n                int index = Integer.parseInt( key );\n                Object value ;\n                if( that instanceof JSONArray ) {\n                    value = ((JSONArray) that).opt( index );\n                }\n                else {\n                    value = ((List<?>) that).get( index );\n                }\n\n                return value != null ? value.getClass() : Object.class;\n            }\n            catch ( NumberFormatException | IndexOutOfBoundsException e ) {\n                return null;\n            }\n        }\n        else if ( that.getClass().isArray() ) {\n            return that.getClass().getComponentType();\n        }\n        else {\n            return this.classGetType( that, key );\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveArray.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.util.json.JSON;\n\n\npublic class ObjectiveArray implements Objectom {\n    protected Object[] mArray;\n\n    public ObjectiveArray( Object[] arr ) {\n        this.mArray = arr;\n    }\n\n    @Override\n    public int size() {\n        return this.mArray.length;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.size() == 0;\n    }\n\n    @Override\n    public Object get( Object key ){\n        Integer i = ObjectiveList.affirmIntegerKey(key);\n        if( i == null ) {\n            return null;\n        }\n\n        return this.mArray[i];\n    }\n\n    @Override\n    public void set( Object key, Object val ){\n        Integer i = ObjectiveList.affirmIntegerKey(key);\n        if( i == null ) {\n            return ;\n        }\n\n        this.mArray[i] = val;\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object k ) {\n        return this.containsKey( k );\n    }\n\n    @Override\n    public boolean containsKey( Object k ) {\n        Integer i = ObjectiveList.affirmIntegerKey(k);\n        if( i == null ) {\n            return false;\n        }\n\n        int nLength = this.mArray.length;\n        if( i < 0 || nLength == 0 ){\n            return false;\n        }\n        return nLength > i;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.mArray );\n    }\n\n    @Override\n    public Map<String, Object > toMap(Class<? 
> mapType ) {\n        Map<String, Object > map = Units.newInstance( mapType );\n        int i = 0;\n        for( Object e : this.mArray ) {\n            map.put( Integer.toString( i ), e );\n            ++i;\n        }\n\n        return map;\n    }\n\n    @Override\n    public Map<String, Object > toMap() {\n        return this.toMap( LinkedHashMap.class );\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this.mArray );\n    }\n\n    @Override\n    public String  prototypeName() {\n        return Prototype.prototypeName(this.mArray);\n    }\n\n    @Override\n    public Integer[] keys() {\n        Integer[] list = new Integer[ this.mArray.length ];\n        for ( int i = 0; i < this.mArray.length; ++i ) {\n            list[ i ] = i;\n        }\n        return list;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveBean.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.stereotype.JavaBeans;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic class ObjectiveBean implements Objectom {\n    protected Object    mObj;\n    protected Entry[]   mGetMethods;\n    protected Entry[]   mSetMethods;\n\n    public ObjectiveBean( Object bean ) {\n        this.mObj = bean;\n        this.cacheMethods();\n    }\n\n    protected void cacheMethods() {\n        Class klass = this.mObj.getClass();\n        boolean includeSuperClass = klass.getClassLoader() != null;\n        Method[] methods = includeSuperClass ? 
klass.getMethods() : klass.getDeclaredMethods();\n\n        ArrayList<Entry> getDummy = new ArrayList<>();\n        ArrayList<Entry> setDummy = new ArrayList<>();\n\n        for( int i = 0; i < methods.length; ++i ) {\n            try {\n                Method method = methods[i];\n                if ( Modifier.isPublic( method.getModifiers() ) ) {\n                    String key = JavaBeans.getGetterMethodKeyName( method );\n                    if( StringUtils.isEmpty( key ) ) {\n                        key = JavaBeans.getSetterMethodKeyName( method );\n                        if( !StringUtils.isEmpty( key ) ) { // Found setter\n                            if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 1 ) {\n                                key = JavaBeans.methodKeyNameLowerCaseNormalize( key );\n\n                                setDummy.add( new Entry( key, method ) );\n                            }\n                        }\n                    }\n                    else { // Found getter\n                        if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 0 ) {\n                            key = JavaBeans.methodKeyNameLowerCaseNormalize( key );\n\n                            getDummy.add( new Entry( key, method ) );\n                        }\n                    }\n                }\n            }\n            catch ( Exception e ) {\n                e.printStackTrace();\n                // Do nothing.\n            }\n        }\n\n        this.mGetMethods = getDummy.toArray( new Entry[]{} );\n        this.mSetMethods = setDummy.toArray( new Entry[]{} );\n        Arrays.sort( this.mGetMethods );\n        Arrays.sort( this.mSetMethods );\n    }\n\n    @Override\n    public int size() {\n        return this.mGetMethods.length;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.size() == 0;\n    }\n\n    @Override\n    public Object get( Object key ) {\n       
 String szKey = key.toString();\n        try {\n            int index = ObjectiveBean.binarySearch( this.mGetMethods, szKey );\n            if( index < 0 ) {\n                return null;\n            }\n            Method method = this.mGetMethods[ index ].method;\n            method.setAccessible( true );\n            return method.invoke( this.mObj );\n        }\n        catch ( IllegalAccessException | InvocationTargetException e ) {\n            return null;\n        }\n    }\n\n    protected static int binarySearch( Entry[] those, String key ) {\n        int low = 0;\n        int high = those.length - 1;\n        while ( low <= high ) {\n            int mid = (low + high) >>> 1;\n            int cmp = those[ mid ].name.compareTo( key );\n            if ( cmp < 0 ) {\n                low = mid + 1;\n            }\n            else if ( cmp > 0 ) {\n                high = mid - 1;\n            }\n            else {\n                return mid;\n            }\n        }\n        return -(low + 1);\n    }\n\n    @Override\n    public void set( Object key, Object val ) {\n        String szKey = key.toString();\n        try {\n            int index = ObjectiveBean.binarySearch( this.mSetMethods, szKey );\n            if( index < 0 ) {\n                throw new IllegalArgumentException( \"Specific setter-method not found: set\" + JavaBeans.methodKeyNameUpperCaseNormalize( szKey ) );\n            }\n            Method method = this.mSetMethods[ index ].method;\n            method.setAccessible( true );\n            method.invoke( this.mObj, val );\n        }\n        catch ( IllegalAccessException | InvocationTargetException e ) {\n            throw new RuntimeException( e );\n        }\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object k ) {\n        if ( this.mObj instanceof PineUnit ) {\n            return ( (PineUnit) this.mObj ).hasOwnProperty(k);\n        }\n        return this.containsKey(k);\n    }\n\n    @Override\n    public boolean 
containsKey( Object k ) {\n        return ObjectiveBean.binarySearch( this.mGetMethods, k.toString() ) >= 0;\n    }\n\n    @Override\n    public String toJSONString() {\n        ArrayList<KeyValue<String, Object > > dummy = new ArrayList<>();\n        for( Entry kv : this.mGetMethods ) {\n            Object val;\n            try {\n                kv.method.setAccessible( true );\n                val = kv.method.invoke( this.mObj );\n            }\n            catch ( IllegalAccessException | InvocationTargetException e ) {\n                break;\n            }\n\n            dummy.add( new KeyValue<>( kv.name, val ) );\n        }\n\n        return JSONEncoder.stringifyMapFormat( dummy );\n    }\n\n    @Override\n    public Map<String, Object > toMap( Class<? > mapType ) {\n        Map<String, Object > map = Units.newInstance( mapType );\n        for( Entry kv : this.mGetMethods ) {\n            Object val;\n            try {\n                kv.method.setAccessible( true );\n                val = kv.method.invoke( this.mObj );\n                map.put( kv.name, val );\n            }\n            catch ( IllegalAccessException | InvocationTargetException e ) {\n                break;\n            }\n        }\n\n        return map;\n    }\n\n    @Override\n    public Map<String, Object > toMap() {\n        return this.toMap( LinkedHashMap.class );\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this.mObj );\n    }\n\n    @Override\n    public String prototypeName() {\n        return Prototype.prototypeName( this.mObj );\n    }\n\n    static class Entry implements Comparable<Entry > {\n        String name;\n        Method method;\n\n        Entry( String name, Method method ) {\n            this.name   = name;\n            this.method = method;\n        }\n\n        @Override\n        public int compareTo( Entry o ) {\n            return this.name.compareTo( o.name );\n        }\n    }\n\n    @Override\n    public 
String[] keys() {\n        String[] list = new String[ this.mGetMethods.length ];\n        for ( int i = 0; i < this.mGetMethods.length; ++i ) {\n            list[ i ] = this.mGetMethods[ i ].name;\n        }\n        return list;\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveClass.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport java.lang.reflect.Field;\nimport java.util.Arrays;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.json.homotype.StructJSONEncoder;\n\npublic class ObjectiveClass implements Objectom {\n    protected Object    mObj;\n    protected Entry[]   mFields;\n    protected boolean   mbUsingOrderCache;\n\n    public ObjectiveClass( Object that, boolean bUsingOrderCache ) {\n        this.mObj = that;\n        this.mbUsingOrderCache = bUsingOrderCache;\n        if ( bUsingOrderCache ) {\n            this.cacheFields();\n        }\n    }\n\n    public ObjectiveClass( Object that ) {\n        this( that, true );\n    }\n\n    @Override\n    public int size() {\n        if( this.mFields != null ) {\n            return this.mFields.length;\n        }\n        return this.mObj.getClass().getFields().length;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.size() == 0;\n    }\n\n    private void cacheFields() {\n        Field[] classFields = this.mObj.getClass().getFields();\n        this.mFields = new Entry[ classFields.length ];\n        for ( int i = 0; i < classFields.length; ++i ) {\n            Field field        = classFields[ i ];\n            String fieldName   = field.getName();\n            this.mFields[ i ]  = new Entry( fieldName, field );\n        }\n        Arrays.sort( this.mFields );\n    }\n\n    public Object get( Object key ) {\n        String szKey = key.toString();\n        try {\n            if ( this.mbUsingOrderCache ) {\n                int index = this.binarySearch( szKey );\n                if ( index >= 0 ) {\n                    ReflectionUtils.makeAccessible( this.mFields[index].field );\n                    return this.mFields[ index ].field.get( this.mObj );\n                }\n            }\n            else 
{\n                Field field = this.mObj.getClass().getField( szKey );\n                ReflectionUtils.makeAccessible( field );\n                return field.get( this.mObj );\n            }\n        }\n        catch ( NoSuchFieldException | IllegalAccessException e ) {\n            return null;\n        }\n        return null;\n    }\n\n    protected int binarySearch( String key ) {\n        int low = 0;\n        int high = this.mFields.length - 1;\n        while ( low <= high ) {\n            int mid = (low + high) >>> 1;\n            int cmp = this.mFields[ mid ].name.compareTo( key );\n            if ( cmp < 0 ) {\n                low = mid + 1;\n            }\n            else if ( cmp > 0 ) {\n                high = mid - 1;\n            }\n            else {\n                return mid;\n            }\n        }\n        return -(low + 1);\n    }\n\n    public void set( Object key, Object val ) {\n        String szKey = key.toString();\n        try {\n            if ( this.mbUsingOrderCache ) {\n                int index = this.binarySearch( szKey );\n                if ( index >= 0 ) {\n                    ReflectionUtils.makeAccessible( this.mFields[ index ].field );\n                    this.mFields[index].field.set( this.mObj, val );\n                    return;\n                }\n            }\n            else {\n                Field field = this.mObj.getClass().getField( szKey );\n                ReflectionUtils.makeAccessible( field );\n                field.set( this.mObj, val );\n                return;\n            }\n        }\n        catch ( NoSuchFieldException | IllegalAccessException e ) {\n            throw new RuntimeException( e );\n        }\n        throw new IllegalArgumentException( \"Field not found: \" + key );\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object k ) {\n        if ( this.mObj instanceof PineUnit ) {\n            return ( (PineUnit) this.mObj ).hasOwnProperty(k);\n        }\n        return 
this.containsKey(k);\n    }\n\n    @Override\n    public boolean containsKey( Object k ) {\n        String szKey = k.toString();\n        try {\n            if ( this.mbUsingOrderCache ) {\n                return this.binarySearch( szKey ) != -1;\n            }\n            else {\n                Field field = this.mObj.getClass().getField( szKey );\n                return field != null;\n            }\n        }\n        catch (NoSuchFieldException e) {\n            return false;\n        }\n    }\n\n    @Override\n    public String toJSONString() {\n        return StructJSONEncoder.BasicEncoder.encode( this.mObj );\n    }\n\n    @Override\n    public Map<String, Object > toMap( Class<? > mapType ) {\n        Map<String, Object > map = Units.newInstance( mapType );\n        Field[] classFields = this.mObj.getClass().getFields();\n        this.mFields = new Entry[ classFields.length ];\n        for ( int i = 0; i < classFields.length; ++i ) {\n            Field field        = classFields[ i ];\n            String fieldName   = field.getName();\n            try {\n                map.put( fieldName, field.get( this.mObj ) );\n            }\n            catch ( IllegalAccessException e ) {\n                try {\n                    field.setAccessible( true );\n                    map.put( fieldName, field.get( this.mObj ) );\n                    field.setAccessible( false );\n                }\n                catch ( IllegalAccessException ignore ) {\n                    // Do nothing.\n                }\n            }\n        }\n\n        return map;\n    }\n\n    @Override\n    public Map<String, Object > toMap() {\n        return this.toMap( LinkedHashMap.class );\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this.mObj );\n    }\n\n    @Override\n    public String prototypeName() {\n        return Prototype.prototypeName( this.mObj );\n    }\n\n    private static class Entry implements Comparable<Entry> {\n      
  String name;\n        Field field;\n\n        Entry( String name, Field field ) {\n            this.name  = name;\n            this.field = field;\n        }\n\n        @Override\n        public int compareTo( Entry o ) {\n            return this.name.compareTo( o.name );\n        }\n    }\n\n    @Override\n    public String[] keys() {\n        int size = this.size(); // Saving some logic operations.\n\n        String[] list = new String[ size ];\n        if( this.mFields != null && this.mFields.length > 0 ) {\n            for ( int i = 0; i < size; ++i ) {\n                list[ i ] = this.mFields[i].name;\n            }\n        }\n        else {\n            Field[] classFields = this.mObj.getClass().getFields();\n            for ( int i = 0; i < size; ++i ) {\n                list[ i ] = classFields[i].getName();\n            }\n        }\n        return list;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveEvaluator.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport java.lang.reflect.Array;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.GenericArrayType;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.ParameterizedType;\nimport java.lang.reflect.Type;\nimport java.lang.reflect.WildcardType;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.stereotype.JavaBeans;\n\npublic interface ObjectiveEvaluator extends Pinenut {\n    ObjectiveEvaluator MapStructures = new MapStructuresEvaluator();\n\n    Object beanGet( Object that, String key );\n\n    Object beanGetExp( Object that, String key ) throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException;\n\n    default Object beanGet( Object that, Object key ) {\n        return this.beanGet( that, key.toString() );\n    }\n\n    Object structGet( Object that, String key );\n\n    default Object structGet( Object that, Object key ) {\n        return this.structGet( that, key.toString() );\n    }\n\n    Object get( Object that, String key );\n\n    default Object get( Object that, Object key ) {\n        return this.get( that, key.toString() );\n    }\n\n    Object classGet( Object that, String key );\n\n    default Object classGet( Object that, Object key ) {\n        return this.classGet( that, key.toString() );\n    }\n\n\n\n\n    void beanSet( Object that, String key, Object val );\n\n    void beanSetExp( Object that, String key, Object val ) throws NoSuchMethodException, SecurityException, IllegalAccessException, IllegalArgumentException, InvocationTargetException;\n\n    default void beanSet( Object that, Object key, Object val ) {\n        this.beanSet( that, key.toString(), val );\n    }\n\n    void structSet( Object that, String key, Object val );\n\n    default void structSet( 
Object that, Object key, Object val ) {\n        this.structSet( that, key.toString(), val );\n    }\n\n    void set( Object that, String key, Object val );\n\n    void classSet( Object that, String key, Object val );\n\n    default void classSet( Object that, Object key, Object val ) {\n        this.classSet( that, key.toString(), val );\n    }\n\n    default void set( Object that, Object key, Object val ) {\n        this.set( that, key.toString(), val );\n    }\n\n\n\n    Class<?> beanGetType( Object that, String key );\n\n    Class<?> beanGetTypeExp( Object that, String key ) throws NoSuchMethodException, SecurityException, IllegalArgumentException;\n\n    default Class<?> beanGetType( Object that, Object key ) {\n        return this.beanGetType( that, key.toString() );\n    }\n\n    Class<?> structGetType( Object that, String key );\n\n    default Class<?> structGetType( Object that, Object key ) {\n        return this.structGetType( that, key.toString() );\n    }\n\n    Class<?> getType( Object that, String key );\n\n    default Class<?> getType( Object that, Object key ) {\n        return this.getType( that, key.toString() );\n    }\n\n    Class<?> classGetType( Object that, String key );\n\n    default Class<?> classGetType( Object that, Object key ) {\n        return this.classGetType( that, key.toString() );\n    }\n\n    default Type getFieldGenericType( Object obj, String fieldName ) {\n        Type fieldGenericType = null;\n        try{\n            if( obj != null ) {\n                Field field      = obj.getClass().getDeclaredField( fieldName );\n                fieldGenericType = field.getGenericType();\n            }\n        }\n        catch ( NoSuchFieldException | SecurityException e ) {\n            fieldGenericType = null;\n        }\n\n        return fieldGenericType;\n    }\n\n    default Type getGetterGenericType( Object that, String key ) {\n        Type genericType = null;\n        try{\n            if( that != null ) {\n                
String getterName = JavaBeans.MethodMajorKeyGet + Character.toUpperCase( key.charAt(0) ) + key.substring( 1 );\n                Method getter     = that.getClass().getMethod( getterName );\n                genericType       = getter.getGenericReturnType();\n            }\n        }\n        catch ( NoSuchMethodException | SecurityException e ) {\n            genericType = null;\n        }\n\n        return genericType;\n    }\n\n    default Type getSetterGenericType( Object that, String key ) {\n        Type genericType = null;\n        if( that != null ) {\n            String getterName = JavaBeans.MethodMajorKeySet + Character.toUpperCase( key.charAt(0) ) + key.substring( 1 );\n            Method[] methods  = that.getClass().getMethods();\n            for( Method method : methods ) {\n                if( method.getName().equals( getterName ) && method.getParameterCount() == 1 ) {\n                    Type[] pars = method.getGenericParameterTypes();\n                    genericType = pars[ 0 ];\n                    break;\n                }\n            }\n        }\n\n        return genericType;\n    }\n\n    default Type getElementGenericType( Object that, String key ) {\n        Type t = this.getSetterGenericType( that, key );\n        if( t == null ) {\n            t = this.getGetterGenericType( that, key );\n        }\n\n        if( t == null ) {\n            t = this.getFieldGenericType( that, key );\n        }\n\n        return t;\n    }\n\n\n\n    static Class<?> resolveRawClass( Type type ) {\n        if ( type instanceof Class<?> ) {\n            return (Class<?>) type;\n        }\n\n        if ( type instanceof ParameterizedType )\n            return (Class<?>) ((ParameterizedType) type).getRawType();\n\n        if ( type instanceof GenericArrayType ) {\n            Type c = ((GenericArrayType) type).getGenericComponentType();\n            return Array.newInstance(resolveRawClass(c), 0).getClass();\n        }\n\n        if ( type instanceof WildcardType 
) {\n            Type[] upper = ((WildcardType) type).getUpperBounds();\n            return resolveRawClass(upper[0]);\n        }\n\n        return Object.class;\n    }\n\n    static Type extractGenericElementType( Type type ) {\n        if ( type instanceof ParameterizedType ) {\n            ParameterizedType pt = (ParameterizedType) type;\n            Type raw = pt.getRawType();\n\n            if (raw == List.class || raw == Set.class || raw == Collection.class) {\n                return pt.getActualTypeArguments()[0];\n            }\n\n            if (raw == Map.class) {\n                return pt.getActualTypeArguments()[1]; // value type\n            }\n        }\n\n        if ( type instanceof GenericArrayType ) {\n            return ((GenericArrayType) type).getGenericComponentType();\n        }\n\n        if ( type instanceof Class && ((Class<?>) type).isArray() ) {\n            return ((Class<?>) type).getComponentType();\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveList.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.util.json.JSON;\n\n\npublic class ObjectiveList<T> implements Objectom {\n    protected List<T > mList;\n\n    public ObjectiveList( List<T > list ) {\n        this.mList = list;\n    }\n\n    public static Integer affirmIntegerKey( Object key ) {\n        if ( key instanceof Integer ) {\n            return (Integer) key;\n        }\n        else if ( key instanceof Long ) {\n            return  (int)(long) key;\n        }\n        else if ( key instanceof Short ) {\n            return (int)(short) key;\n        }\n        else if ( key instanceof Byte ) {\n            return (int) (byte) key;\n        }\n        else if ( key instanceof String ) {\n            String szKey = (String) key;\n            return Integer.parseInt(szKey);\n        }\n\n        return null;\n    }\n\n    @Override\n    public int size() {\n        return this.mList.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mList.isEmpty();\n    }\n\n    @Override\n    public Object get( Object key ){\n        Integer i = ObjectiveList.affirmIntegerKey(key);\n        if( i == null ) {\n            return null;\n        }\n\n        return this.mList.get(i);\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    public void set( Object key, Object val ){\n        Integer i = ObjectiveList.affirmIntegerKey(key);\n        if( i == null ) {\n            return ;\n        }\n\n        this.mList.set(i, (T)val);\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object k ) {\n        if( this.mList instanceof PineUnit ) {\n            ( (PineUnit)this.mList ).hasOwnProperty( k );\n        }\n        return this.containsKey( k );\n    }\n\n    @Override\n    public boolean containsKey( Object k ) {\n        Integer i = 
ObjectiveList.affirmIntegerKey(k);\n        if( i == null ) {\n            return false;\n        }\n\n        int nLength = this.mList.size();\n        if( i < 0 || nLength == 0 ){\n            return false;\n        }\n        return nLength > i;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify(this.mList);\n    }\n\n    @Override\n    public Map<String, Object > toMap(Class<? > mapType ) {\n        Map<String, Object > map = Units.newInstance( mapType );\n        int i = 0;\n        for( Object e : this.mList ) {\n            map.put( Integer.toString( i ), e );\n            ++i;\n        }\n\n        return map;\n    }\n\n    @Override\n    public Map<String, Object > toMap() {\n        return this.toMap( LinkedHashMap.class );\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this.mList );\n    }\n\n    @Override\n    public String  prototypeName() {\n        return Prototype.prototypeName(this.mList);\n    }\n\n    @Override\n    public Integer[] keys() {\n        Integer[] list = new Integer[ this.mList.size() ];\n        for ( int i = 0; i < this.mList.size(); ++i ) {\n            list[ i ] = i;\n        }\n        return list;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/ObjectiveMap.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\npublic class ObjectiveMap<K, V> implements Objectom {\n    protected Map<K, V > mMap;\n\n    public ObjectiveMap( Map<K, V > map ) {\n        this.mMap = map;\n    }\n\n    @Override\n    public int size() {\n        return this.mMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mMap.isEmpty();\n    }\n\n    public Object get( Object key ){\n        return this.mMap.get(key);\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    public void set( Object key, Object val ){\n        this.mMap.put((K)key, (V)val);\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object k ) {\n        if( this.mMap instanceof PineUnit ) {\n            ( (PineUnit)this.mMap ).hasOwnProperty( k );\n        }\n        return this.containsKey(k);\n    }\n\n    @Override\n    public boolean containsKey( Object k ) {\n        return this.mMap.containsKey(k);\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify(this.mMap);\n    }\n\n    @Override\n    public Map<String, Object > toMap( Class<? 
> mapType ) {\n        Map<String, Object > map = Units.newInstance( mapType );\n        int i = 0;\n        for( Map.Entry<K, V > kv : this.mMap.entrySet() ) {\n            map.put( kv.getKey().toString(), kv.getValue() );\n            ++i;\n        }\n\n        return map;\n    }\n\n    @Override\n    public Map<String, Object > toMap() {\n        return this.toMap( LinkedHashMap.class );\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this.mMap );\n    }\n\n    @Override\n    public String  prototypeName() {\n        return Prototype.prototypeName(this.mMap);\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public K[] keys() {\n        return (K[])this.mMap.keySet().toArray();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Objectom.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n *  Pinecone Ursus For Java Objectom\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Objectom is an uniformity map-operator, supported wrapped unified operation class.\n *  It has implemented following types, and will let them conformed the unified interface.\n *  Array, List, Map, Fielded-Class, Bean-Class\n *  *****************************************************************************************\n *  Notice:\n *  1. All objects are un-appendable, and should consider as the `class`, only supported get/set.\n *  2. Excepted the `set`, other methods should consider as the const, `const Type* method() const;`\n *  3. Some scenarios likes the `bean`, which the `gets` could not paired with the `sets` therein.\n *  4. Some scenarios e.g. the `class`, the value needed to retrieve from inner fields or methods.\n *  4.1 In these condition, it may provokes exceptions, so no explicit `values()` method given.\n *  4.2 The implicated keys in the `class`, will be all retrieved, and may not be expected.\n *  *****************************************************************************************\n *  Dragon King, the undefined\n */\npublic interface Objectom extends PineUnit {\n    int size();\n\n    boolean isEmpty();\n\n    Object get( Object key );\n\n    void set( Object key, Object val );\n\n    boolean containsKey( Object k ) ;\n\n    // Readonly\n    // const Object* keys() const;\n    Object[] keys();\n\n    Map<String, Object > toMap( Class<? 
> mapType );\n\n    default Map<String, Object > toMap() {\n        return this.toMap( LinkedHashMap.class );\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    static Objectom wrap( Object that ) {\n        if( that instanceof Objectom ) {\n            return (Objectom) that;\n        }\n        else if( that instanceof Map ) {\n            return new ObjectiveMap<>( (Map) that );\n        }\n        else if( that instanceof List) {\n            return new ObjectiveList<>( (List) that );\n        }\n        else if( that.getClass().isArray() ){\n            return new ObjectiveArray( (Object[]) that );\n        }\n\n        return new ObjectiveBean(that);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/OverridableFamily.java",
    "content": "package com.pinecone.framework.system.prototype;\n\npublic interface OverridableFamily extends FamilyContext {\n\n    boolean isOverriddenAffinity();\n\n    void setOverriddenAffinity( boolean overrideAffinity ) ;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/PineUnit.java",
    "content": "package com.pinecone.framework.system.prototype;\n\npublic interface PineUnit extends Pinenut {\n    boolean hasOwnProperty( Object elm );\n\n    boolean containsKey( Object key );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Pinenut.java",
    "content": "package com.pinecone.framework.system.prototype;\n\npublic interface Pinenut {\n    default TypeIndex prototype() {\n        return Prototype.typeid( this );\n    }\n\n    default String  prototypeName() {\n        return this.className();\n    }\n\n    default boolean isPrototypeOf( TypeIndex that ){\n        return this.prototype().equals( that );\n    }\n\n    default String className() {\n        return this.getClass().getSimpleName();\n    }\n\n    default String toJSONString() {\n        return String.format(\n                \"\\\"[object %s(0x%s)]\\\"\",\n                this.className() , Integer.toHexString( this.hashCode() )\n        );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/PinenutTraits.java",
    "content": "package com.pinecone.framework.system.prototype;\n\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.framework.system.functions.Function;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.util.concurrent.Callable;\n\npublic final class PinenutTraits {\n    public static final String OBJ_STRINGIFY_DEFAULT                 = \"[object %s]\"; //I think javascript's format is marvelous.\n\n    public static final String FUN_TO_JSON_STRING_NAME               = \"toJSONString\";\n\n    public static String invokeToJSONString    ( Object that ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException {\n        Method fnToJSONString = that.getClass().getMethod( PinenutTraits.FUN_TO_JSON_STRING_NAME );\n        ReflectionUtils.makeAccessible( fnToJSONString );\n        return (String) fnToJSONString.invoke( that );\n    }\n\n    public static String invokeToJSONString    ( Object that, int nIndentFactor, int nIndentBlankNum ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException {\n        Method fnToJSONString = that.getClass().getMethod( PinenutTraits.FUN_TO_JSON_STRING_NAME, int.class, int.class );\n        ReflectionUtils.makeAccessible( fnToJSONString );\n        return (String) fnToJSONString.invoke( that, nIndentFactor, nIndentBlankNum );\n    }\n\n    public static String invokeToJSONString    ( Object that, String szDefaultResult )  {\n        try{\n            return PinenutTraits.invokeToJSONString( that );\n        }\n        catch ( NoSuchMethodException | IllegalAccessException | InvocationTargetException e ){\n            if( szDefaultResult == null ){\n                return that.toString();\n            }\n            return szDefaultResult;\n        }\n    }\n\n    public static String invokeToString        ( Object that, Object dyDefaultResult ) {\n        
try{\n            return PinenutTraits.invokeCaseToString( that, dyDefaultResult );\n        }\n        catch ( IllegalArgumentException e ) {\n            return String.format(\n                    PinenutTraits.OBJ_STRINGIFY_DEFAULT,\n                    that.getClass().getName() + \"(0x\" + Integer.toHexString( that.hashCode() ) + \")\"\n            );\n        }\n    }\n\n    public static String invokeCaseToString    ( Object that, Object dyDefaultResult ) throws IllegalArgumentException {\n        if( that == null ){\n            return \"null\";\n        }\n        else if( that instanceof Function ){\n            return \"[object Function]\";\n        }\n        else if( that instanceof Executor ){\n            return \"[object Executor]\";\n        }\n        else if( that instanceof Runnable ){\n            return \"[object Runnable]\";\n        }\n        else if( that instanceof Callable ){\n            return \"[object Callable]\";\n        }\n        else if( that.getClass().isEnum() ){\n            return that.toString();\n        }\n        else if( Prototype.isMethodDeclared( that, \"toString\" ) ){\n            return that.toString();\n        }\n        else if( that.getClass() == Object.class ){ //Hei hei hei~ :)\n            return \"[object Object]\";\n        }\n        else if( dyDefaultResult instanceof Boolean && (boolean)dyDefaultResult ) {\n            return that.toString();\n        }\n        else if( dyDefaultResult instanceof String ){\n            return (String) dyDefaultResult;\n        }\n\n        throw new IllegalArgumentException();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Prototype.java",
    "content": "package com.pinecone.framework.system.prototype;\n\n\nimport java.lang.reflect.*;\nimport java.util.Arrays;\nimport java.util.HashSet;\n\npublic abstract class Prototype {\n    public static String prototypeName( Object that ){\n        try {\n            return that.getClass().getSimpleName();\n        }\n        catch ( Exception E ){\n            return \"[object Object]\";\n        }\n    }\n\n    public static TypeIndex typeid( Object that ) {\n        return new TypeIndex( that );\n    }\n\n    public static String namespace( Class that ){\n        //return that.getName().split( \".\" + that.getSimpleName() )[0];\n        return that.getPackage().getName();\n    }\n\n    public static String namespace( Object that ){\n        return Prototype.namespace( that.getClass() );\n    }\n\n    public static String namespaceNode ( Class that ) {\n        String szNamespace = Prototype.namespace( that );\n        String[] debris = szNamespace.split(\"\\\\.\");\n        return debris.length <= 1 ? 
szNamespace : debris [ debris.length - 1 ];\n    }\n\n    public static String namespaceNode ( Object that ) {\n        return Prototype.namespaceNode( that.getClass() );\n    }\n\n    public static boolean isAbstract ( Class that ) {\n        return Modifier.isAbstract( that.getModifiers() );\n    }\n\n    private static String[] getPropertyNames ( Object that, boolean bAllOwned ) {\n        if ( that == null ) {\n            return null;\n        }\n        else {\n            Class klass = that.getClass();\n            Field[] fields = klass.getDeclaredFields();\n            int length = fields.length;\n            if ( length == 0 ) {\n                return null;\n            }\n            else {\n                String[] names = new String[length];\n\n                int j = 0;\n                for( int i = 0; i < length; ++i ) {\n                    Field field = fields[i];\n                    if ( (!Modifier.isPublic(field.getModifiers()) || Modifier.isFinal(field.getModifiers())) && !field.isAccessible() ) {\n                        if( !bAllOwned ){\n                            continue;\n                        }\n                    }\n                    names[j++] = fields[i].getName();\n                }\n\n                if( !bAllOwned ){\n                    return Arrays.copyOf( names, j );\n                }\n\n                return names;\n            }\n        }\n    }\n\n    public static String[] getOwnPropertyNames ( Object that ){\n        return Prototype.getPropertyNames( that, true );\n    }\n\n    public static String[] keys ( Object that ){\n        return Prototype.getPropertyNames( that, false );\n    }\n\n\n\n\n\n    public static HashSet<String > getDeclaredMethodsNameSet( Object that ){\n        HashSet<String > hashSet = new HashSet<>();\n        Prototype.getDeclaredMethodsNameSet( hashSet, that );\n        return hashSet;\n    }\n\n    public static void getDeclaredMethodsNameSet( HashSet<String > hSet, Object that ){\n    
    Prototype.getDeclaredMethodsNameSet( hSet, that.getClass() );\n    }\n\n    public static HashSet<String > getDeclaredMethodsNameSet( Class<?> that ){\n        HashSet<String > hashSet = new HashSet<>();\n        Prototype.getDeclaredMethodsNameSet( hashSet, that );\n        return hashSet;\n    }\n\n    public static void getDeclaredMethodsNameSet( HashSet<String > set,  Class<?> hThatClass ){\n        Method[] methods = hThatClass.getDeclaredMethods();\n        for ( Method row : methods ) {\n            set.add( row.getName() );\n        }\n    }\n\n    public static Object invokeNoParameterMethod ( Object that , String szFunctionName ) throws NoSuchMethodException, InvocationTargetException ,IllegalAccessException {\n        Method method = that.getClass().getMethod( szFunctionName );\n        return method.invoke( that );\n    }\n\n    public static boolean isMethodDeclared       ( Object that, String szFnName, Class<?>... parameterTypes ) {\n        try{\n            return that.getClass().getDeclaredMethod( szFnName, parameterTypes ) != null;\n        }\n        catch ( NoSuchMethodException e ){\n            return false;\n        }\n    }\n\n    public static Class primitivify( Class c ){\n        if( c == Byte.class ){\n            return byte.class;\n        }\n        else if( c == Short.class ){\n            return short.class;\n        }\n        else if( c == Integer.class ){\n            return int.class;\n        }\n        else if( c == Long.class ){\n            return long.class;\n        }\n        else if( c == Float.class ){\n            return float.class;\n        }\n        else if( c == Double.class ){\n            return double.class;\n        }\n        else if( c == Character.class ){\n            return char.class;\n        }\n        else if( c == Void.class ){\n            return void.class;\n        }\n\n        return c;\n    }\n\n\n\n\n\n\n\n    /** Element **/\n    public static boolean isNumber( Class<?> stereotype ) {\n    
    if( Number.class.isAssignableFrom( stereotype ) ){\n            return true;\n        }\n        else if( stereotype.isPrimitive() ){\n            return  stereotype == byte.class  || stereotype == short.class ||\n                    stereotype == int.class   || stereotype == long.class  ||\n                    stereotype == float.class || stereotype == double.class;\n        }\n        return false;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Strategy.java",
    "content": "package com.pinecone.framework.system.prototype;\n\npublic interface Strategy extends Pinenut, Cloneable {\n    boolean matched( Object condition );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/Summoner.java",
    "content": "package com.pinecone.framework.system.prototype;\n\npublic interface Summoner extends Pinenut {\n    Object summon( String szClassPath, Object... args ) throws Exception ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/prototype/TypeIndex.java",
    "content": "package com.pinecone.framework.system.prototype;\n\n\n/**\n *  Pinecone For Java TypeIndex [ Runtime Smart prototype Identity ]\n *  Copyright © 2008 - 2024 Bean Nuts Foundation ( DR.Undefined ) All rights reserved. [Mr.A.R.B / WJH]\n *  Tip:\n *  *****************************************************************************************\n *  Author: undefined\n *  Last Modified Date: 2021-03-13\n *  *****************************************************************************************\n *  For name: It's simple name of `class`.\n *  Full name should be considered as Namespace(PackageName) + SimpleName\n *  *****************************************************************************************\n */\npublic class TypeIndex {\n    private Class<?> mClass      = null;\n\n    private Class<?> mParent          = null;\n\n    private Object   mThis            = null;\n\n    public TypeIndex( Object that ) {\n        this.mParent   = that.getClass().getSuperclass();\n        this.mThis     = that;\n        this.mClass    = that.getClass();\n    }\n\n    public TypeIndex prototype(){\n        return this;\n    }\n\n    public Object proto(){\n        return this.mThis;\n    }\n\n    public Class<?> parent(){\n        return this.mParent;\n    }\n\n    public Class<?> classType() {\n        return this.mClass;\n    }\n\n    public String namespace()     {\n        return Prototype.namespace( this.mClass );\n    }\n\n    public String name()     {\n        return this.mClass.getSimpleName();\n    }\n\n    public String typeName(){\n        return this.mClass.getName();\n    }\n\n    @Override\n    public String toString(){\n        return this.typeName();\n    }\n\n    @Override\n    public boolean equals( Object that ) {\n        if( that instanceof TypeIndex ){\n            TypeIndex realThat = (TypeIndex)that;\n            return this.mClass.equals( realThat.mClass ) ;\n        }\n        return false;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Automatus.java",
    "content": "package com.pinecone.framework.system.regime;\n\n/**\n * Automatus (Automaton)\n * 自动机\n */\npublic interface Automatus extends Executioner {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Censorate.java",
    "content": "package com.pinecone.framework.system.regime;\n\n/**\n * Censorate\n * 审阅器\n */\npublic interface Censorate extends Volition, Supervisor {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Examiner.java",
    "content": "package com.pinecone.framework.system.regime;\n\n/**\n * Examiner\n * 检察器\n */\npublic interface Examiner extends Executioner, Supervisor {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Executioner.java",
    "content": "package com.pinecone.framework.system.regime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n/**\n * Executor\n * 执行器\n */\npublic interface Executioner extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Instrument.java",
    "content": "package com.pinecone.framework.system.regime;\n\n/**\n * Instrument\n * 编制器\n */\npublic interface Instrument extends Volition {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Orchestrator.java",
    "content": "package com.pinecone.framework.system.regime;\n\n/**\n * Orchestrator\n * 编排器\n */\npublic interface Orchestrator extends Executioner, Volition {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Regiment.java",
    "content": "package com.pinecone.framework.system.regime;\n\npublic interface Regiment extends Volition, Executioner, Supervisor {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Supervisor.java",
    "content": "package com.pinecone.framework.system.regime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n/**\n * Supervisor\n * 都察器\n */\npublic interface Supervisor extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Tracker.java",
    "content": "package com.pinecone.framework.system.regime;\n\n/**\n * Tracker\n * 观察器\n */\npublic interface Tracker extends Supervisor {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/Volition.java",
    "content": "package com.pinecone.framework.system.regime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n/**\n * Volition\n * 意志器\n */\npublic interface Volition extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Controllor.java",
    "content": "package com.pinecone.framework.system.regime.arch;\n\n/**\n * Controllor, Function Controller\n * 最小临界控制者\n */\npublic interface Controllor extends Dominator {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Director.java",
    "content": "package com.pinecone.framework.system.regime.arch;\n\n/**\n * Director, Module Manager Controller\n * 分组控制者\n */\npublic interface Director extends Manager {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Dominator.java",
    "content": "package com.pinecone.framework.system.regime.arch;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n/**\n * Dominator, Control-Element\n * 支配 / 管理单元\n * OR A.K.A. Driver.\n */\npublic interface Dominator extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Dominus.java",
    "content": "package com.pinecone.framework.system.regime.arch;\n\n/**\n * Dominus, Central Controller\n * 中央控制者\n */\npublic interface Dominus extends Dominator {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Lord.java",
    "content": "package com.pinecone.framework.system.regime.arch;\n\n/**\n * Lord, Domain Controller\n * 领域控制者\n */\npublic interface Lord extends Dominator {\n\n    void release();\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regime/arch/Manager.java",
    "content": "package com.pinecone.framework.system.regime.arch;\n\n/**\n * Manager, Module Controller\n * 组件控制者\n */\npublic interface Manager extends Dominator {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regimentation/CascadeNodus.java",
    "content": "package com.pinecone.framework.system.regimentation;\n\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.framework.util.name.UniNamespace;\n\npublic interface CascadeNodus extends Nodus {\n    CascadeNodus parent();\n\n    default boolean isRoot() {\n        return this.parent() == null;\n    }\n\n    default CascadeNodus root() {\n        CascadeNodus p = this;\n        CascadeNodus c = p;\n        while ( p != null ) {\n            c = p;\n            p = p.parent();\n        }\n\n        return c;\n    }\n\n    Namespace getTargetingName();\n\n    void setTargetingName( Namespace name );\n\n    default void setTargetingName( String name ) {\n        Namespace p = null;\n        if( this.parent() != null ) {\n            p = this.parent().getTargetingName();\n        }\n        this.setTargetingName( new UniNamespace( name, p ) );\n    }\n\n    default String getSimpleName() {\n        return this.getTargetingName().getSimpleName();\n    }\n\n    default String getFullName() {\n        return this.getTargetingName().getFullName();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regimentation/Nodus.java",
    "content": "package com.pinecone.framework.system.regimentation;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Nodus extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regimentation/UniformCascadeNodus.java",
    "content": "package com.pinecone.framework.system.regimentation;\n\nimport com.pinecone.framework.util.name.Namespace;\n\npublic interface UniformCascadeNodus extends CascadeNodus, UniformNodus {\n    @Override\n    default Namespace getUniformName() {\n        return this.getTargetingName();\n    }\n\n    @Override\n    default void setUniformName( Namespace name ) {\n        this.setTargetingName( name );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/regimentation/UniformNodus.java",
    "content": "package com.pinecone.framework.system.regimentation;\n\nimport com.pinecone.framework.util.name.Namespace;\n\n/**\n *  Pinecone Framework For Java (Bean Nuts Pinecone Ursus for Java)\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  UniformNodus\n *  Regimentation Uniform Node\n *  统一编组节点\n *  *****************************************************************************************\n *  Dragon King, the undefined\n */\npublic interface UniformNodus extends Nodus {\n    /**\n     * Nomenclature of node`s name, usually the path of a cascade centralized tree.\n     * 编制节点的系统命名，通常是级联中央集权树的路径\n     */\n    Namespace getUniformName();\n\n    void setUniformName( Namespace name );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/stereotype/HungarianNotation.java",
    "content": "package com.pinecone.framework.system.stereotype;\n\nimport com.pinecone.framework.system.functions.Executable;\nimport com.pinecone.framework.system.prototype.Prototype;\n\nimport java.lang.reflect.Method;\n\npublic class HungarianNotation {\n    public static final String S_PRE_STRING_ZERO = \"sz\";\n    public static final String S_PRE_STRING      = \"s\";\n    public static final String S_PRE_NUMBER      = \"n\";\n    public static final String S_PRE_BOOLEAN     = \"b\";\n    public static final String S_PRE_CHAR        = \"c\";\n    public static final String S_PRE_FUNCTION    = \"fn\";\n\n    public static final String S_PRE_MEMBER      = \"m\";\n    public static final String S_PRE_HANDLE      = \"h\";\n\n\n    public static String toUpperCaseFirst( String szProto ){\n        StringBuilder sb = new StringBuilder();\n        sb.append( szProto );\n        sb.setCharAt( 0, Character.toUpperCase( sb.charAt(0) ) );\n        return sb.toString();\n    }\n\n    public static String addPrefix( String szProto, Class<?> stereotype ) {\n        String szRealName = HungarianNotation.toUpperCaseFirst( szProto );\n        if( Prototype.isNumber( stereotype ) ){\n            return HungarianNotation.S_PRE_NUMBER + szRealName;\n        }\n        else if ( stereotype == String.class ){\n            return HungarianNotation.S_PRE_STRING_ZERO + szRealName;\n        }\n        else if ( stereotype == Boolean.class || stereotype == boolean.class ){\n            return HungarianNotation.S_PRE_BOOLEAN + szRealName;\n        }\n        else if ( stereotype == Character.class || stereotype == char.class ){\n            return HungarianNotation.S_PRE_CHAR + szRealName;\n        }\n        else if ( Executable.class.isAssignableFrom( stereotype ) || stereotype == Method.class ){\n            return HungarianNotation.S_PRE_FUNCTION + szRealName;\n        }\n        return szProto;\n    }\n\n    public static String unPrefix( String szProto, Class<?> stereotype ) {\n   
     StringBuilder sb = new StringBuilder();\n        sb.append( szProto );\n\n        if( Prototype.isNumber( stereotype ) ){\n            sb.deleteCharAt( 0 );\n        }\n        else if ( stereotype == String.class ){\n            sb.delete( 0, 1 );\n        }\n        else if ( stereotype == Boolean.class || stereotype == boolean.class ){\n            sb.deleteCharAt( 0 );\n        }\n        else if ( stereotype == Character.class || stereotype == char.class ){\n            sb.deleteCharAt( 0 );\n        }\n        else if ( Executable.class.isAssignableFrom( stereotype ) || stereotype == Method.class ){\n            sb.delete( 0, 1 );\n        }\n\n        if( sb.length() != szProto.length() ){\n            sb.setCharAt( 0, Character.toLowerCase( sb.charAt(0) ) );\n            return sb.toString() ;\n        }\n\n        return szProto;\n    }\n\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/system/stereotype/JavaBeans.java",
    "content": "package com.pinecone.framework.system.stereotype;\n\nimport java.lang.reflect.Method;\n\npublic final class JavaBeans {\n    public static final String MethodKeyGetClass            = \"getClass\";\n    public static final String MethodKeyGetDeclaringClass   = \"getDeclaringClass\";\n    public static final String MethodMajorKeyGet            = \"get\";\n    public static final int    MethodMajorKeyGetLength      = JavaBeans.MethodMajorKeyGet.length();\n    public static final String MethodMajorKeyIs             = \"is\";\n    public static final int    MethodMajorKeyIsLength       = JavaBeans.MethodMajorKeyIs.length();\n    public static final String MethodMajorKeySet            = \"set\";\n    public static final int    MethodMajorKeySetLength      = JavaBeans.MethodMajorKeySet.length();\n\n\n    public static String getGetterMethodKeyName( String szMethodName ) {\n        String key = null;\n        if ( szMethodName.startsWith( JavaBeans.MethodMajorKeyGet ) ) {\n            if ( !JavaBeans.MethodKeyGetClass.equals(szMethodName) && !JavaBeans.MethodKeyGetDeclaringClass.equals(szMethodName) ) {\n                key = szMethodName.substring( JavaBeans.MethodMajorKeyGetLength ); // \"get\"\n            }\n        }\n        else if ( szMethodName.startsWith( JavaBeans.MethodMajorKeyIs ) ) {\n            key = szMethodName.substring( JavaBeans.MethodMajorKeyIsLength ); // \"is\"\n        }\n\n        return key;\n    }\n\n    public static String getGetterMethodKeyName( Method method ) {\n        return JavaBeans.getGetterMethodKeyName( method.getName() );\n    }\n\n    // First character lower case.\n    public static String methodKeyNameLowerCaseNormalize( String key ) {\n        if ( key.length() == 1 ) {\n            key = key.toLowerCase();\n        }\n        else if ( !Character.isUpperCase( key.charAt( 1 ) ) ) {\n            key = key.substring(0, 1).toLowerCase() + key.substring(1);\n        }\n\n        return key;\n    }\n\n    // First 
character lower case.\n    public static String getKeyGetterMethodNameLowerCaseNormalized( String szMethodName ) {\n        return JavaBeans.methodKeyNameLowerCaseNormalize( JavaBeans.getGetterMethodKeyName( szMethodName ) );\n    }\n\n    // First character lower case.\n    public static String getKeyGetterMethodNameLowerCaseNormalized( Method method ) {\n        return JavaBeans.methodKeyNameLowerCaseNormalize( JavaBeans.getGetterMethodKeyName( method ) );\n    }\n\n\n\n\n    public static String getSetterMethodKeyName( String szMethodName ) {\n        String key = null;\n        if ( szMethodName.startsWith( JavaBeans.MethodMajorKeySet ) ) {\n            key = szMethodName.substring( JavaBeans.MethodMajorKeySetLength ); // \"set\"\n        }\n\n        return key;\n    }\n\n    public static String getSetterMethodKeyName( Method method ) {\n        return JavaBeans.getSetterMethodKeyName( method.getName() );\n    }\n\n    // First character upper case.\n    public static String methodKeyNameUpperCaseNormalize( String key ) {\n        if ( key.length() == 1 ) {\n            key = key.toUpperCase();\n        }\n        else if ( Character.isLowerCase( key.charAt( 0 ) ) ) {\n            key = key.substring(0, 1).toUpperCase() + key.substring(1);\n        }\n\n        return key;\n    }\n\n\n    // First character upper case.\n    public static String getKeySetterMethodNameLowerCaseNormalized( String szMethodName ) {\n        return JavaBeans.methodKeyNameLowerCaseNormalize( JavaBeans.getSetterMethodKeyName( szMethodName ) );\n    }\n\n    // First character upper case.\n    public static String getKeySetterMethodNameLowerCaseNormalized( Method method ) {\n        return JavaBeans.methodKeyNameLowerCaseNormalize( JavaBeans.getSetterMethodKeyName( method ) );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/AbstractMap.java",
    "content": "package com.pinecone.framework.unit;\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.io.Serializable;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.AbstractSet;\nimport java.util.AbstractCollection;\n\npublic abstract class AbstractMap<K,V> implements Map<K,V>, PineUnit {\n\n    protected AbstractMap() {\n    }\n\n    // Query Operations\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation returns {@code entrySet().size()}.\n     */\n    public int size() {\n        return entrySet().size();\n    }\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation returns {@code size() == 0}.\n     */\n    public boolean isEmpty() {\n        return size() == 0;\n    }\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation iterates over {@code entrySet()} searching\n     * for an entry with the specified value.  If such an entry is found,\n     * {@code true} is returned.  If the iteration terminates without\n     * finding such an entry, {@code false} is returned.  
Note that this\n     * implementation requires linear time in the size of the map.\n     *\n     * @throws ClassCastException   {@inheritDoc}\n     * @throws NullPointerException {@inheritDoc}\n     */\n    public boolean containsValue(Object value) {\n        Iterator<Entry<K,V>> i = entrySet().iterator();\n        if (value==null) {\n            while (i.hasNext()) {\n                Entry<K,V> e = i.next();\n                if (e.getValue()==null)\n                    return true;\n            }\n        } else {\n            while (i.hasNext()) {\n                Entry<K,V> e = i.next();\n                if (value.equals(e.getValue()))\n                    return true;\n            }\n        }\n        return false;\n    }\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation iterates over {@code entrySet()} searching\n     * for an entry with the specified key.  If such an entry is found,\n     * {@code true} is returned.  If the iteration terminates without\n     * finding such an entry, {@code false} is returned.  
Note that this\n     * implementation requires linear time in the size of the map; many\n     * implementations will override this method.\n     *\n     * @throws ClassCastException   {@inheritDoc}\n     * @throws NullPointerException {@inheritDoc}\n     */\n    public boolean containsKey(Object key) {\n        Iterator<Map.Entry<K,V>> i = entrySet().iterator();\n        if (key==null) {\n            while (i.hasNext()) {\n                Entry<K,V> e = i.next();\n                if (e.getKey()==null)\n                    return true;\n            }\n        } else {\n            while (i.hasNext()) {\n                Entry<K,V> e = i.next();\n                if (key.equals(e.getKey()))\n                    return true;\n            }\n        }\n        return false;\n    }\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation iterates over {@code entrySet()} searching\n     * for an entry with the specified key.  If such an entry is found,\n     * the entry's value is returned.  If the iteration terminates without\n     * finding such an entry, {@code null} is returned.  
Note that this\n     * implementation requires linear time in the size of the map; many\n     * implementations will override this method.\n     *\n     * @throws ClassCastException            {@inheritDoc}\n     * @throws NullPointerException          {@inheritDoc}\n     */\n    public V get(Object key) {\n        Iterator<Entry<K,V>> i = entrySet().iterator();\n        if ( key == null ) {\n            while ( i.hasNext() ) {\n                Entry<K,V> e = i.next();\n                if ( e.getKey() == null ) {\n                    return e.getValue();\n                }\n            }\n        }\n        else {\n            while ( i.hasNext() ) {\n                Entry<K,V> e = i.next();\n                if ( key.equals(e.getKey()) ) {\n                    return e.getValue();\n                }\n            }\n        }\n        return null;\n    }\n\n\n    // Modification Operations\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation always throws an\n     * {@code UnsupportedOperationException}.\n     *\n     * @throws UnsupportedOperationException {@inheritDoc}\n     * @throws ClassCastException            {@inheritDoc}\n     * @throws NullPointerException          {@inheritDoc}\n     * @throws IllegalArgumentException      {@inheritDoc}\n     */\n    public V put(K key, V value) {\n        throw new UnsupportedOperationException();\n    }\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation iterates over {@code entrySet()} searching for an\n     * entry with the specified key.  If such an entry is found, its value is\n     * obtained with its {@code getValue} operation, the entry is removed\n     * from the collection (and the backing map) with the iterator's\n     * {@code remove} operation, and the saved value is returned.  If the\n     * iteration terminates without finding such an entry, {@code null} is\n     * returned.  
Note that this implementation requires linear time in the\n     * size of the map; many implementations will override this method.\n     *\n     * <p>Note that this implementation throws an\n     * {@code UnsupportedOperationException} if the {@code entrySet}\n     * iterator does not support the {@code remove} method and this map\n     * contains a mapping for the specified key.\n     *\n     * @throws UnsupportedOperationException {@inheritDoc}\n     * @throws ClassCastException            {@inheritDoc}\n     * @throws NullPointerException          {@inheritDoc}\n     */\n    public V remove(Object key) {\n        Iterator<Entry<K,V>> i = entrySet().iterator();\n        Entry<K,V> correctEntry = null;\n        if (key==null) {\n            while (correctEntry==null && i.hasNext()) {\n                Entry<K,V> e = i.next();\n                if (e.getKey()==null)\n                    correctEntry = e;\n            }\n        } else {\n            while (correctEntry==null && i.hasNext()) {\n                Entry<K,V> e = i.next();\n                if (key.equals(e.getKey()))\n                    correctEntry = e;\n            }\n        }\n\n        V oldValue = null;\n        if (correctEntry !=null) {\n            oldValue = correctEntry.getValue();\n            i.remove();\n        }\n        return oldValue;\n    }\n\n\n    // Bulk Operations\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation iterates over the specified map's\n     * {@code entrySet()} collection, and calls this map's {@code put}\n     * operation once for each entry returned by the iteration.\n     *\n     * <p>Note that this implementation throws an\n     * {@code UnsupportedOperationException} if this map does not support\n     * the {@code put} operation and the specified map is nonempty.\n     *\n     * @throws UnsupportedOperationException {@inheritDoc}\n     * @throws ClassCastException            {@inheritDoc}\n     * @throws NullPointerException     
     {@inheritDoc}\n     * @throws IllegalArgumentException      {@inheritDoc}\n     */\n    public void putAll(Map<? extends K, ? extends V> m) {\n        for ( Map.Entry<? extends K, ? extends V> e : m.entrySet() ) {\n            put( e.getKey(), e.getValue() );\n        }\n    }\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation calls {@code entrySet().clear()}.\n     *\n     * <p>Note that this implementation throws an\n     * {@code UnsupportedOperationException} if the {@code entrySet}\n     * does not support the {@code clear} operation.\n     *\n     * @throws UnsupportedOperationException {@inheritDoc}\n     */\n    public void clear() {\n        entrySet().clear();\n    }\n\n\n    // Views\n\n    protected transient Set<K> keySet;\n    protected transient Collection<V> values;\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation returns a set that subclasses {@link AbstractSet}.\n     * The subclass's iterator method returns a \"wrapper object\" over this\n     * map's {@code entrySet()} iterator.  The {@code size} method\n     * delegates to this map's {@code size} method and the\n     * {@code contains} method delegates to this map's\n     * {@code containsKey} method.\n     *\n     * <p>The set is created the first time this method is called,\n     * and returned in response to all subsequent calls.  
No synchronization\n     * is performed, so there is a slight chance that multiple calls to this\n     * method will not all return the same set.\n     */\n    public Set<K> keySet() {\n        Set<K> ks = keySet;\n        if (ks == null) {\n            ks = new AbstractSet<K>() {\n                public Iterator<K> iterator() {\n                    return new Iterator<K>() {\n                        private Iterator<Entry<K,V>> i = entrySet().iterator();\n\n                        public boolean hasNext() {\n                            return i.hasNext();\n                        }\n\n                        public K next() {\n                            return i.next().getKey();\n                        }\n\n                        public void remove() {\n                            i.remove();\n                        }\n                    };\n                }\n\n                public int size() {\n                    return AbstractMap.this.size();\n                }\n\n                public boolean isEmpty() {\n                    return AbstractMap.this.isEmpty();\n                }\n\n                public void clear() {\n                    AbstractMap.this.clear();\n                }\n\n                public boolean contains(Object k) {\n                    return AbstractMap.this.containsKey(k);\n                }\n            };\n            keySet = ks;\n        }\n        return ks;\n    }\n\n    /**\n     * {@inheritDoc}\n     *\n     * @implSpec\n     * This implementation returns a collection that subclasses {@link\n     * AbstractCollection}.  
The subclass's iterator method returns a\n     * \"wrapper object\" over this map's {@code entrySet()} iterator.\n     * The {@code size} method delegates to this map's {@code size}\n     * method and the {@code contains} method delegates to this map's\n     * {@code containsValue} method.\n     *\n     * <p>The collection is created the first time this method is called, and\n     * returned in response to all subsequent calls.  No synchronization is\n     * performed, so there is a slight chance that multiple calls to this\n     * method will not all return the same collection.\n     */\n    public Collection<V> values() {\n        Collection<V> vals = values;\n        if (vals == null) {\n            vals = new AbstractCollection<V>() {\n                public Iterator<V> iterator() {\n                    return new Iterator<V>() {\n                        private Iterator<Entry<K,V>> i = entrySet().iterator();\n\n                        public boolean hasNext() {\n                            return i.hasNext();\n                        }\n\n                        public V next() {\n                            return i.next().getValue();\n                        }\n\n                        public void remove() {\n                            i.remove();\n                        }\n                    };\n                }\n\n                public int size() {\n                    return AbstractMap.this.size();\n                }\n\n                public boolean isEmpty() {\n                    return AbstractMap.this.isEmpty();\n                }\n\n                public void clear() {\n                    AbstractMap.this.clear();\n                }\n\n                public boolean contains(Object v) {\n                    return AbstractMap.this.containsValue(v);\n                }\n            };\n            values = vals;\n        }\n        return vals;\n    }\n\n    public abstract Set<Entry<K,V>> entrySet();\n\n\n    // Comparison and hashing\n\n    
/**\n     * Compares the specified object with this map for equality.  Returns\n     * {@code true} if the given object is also a map and the two maps\n     * represent the same mappings.  More formally, two maps {@code m1} and\n     * {@code m2} represent the same mappings if\n     * {@code m1.entrySet().equals(m2.entrySet())}.  This ensures that the\n     * {@code equals} method works properly across different implementations\n     * of the {@code Map} interface.\n     *\n     * @implSpec\n     * This implementation first checks if the specified object is this map;\n     * if so it returns {@code true}.  Then, it checks if the specified\n     * object is a map whose size is identical to the size of this map; if\n     * not, it returns {@code false}.  If so, it iterates over this map's\n     * {@code entrySet} collection, and checks that the specified map\n     * contains each mapping that this map contains.  If the specified map\n     * fails to contain such a mapping, {@code false} is returned.  
If the\n     * iteration completes, {@code true} is returned.\n     *\n     * @param o object to be compared for equality with this map\n     * @return {@code true} if the specified object is equal to this map\n     */\n    public boolean equals(Object o) {\n        if (o == this)\n            return true;\n\n        if (!(o instanceof Map))\n            return false;\n        Map<?,?> m = (Map<?,?>) o;\n        if (m.size() != size())\n            return false;\n\n        try {\n            for (Entry<K, V> e : entrySet()) {\n                K key = e.getKey();\n                V value = e.getValue();\n                if (value == null) {\n                    if (!(m.get(key) == null && m.containsKey(key)))\n                        return false;\n                } else {\n                    if (!value.equals(m.get(key)))\n                        return false;\n                }\n            }\n        } catch (ClassCastException unused) {\n            return false;\n        } catch (NullPointerException unused) {\n            return false;\n        }\n\n        return true;\n    }\n\n    /**\n     * Returns the hash code value for this map.  The hash code of a map is\n     * defined to be the sum of the hash codes of each entry in the map's\n     * {@code entrySet()} view.  
This ensures that {@code m1.equals(m2)}\n     * implies that {@code m1.hashCode()==m2.hashCode()} for any two maps\n     * {@code m1} and {@code m2}, as required by the general contract of\n     * {@link Object#hashCode}.\n     *\n     * @implSpec\n     * This implementation iterates over {@code entrySet()}, calling\n     * {@link Map.Entry#hashCode hashCode()} on each element (entry) in the\n     * set, and adding up the results.\n     *\n     * @return the hash code value for this map\n     * @see Map.Entry#hashCode()\n     * @see Object#equals(Object)\n     * @see Set#equals(Object)\n     */\n    public int hashCode() {\n        int h = 0;\n        for (Entry<K, V> entry : entrySet())\n            h += entry.hashCode();\n        return h;\n    }\n\n    /**\n     * Returns a string representation of this map.  The string representation\n     * consists of a list of key-value mappings in the order returned by the\n     * map's {@code entrySet} view's iterator, enclosed in braces\n     * ({@code \"{}\"}).  Adjacent mappings are separated by the characters\n     * {@code \", \"} (comma and space).  Each key-value mapping is rendered as\n     * the key followed by an equals sign ({@code \"=\"}) followed by the\n     * associated value.  Keys and values are converted to strings as by\n     * {@link String#valueOf(Object)}.\n     *\n     * @return a string representation of this map\n     */\n    @Override\n    public String toString() {\n        Iterator<Entry<K,V>> i = entrySet().iterator();\n        if (! i.hasNext())\n            return \"{}\";\n\n        StringBuilder sb = new StringBuilder();\n        sb.append('{');\n        for (;;) {\n            Entry<K,V> e = i.next();\n            K key = e.getKey();\n            V value = e.getValue();\n            sb.append(key   == this ? \"(this Map)\" : key);\n            sb.append('=');\n            sb.append(value == this ? \"(this Map)\" : value);\n            if (! 
i.hasNext())\n                return sb.append('}').toString();\n            sb.append(',').append(' ');\n        }\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object key ){\n        return this.containsKey( key );\n    }\n\n    /**\n     * Returns a shallow copy of this {@code AbstractMap} instance: the keys\n     * and values themselves are not cloned.\n     *\n     * @return a shallow copy of this map\n     */\n    protected Object clone() throws CloneNotSupportedException {\n        AbstractMap<?,?> result = (AbstractMap<?,?>)super.clone();\n        result.keySet = null;\n        result.values = null;\n        return result;\n    }\n\n    /**\n     * Utility method for SimpleEntry and SimpleImmutableEntry.\n     * Test for equality, checking for nulls.\n     *\n     * NB: Do not replace with Object.equals until JDK-8015417 is resolved.\n     */\n    private static boolean eq(Object o1, Object o2) {\n        return o1 == null ? o2 == null : o1.equals(o2);\n    }\n\n    // Implementation Note: SimpleEntry and SimpleImmutableEntry\n    // are distinct unrelated classes, even though they share\n    // some code. Since you can't add or subtract final-ness\n    // of a field in a subclass, they can't share representations,\n    // and the amount of duplicated code is too small to warrant\n    // exposing a common abstract class.\n\n\n    /**\n     * An Entry maintaining a key and a value.  The value may be\n     * changed using the {@code setValue} method.  This class\n     * facilitates the process of building custom map\n     * implementations. 
For example, it may be convenient to return\n     * arrays of {@code SimpleEntry} instances in method\n     * {@code Map.entrySet().toArray}.\n     *\n     * @since 1.6\n     */\n    public static class SimpleEntry<K,V> implements Entry<K,V>, Serializable {\n        private static final long serialVersionUID = -8499721149061103585L;\n\n        private final K key;\n        private V value;\n\n        /**\n         * Creates an entry representing a mapping from the specified\n         * key to the specified value.\n         *\n         * @param key the key represented by this entry\n         * @param value the value represented by this entry\n         */\n        public SimpleEntry(K key, V value) {\n            this.key   = key;\n            this.value = value;\n        }\n\n        /**\n         * Creates an entry representing the same mapping as the\n         * specified entry.\n         *\n         * @param entry the entry to copy\n         */\n        public SimpleEntry(Entry<? extends K, ? 
extends V> entry) {\n            this.key   = entry.getKey();\n            this.value = entry.getValue();\n        }\n\n        /**\n         * Returns the key corresponding to this entry.\n         *\n         * @return the key corresponding to this entry\n         */\n        public K getKey() {\n            return key;\n        }\n\n        /**\n         * Returns the value corresponding to this entry.\n         *\n         * @return the value corresponding to this entry\n         */\n        public V getValue() {\n            return value;\n        }\n\n        /**\n         * Replaces the value corresponding to this entry with the specified\n         * value.\n         *\n         * @param value new value to be stored in this entry\n         * @return the old value corresponding to the entry\n         */\n        public V setValue(V value) {\n            V oldValue = this.value;\n            this.value = value;\n            return oldValue;\n        }\n\n        /**\n         * Compares the specified object with this entry for equality.\n         * Returns {@code true} if the given object is also a map entry and\n         * the two entries represent the same mapping.  
More formally, two\n         * entries {@code e1} and {@code e2} represent the same mapping\n         * if<pre>\n         *   (e1.getKey()==null ?\n         *    e2.getKey()==null :\n         *    e1.getKey().equals(e2.getKey()))\n         *   &amp;&amp;\n         *   (e1.getValue()==null ?\n         *    e2.getValue()==null :\n         *    e1.getValue().equals(e2.getValue()))</pre>\n         * This ensures that the {@code equals} method works properly across\n         * different implementations of the {@code Map.Entry} interface.\n         *\n         * @param o object to be compared for equality with this map entry\n         * @return {@code true} if the specified object is equal to this map\n         *         entry\n         * @see    #hashCode\n         */\n        public boolean equals(Object o) {\n            if (!(o instanceof Map.Entry))\n                return false;\n            Map.Entry<?,?> e = (Map.Entry<?,?>)o;\n            return eq(key, e.getKey()) && eq(value, e.getValue());\n        }\n\n        /**\n         * Returns the hash code value for this map entry.  The hash code\n         * of a map entry {@code e} is defined to be: <pre>\n         *   (e.getKey()==null   ? 0 : e.getKey().hashCode()) ^\n         *   (e.getValue()==null ? 0 : e.getValue().hashCode())</pre>\n         * This ensures that {@code e1.equals(e2)} implies that\n         * {@code e1.hashCode()==e2.hashCode()} for any two Entries\n         * {@code e1} and {@code e2}, as required by the general\n         * contract of {@link Object#hashCode}.\n         *\n         * @return the hash code value for this map entry\n         * @see    #equals\n         */\n        public int hashCode() {\n            return (key   == null ? 0 :   key.hashCode()) ^\n                    (value == null ? 0 : value.hashCode());\n        }\n\n        /**\n         * Returns a String representation of this map entry.  
This\n         * implementation returns the string representation of this\n         * entry's key followed by the equals character (\"{@code =}\")\n         * followed by the string representation of this entry's value.\n         *\n         * @return a String representation of this map entry\n         */\n        public String toString() {\n            return key + \"=\" + value;\n        }\n\n    }\n\n    /**\n     * An Entry maintaining an immutable key and value.  This class\n     * does not support method {@code setValue}.  This class may be\n     * convenient in methods that return thread-safe snapshots of\n     * key-value mappings.\n     *\n     * @since 1.6\n     */\n    public static class SimpleImmutableEntry<K,V> implements Entry<K,V>, Serializable {\n        private static final long serialVersionUID = 7138329143949025153L;\n\n        private final K key;\n        private final V value;\n\n        /**\n         * Creates an entry representing a mapping from the specified\n         * key to the specified value.\n         *\n         * @param key the key represented by this entry\n         * @param value the value represented by this entry\n         */\n        public SimpleImmutableEntry(K key, V value) {\n            this.key   = key;\n            this.value = value;\n        }\n\n        /**\n         * Creates an entry representing the same mapping as the\n         * specified entry.\n         *\n         * @param entry the entry to copy\n         */\n        public SimpleImmutableEntry(Entry<? extends K, ? 
extends V> entry) {\n            this.key   = entry.getKey();\n            this.value = entry.getValue();\n        }\n\n        /**\n         * Returns the key corresponding to this entry.\n         *\n         * @return the key corresponding to this entry\n         */\n        public K getKey() {\n            return key;\n        }\n\n        /**\n         * Returns the value corresponding to this entry.\n         *\n         * @return the value corresponding to this entry\n         */\n        public V getValue() {\n            return value;\n        }\n\n        /**\n         * Replaces the value corresponding to this entry with the specified\n         * value (optional operation).  This implementation simply throws\n         * {@code UnsupportedOperationException}, as this class implements\n         * an <i>immutable</i> map entry.\n         *\n         * @param value new value to be stored in this entry\n         * @return (Does not return)\n         * @throws UnsupportedOperationException always\n         */\n        public V setValue(V value) {\n            throw new UnsupportedOperationException();\n        }\n\n        /**\n         * Compares the specified object with this entry for equality.\n         * Returns {@code true} if the given object is also a map entry and\n         * the two entries represent the same mapping.  
More formally, two\n         * entries {@code e1} and {@code e2} represent the same mapping\n         * if<pre>\n         *   (e1.getKey()==null ?\n         *    e2.getKey()==null :\n         *    e1.getKey().equals(e2.getKey()))\n         *   &amp;&amp;\n         *   (e1.getValue()==null ?\n         *    e2.getValue()==null :\n         *    e1.getValue().equals(e2.getValue()))</pre>\n         * This ensures that the {@code equals} method works properly across\n         * different implementations of the {@code Map.Entry} interface.\n         *\n         * @param o object to be compared for equality with this map entry\n         * @return {@code true} if the specified object is equal to this map\n         *         entry\n         * @see    #hashCode\n         */\n        public boolean equals(Object o) {\n            if (!(o instanceof Map.Entry))\n                return false;\n            Map.Entry<?,?> e = (Map.Entry<?,?>)o;\n            return eq(key, e.getKey()) && eq(value, e.getValue());\n        }\n\n        /**\n         * Returns the hash code value for this map entry.  The hash code\n         * of a map entry {@code e} is defined to be: <pre>\n         *   (e.getKey()==null   ? 0 : e.getKey().hashCode()) ^\n         *   (e.getValue()==null ? 0 : e.getValue().hashCode())</pre>\n         * This ensures that {@code e1.equals(e2)} implies that\n         * {@code e1.hashCode()==e2.hashCode()} for any two Entries\n         * {@code e1} and {@code e2}, as required by the general\n         * contract of {@link Object#hashCode}.\n         *\n         * @return the hash code value for this map entry\n         * @see    #equals\n         */\n        public int hashCode() {\n            return (key   == null ? 0 :   key.hashCode()) ^\n                    (value == null ? 0 : value.hashCode());\n        }\n\n        /**\n         * Returns a String representation of this map entry.  
This\n         * implementation returns the string representation of this\n         * entry's key followed by the equals character (\"{@code =}\")\n         * followed by the string representation of this entry's value.\n         *\n         * @return a String representation of this map entry\n         */\n        public String toString() {\n            return key + \"=\" + value;\n        }\n\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/AbstractMultiValueMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.Map;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.Collections;\nimport java.util.NoSuchElementException;\nimport java.util.AbstractCollection;\n\npublic abstract class AbstractMultiValueMap<K, V > implements MultiValueMapper<K, V > {\n    private transient EntryCollection     mEntryCollection;\n    private transient ValueCollection     mValueCollection;\n\n    @Override\n    public Collection<Map.Entry<K, V > > collection() {\n        Collection<Map.Entry<K,V > >  es = this.mEntryCollection;\n        return (es != null) ? es : ( this.mEntryCollection = new EntryCollection( this ) );\n    }\n\n    @Override\n    public Collection<V > collectionValues(){\n        Collection<V> vs = this.mValueCollection;\n        return (vs != null) ? vs : ( this.mValueCollection = new ValueCollection( this ) );\n    }\n\n    class DummyEntry extends KeyValue<K, V > {\n        public DummyEntry( K key, V value ) {\n            super( key, value );\n        }\n\n        public void setKey( K key ) {\n            this.key = key;\n        }\n    }\n\n    class EntryIterator implements Iterator<Map.Entry<K, V > > {\n        private final Iterator<? extends Map.Entry<K, ? extends Collection<V > > > entryIterator;\n        private Iterator<V > currentCollectionIterator;\n        private K currentKey;\n        protected DummyEntry dummyEntry = new DummyEntry( null, null );\n\n        EntryIterator( MultiValueMapper<K, V > that ) {\n            this.entryIterator = that.entrySet().iterator();\n            this.currentCollectionIterator = Collections.emptyIterator();\n        }\n\n        @Override\n        public boolean hasNext() {\n            while ( !this.currentCollectionIterator.hasNext() && this.entryIterator.hasNext() ) {\n                Map.Entry<K, ? 
extends Collection<V > > entry = this.entryIterator.next();\n                this.currentKey = entry.getKey();\n                this.currentCollectionIterator = entry.getValue().iterator();\n            }\n            return this.currentCollectionIterator.hasNext();\n        }\n\n        @Override\n        public Map.Entry<K, V > next() {\n            if ( !this.hasNext() ) {\n                throw new NoSuchElementException();\n            }\n\n            this.dummyEntry.setKey( this.currentKey );\n            this.dummyEntry.setValue( this.currentCollectionIterator.next() );\n            return this.dummyEntry;\n        }\n    }\n\n    class EntryCollection extends AbstractCollection<Map.Entry<K, V > > {\n        MultiValueMapper<K, V > map;\n\n        EntryCollection( MultiValueMapper<K, V > that ) {\n            this.map = that;\n        }\n\n        @Override\n        public Iterator<Map.Entry<K, V > > iterator() {\n            return new EntryIterator( this.map );\n        }\n\n        @Override\n        public int size() {\n            int size = 0;\n            for ( Collection<V> values : this.map.values() ) {\n                size += values.size();\n            }\n            return size;\n        }\n\n        @Override\n        public boolean isEmpty() {\n            return this.map.isEmpty();\n        }\n\n        @Override\n        public void clear() {\n            this.map.clear();\n        }\n\n        @Override\n        public boolean contains( Object o ) {\n            if ( !(o instanceof Map.Entry) ) {\n                return false;\n            }\n            Map.Entry<?, ?> entry = (Map.Entry<?, ?>) o;\n            Collection<V> values = this.map.get(entry.getKey());\n            return values != null && values.contains( entry.getValue() );\n        }\n\n        @Override\n        public boolean remove(Object o) {\n            if (!(o instanceof Map.Entry)) {\n                return false;\n            }\n            Map.Entry<?, ?> entry = 
(Map.Entry<?, ?>) o;\n            Collection<V> values = this.map.get(entry.getKey());\n            if ( values != null && values.remove( entry.getValue() ) ) {\n                if ( values.isEmpty() ) {\n                    this.map.remove( entry.getKey() );\n                }\n                return true;\n            }\n            return false;\n        }\n    }\n\n    class ValueIterator implements Iterator<V > {\n        private final Iterator<? extends Map.Entry<K, ? extends Collection<V>>> entryIterator;\n        private Iterator<V> currentCollectionIterator;\n\n        ValueIterator(MultiValueMapper<K, V> that) {\n            this.entryIterator = that.entrySet().iterator();\n            this.currentCollectionIterator = Collections.emptyIterator();\n        }\n\n        @Override\n        public boolean hasNext() {\n            while ( !this.currentCollectionIterator.hasNext() && this.entryIterator.hasNext() ) {\n                Map.Entry<K, ? extends Collection<V > > entry = this.entryIterator.next();\n                this.currentCollectionIterator = entry.getValue().iterator();\n            }\n            return this.currentCollectionIterator.hasNext();\n        }\n\n        @Override\n        public V next() {\n            if ( !this.hasNext() ) {\n                throw new NoSuchElementException();\n            }\n            return this.currentCollectionIterator.next();\n        }\n    }\n\n    class ValueCollection extends AbstractCollection<V> {\n        MultiValueMapper<K, V> map;\n\n        ValueCollection( MultiValueMapper<K, V> that ) {\n            this.map = that;\n        }\n\n        @Override\n        public Iterator<V> iterator() {\n            return new ValueIterator(this.map);\n        }\n\n        @Override\n        public int size() {\n            int size = 0;\n            for ( Collection<V> values : this.map.values() ) {\n                size += values.size();\n            }\n            return size;\n        }\n\n        
@Override\n        public boolean isEmpty() {\n            return this.map.isEmpty();\n        }\n\n        @Override\n        public void clear() {\n            this.map.clear();\n        }\n\n        @Override\n        public boolean contains( Object o ) {\n            for ( Collection<V> values : this.map.values() ) {\n                if ( values.contains(o) ) {\n                    return true;\n                }\n            }\n            return false;\n        }\n\n        @Override\n        public boolean remove( Object o ) {\n            for ( Map.Entry<K, ? extends Collection<V > > entry : this.map.entrySet() ) {\n                Collection<V > values = entry.getValue();\n                if ( values.remove(o) ) {\n                    if ( values.isEmpty() ) {\n                        this.map.remove( entry.getKey() );\n                    }\n                    return true;\n                }\n            }\n            return false;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/BidLinkedEntry.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.system.prototype.TypeIndex;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.util.Map;\n\npublic class BidLinkedEntry<K,V> implements Map.Entry<K,V>, Pinenut {\n    protected K key;\n    protected V value;\n    protected BidLinkedEntry<K,V> before;\n    protected BidLinkedEntry<K,V> after;\n\n    BidLinkedEntry( K key, V value ) {\n        this.key = key;\n        this.value = value;\n    }\n\n    public void extend( Map.Entry<K,V > entry ) {\n        this.key   = entry.getKey();\n        this.value = entry.getValue();\n    }\n\n    public K getKey() {\n        return this.key;\n    }\n\n    public V getValue() {\n        return this.value;\n    }\n\n    public V setValue(V value) {\n        V oldValue = this.value;\n        this.value = value;\n        return oldValue;\n    }\n\n    @Override\n    public int hashCode() {\n        int keyHash = (key==null ? 0 : key.hashCode());\n        int valueHash = (value==null ? 0 : value.hashCode());\n        return keyHash ^ valueHash;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"{\" + StringUtils.jsonQuote( this.key.toString() ) + \":\" + JSON.stringify( this.value ) + \"}\";\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/BitSet64.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.util.Bits;\n//import com.pinecone.framework.util.Debug;\n\npublic final class BitSet64 {\n\n    public static final int Int64MaxPos = Long.SIZE - 1;\n\n    public static long setBit( long that, int position ) {\n        return that | (1L << position);\n    }\n\n    public static long clearBit( long that, int position ) {\n        return that & ~(1L << position);\n    }\n\n    public static boolean isBitSet( long that, int position ) {\n        return (that & (1L << position)) != 0;\n    }\n\n    public static long flipBit( long that, int position ) {\n        return that ^ (1L << position);\n    }\n\n    public static String toBinaryString( long that ) {\n        return Long.toBinaryString(that);\n    }\n\n    public static String toBinaryStringMSB( long that ) {\n        String binaryString = String.format(\n                \"%64s\", Long.toBinaryString( Bits.reverse64Bits(that) )\n        ).replace( ' ', '0' );\n        return \"0b\" + binaryString;\n    }\n\n    public static String toBinaryStringLSB( long that ) {\n        String binaryString = String.format(\n                \"%64s\", Long.toBinaryString(that)\n        ).replace( ' ', '0' );\n        return \"0b\" + binaryString;\n    }\n\n    public static String toIndexJSONString( long that ) {\n        StringBuilder sb = new StringBuilder();\n        sb.append( '[' );\n\n        for ( int i = 0; i < Long.SIZE; ++i ) {\n            if ( ( that & (1L << i) ) != 0 ) {\n                sb.append( i ).append( ',' );\n            }\n        }\n\n        if( sb.charAt( sb.length() - 1 ) == ',' ) {\n            sb.deleteCharAt( sb.length() - 1 );\n        }\n\n        sb.append( ']' );\n\n        return sb.toString();\n    }\n\n\n    public static long set( long that, int from, int to, boolean val ) throws IllegalArgumentException {\n        int jt = BitSet64.check( from, to );\n        long mask = ((1L << (jt - from + 1)) - 1) << 
from;\n\n        if ( val ) {\n            that |= mask;\n\n            if ( to >= BitSet64.Int64MaxPos ) {\n                that = BitSet64.setBit( that, to );\n            }\n        }\n        else {\n            that &= ~mask;\n\n            if ( to >= BitSet64.Int64MaxPos ) {\n                that = BitSet64.clearBit( that, to );\n            }\n        }\n\n        return that;\n    }\n\n    public static long set( long that, int from, int to ) throws IllegalArgumentException {\n        return BitSet64.set( that, from, to, true );\n    }\n\n    public static long unset( long that, int from, int to ) throws IllegalArgumentException {\n        return BitSet64.set( that, from, to, false );\n    }\n\n    private static int check( int from, int to ) throws IllegalArgumentException {\n        if ( from > to || from < 0 || to > BitSet64.Int64MaxPos ) {\n            throw new IllegalArgumentException( \"Invalid bit positions\" );\n        }\n        int jt = to;\n        if ( to == BitSet64.Int64MaxPos ) {\n            jt = BitSet64.Int64MaxPos - 1;\n        }\n\n        return jt;\n    }\n\n    public static long extract( long that, int from, int to ) throws IllegalArgumentException {\n        int jt = BitSet64.check( from, to );\n        long mask = ((1L << (jt - from + 1)) - 1) << from;\n        long t = (that & mask) >>> from;\n\n        if ( to >= BitSet64.Int64MaxPos ) {\n            t =  t | (that & 0x8000000000000000L);\n        }\n\n        return t;\n    }\n\n    public static long copy( long that, int from, int to, long segment ) {\n        int jt = to;\n        if ( to == BitSet64.Int64MaxPos ) {\n            jt = BitSet64.Int64MaxPos - 1;\n        }\n\n        long seg = segment;\n        int segmentLength = jt - from + 1;\n        segment &= (1L << segmentLength) - 1;\n        segment <<= from;\n\n        long mask = ((1L << segmentLength) - 1) << from;\n        that &= ~mask;\n        if ( to >= BitSet64.Int64MaxPos ) {\n            that &= ~(1L << 
to);\n        }\n\n        that |= segment;\n\n        if ( to >= BitSet64.Int64MaxPos ) {\n            long lastBit = seg & 0x8000000000000000L;\n            that |= lastBit;\n        }\n\n        return that;\n    }\n\n    public static long reverse( long that, int from, int to ) {\n        long seg  = BitSet64.extract( that, from, to );\n        long re   = Bits.reverse64Bits( seg );\n\n        int k = to - from;\n        long sift = (re >>> (BitSet64.Int64MaxPos - k)) | (re << k);\n        if ( to == BitSet64.Int64MaxPos ) {\n            sift = re;\n        }\n        else {\n            sift &= ~(1L << BitSet64.Int64MaxPos);\n        }\n\n//        Debug.bluef( BitSet64.toBinaryStringLSB( seg ).substring(2 + BitSet64.Int64MaxPos - k, 66) );\n//        Debug.bluef( BitSet64.toBinaryStringLSB( re ).substring(2, k + 3) );\n//        Debug.bluef( BitSet64.toBinaryStringLSB( sift ).substring(2 + BitSet64.Int64MaxPos - k, 66) );\n\n        return BitSet64.copy( that, from, to, sift );\n    }\n\n    public static long flip( long that, int from, int to ) throws IllegalArgumentException {\n        int jt = BitSet64.check( from, to );\n        long mask = ((1L << (jt - from + 1)) - 1) << from;\n\n        if ( to >= BitSet64.Int64MaxPos ) {\n            that = BitSet64.flipBit( that, to );\n        }\n        return that ^ mask;\n    }\n\n\n    public static int existence ( long that ) {\n        return Long.bitCount(that);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ConcurrentReferenceHashMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.lang.ref.ReferenceQueue;\nimport java.lang.ref.SoftReference;\nimport java.lang.ref.WeakReference;\nimport java.lang.reflect.Array;\nimport java.util.AbstractMap;\nimport java.util.AbstractSet;\nimport java.util.Collections;\nimport java.util.EnumSet;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\nimport java.util.Set;\nimport java.util.concurrent.ConcurrentMap;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.locks.ReentrantLock;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.ObjectUtils;\nimport com.pinecone.framework.util.json.JSON;\n\n\npublic class ConcurrentReferenceHashMap<K, V> extends AbstractMap<K, V> implements ConcurrentMap<K, V>, PineUnit {\n    private static final int DEFAULT_INITIAL_CAPACITY = 16;\n    private static final float DEFAULT_LOAD_FACTOR = 0.75F;\n    private static final int DEFAULT_CONCURRENCY_LEVEL = 16;\n    private static final ConcurrentReferenceHashMap.ReferenceType DEFAULT_REFERENCE_TYPE;\n    private static final int MAXIMUM_CONCURRENCY_LEVEL = 65536;\n    private static final int MAXIMUM_SEGMENT_SIZE = 1073741824;\n    private final ConcurrentReferenceHashMap<K, V>.Segment[] segments;\n    private final float loadFactor;\n    private final ConcurrentReferenceHashMap.ReferenceType referenceType;\n    private final int shift;\n    @Nullable\n    private volatile Set<java.util.Map.Entry<K, V>> entrySet;\n\n    public ConcurrentReferenceHashMap() {\n        this(16, 0.75F, 16, DEFAULT_REFERENCE_TYPE);\n    }\n\n    public ConcurrentReferenceHashMap(int initialCapacity) {\n        this(initialCapacity, 0.75F, 16, DEFAULT_REFERENCE_TYPE);\n    }\n\n    public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor) {\n        
this(initialCapacity, loadFactor, 16, DEFAULT_REFERENCE_TYPE);\n    }\n\n    public ConcurrentReferenceHashMap(int initialCapacity, int concurrencyLevel) {\n        this(initialCapacity, 0.75F, concurrencyLevel, DEFAULT_REFERENCE_TYPE);\n    }\n\n    public ConcurrentReferenceHashMap(int initialCapacity, ConcurrentReferenceHashMap.ReferenceType referenceType) {\n        this(initialCapacity, 0.75F, 16, referenceType);\n    }\n\n    public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor, int concurrencyLevel) {\n        this(initialCapacity, loadFactor, concurrencyLevel, DEFAULT_REFERENCE_TYPE);\n    }\n\n    public ConcurrentReferenceHashMap(int initialCapacity, float loadFactor, int concurrencyLevel, ConcurrentReferenceHashMap.ReferenceType referenceType) {\n        Assert.isTrue(initialCapacity >= 0, \"Initial capacity must not be negative\");\n        Assert.isTrue(loadFactor > 0.0F, \"Load factor must be positive\");\n        Assert.isTrue(concurrencyLevel > 0, \"Concurrency level must be positive\");\n        Assert.notNull(referenceType, \"Reference type must not be null\");\n        this.loadFactor = loadFactor;\n        this.shift = calculateShift(concurrencyLevel, 65536);\n        int size = 1 << this.shift;\n        this.referenceType = referenceType;\n        int roundedUpSegmentCapacity = (int)(((long)(initialCapacity + size) - 1L) / (long)size);\n        int initialSize = 1 << calculateShift(roundedUpSegmentCapacity, 1073741824);\n        ConcurrentReferenceHashMap<K, V>.Segment[] segments = (ConcurrentReferenceHashMap.Segment[])((ConcurrentReferenceHashMap.Segment[])Array.newInstance(ConcurrentReferenceHashMap.Segment.class, size));\n        int resizeThreshold = (int)((float)initialSize * this.getLoadFactor());\n\n        for(int i = 0; i < segments.length; ++i) {\n            segments[i] = new ConcurrentReferenceHashMap.Segment(initialSize, resizeThreshold);\n        }\n\n        this.segments = segments;\n    }\n\n    protected 
final float getLoadFactor() {\n        return this.loadFactor;\n    }\n\n    protected final int getSegmentsSize() {\n        return this.segments.length;\n    }\n\n    protected final ConcurrentReferenceHashMap<K, V>.Segment getSegment(int index) {\n        return this.segments[index];\n    }\n\n    protected ConcurrentReferenceHashMap<K, V>.ReferenceManager createReferenceManager() {\n        return new ConcurrentReferenceHashMap.ReferenceManager();\n    }\n\n    protected int getHash(@Nullable Object o) {\n        int hash = o != null ? o.hashCode() : 0;\n        hash += hash << 15 ^ -12931;\n        hash ^= hash >>> 10;\n        hash += hash << 3;\n        hash ^= hash >>> 6;\n        hash += (hash << 2) + (hash << 14);\n        hash ^= hash >>> 16;\n        return hash;\n    }\n\n    @Nullable\n    @Override\n    public V get(@Nullable Object key) {\n        ConcurrentReferenceHashMap.Reference<K, V> ref = this.getReference(key, ConcurrentReferenceHashMap.Restructure.WHEN_NECESSARY);\n        ConcurrentReferenceHashMap.Entry<K, V> entry = ref != null ? ref.get() : null;\n        return entry != null ? entry.getValue() : null;\n    }\n\n    @Nullable\n    @Override\n    public V getOrDefault(@Nullable Object key, @Nullable V defaultValue) {\n        ConcurrentReferenceHashMap.Reference<K, V> ref = this.getReference(key, ConcurrentReferenceHashMap.Restructure.WHEN_NECESSARY);\n        ConcurrentReferenceHashMap.Entry<K, V> entry = ref != null ? ref.get() : null;\n        return entry != null ? entry.getValue() : defaultValue;\n    }\n\n    public boolean containsKey(@Nullable Object key) {\n        ConcurrentReferenceHashMap.Reference<K, V> ref = this.getReference(key, ConcurrentReferenceHashMap.Restructure.WHEN_NECESSARY);\n        ConcurrentReferenceHashMap.Entry<K, V> entry = ref != null ? 
ref.get() : null;\n        return entry != null && ObjectUtils.nullSafeEquals(entry.getKey(), key);\n    }\n\n    @Nullable\n    protected final ConcurrentReferenceHashMap.Reference<K, V> getReference(@Nullable Object key, ConcurrentReferenceHashMap.Restructure restructure) {\n        int hash = this.getHash(key);\n        return this.getSegmentForHash(hash).getReference(key, hash, restructure);\n    }\n\n    @Nullable\n    @Override\n    public V put(@Nullable K key, @Nullable V value) {\n        return this.put(key, value, true);\n    }\n\n    @Nullable\n    @Override\n    public V putIfAbsent(@Nullable K key, @Nullable V value) {\n        return this.put(key, value, false);\n    }\n\n    @Nullable\n    private V put(@Nullable K key, @Nullable final V value, final boolean overwriteExisting) {\n        return this.doTask(key, new ConcurrentReferenceHashMap<K, V>.Task<V>(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_BEFORE, ConcurrentReferenceHashMap.TaskOption.RESIZE}) {\n            @Nullable\n            protected V execute(@Nullable ConcurrentReferenceHashMap.Reference<K, V> ref, @Nullable ConcurrentReferenceHashMap.Entry<K, V> entry, @Nullable ConcurrentReferenceHashMap.Entries<V> entries) {\n                if (entry != null) {\n                    V oldValue = entry.getValue();\n                    if (overwriteExisting) {\n                        entry.setValue(value);\n                    }\n\n                    return oldValue;\n                } else {\n                    Assert.state(entries != null, \"No entries segment\");\n                    entries.add(value);\n                    return null;\n                }\n            }\n        });\n    }\n\n    @Nullable\n    @Override\n    public V remove(Object key) {\n        return this.doTask(key, new ConcurrentReferenceHashMap<K, V>.Task<V>(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_AFTER, 
ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY}) {\n            @Nullable\n            protected V execute(@Nullable ConcurrentReferenceHashMap.Reference<K, V> ref, @Nullable ConcurrentReferenceHashMap.Entry<K, V> entry) {\n                if (entry != null) {\n                    if (ref != null) {\n                        ref.release();\n                    }\n\n                    return entry.value;\n                } else {\n                    return null;\n                }\n            }\n        });\n    }\n\n    @Override\n    public boolean remove(Object key, final Object value) {\n        Boolean result = (Boolean)this.doTask(key, new ConcurrentReferenceHashMap<K, V>.Task<Boolean>(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_AFTER, ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY}) {\n            protected Boolean execute(@Nullable ConcurrentReferenceHashMap.Reference<K, V> ref, @Nullable ConcurrentReferenceHashMap.Entry<K, V> entry) {\n                if (entry != null && ObjectUtils.nullSafeEquals(entry.getValue(), value)) {\n                    if (ref != null) {\n                        ref.release();\n                    }\n\n                    return true;\n                } else {\n                    return false;\n                }\n            }\n        });\n        return Boolean.TRUE.equals(result);\n    }\n\n    @Override\n    public boolean replace(K key, final V oldValue, final V newValue) {\n        Boolean result = (Boolean)this.doTask(key, new ConcurrentReferenceHashMap<K, V>.Task<Boolean>(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_BEFORE, ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY}) {\n            protected Boolean execute(@Nullable ConcurrentReferenceHashMap.Reference<K, V> ref, @Nullable ConcurrentReferenceHashMap.Entry<K, V> entry) {\n                if (entry != null && ObjectUtils.nullSafeEquals(entry.getValue(), 
oldValue)) {\n                    entry.setValue(newValue);\n                    return true;\n                } else {\n                    return false;\n                }\n            }\n        });\n        return Boolean.TRUE.equals(result);\n    }\n\n    @Nullable\n    @Override\n    public V replace(K key, final V value) {\n        return this.doTask(key, new ConcurrentReferenceHashMap<K, V>.Task<V>(new ConcurrentReferenceHashMap.TaskOption[]{ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_BEFORE, ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY}) {\n            @Nullable\n            protected V execute(@Nullable ConcurrentReferenceHashMap.Reference<K, V> ref, @Nullable ConcurrentReferenceHashMap.Entry<K, V> entry) {\n                if (entry != null) {\n                    V oldValue = entry.getValue();\n                    entry.setValue(value);\n                    return oldValue;\n                } else {\n                    return null;\n                }\n            }\n        });\n    }\n\n    @Override\n    public void clear() {\n        ConcurrentReferenceHashMap.Segment[] var1 = this.segments;\n        int var2 = var1.length;\n\n        for(int var3 = 0; var3 < var2; ++var3) {\n            ConcurrentReferenceHashMap<K, V>.Segment segment = var1[var3];\n            segment.clear();\n        }\n\n    }\n\n    public void purgeUnreferencedEntries() {\n        ConcurrentReferenceHashMap.Segment[] var1 = this.segments;\n        int var2 = var1.length;\n\n        for(int var3 = 0; var3 < var2; ++var3) {\n            ConcurrentReferenceHashMap<K, V>.Segment segment = var1[var3];\n            segment.restructureIfNecessary(false);\n        }\n\n    }\n\n    @Override\n    public int size() {\n        int size = 0;\n        ConcurrentReferenceHashMap.Segment[] var2 = this.segments;\n        int var3 = var2.length;\n\n        for(int var4 = 0; var4 < var3; ++var4) {\n            ConcurrentReferenceHashMap<K, V>.Segment segment = var2[var4];\n       
     size += segment.getCount();\n        }\n\n        return size;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        ConcurrentReferenceHashMap.Segment[] var1 = this.segments;\n        int var2 = var1.length;\n\n        for(int var3 = 0; var3 < var2; ++var3) {\n            ConcurrentReferenceHashMap<K, V>.Segment segment = var1[var3];\n            if (segment.getCount() > 0) {\n                return false;\n            }\n        }\n\n        return true;\n    }\n\n    @Override\n    public Set<java.util.Map.Entry<K, V>> entrySet() {\n        Set<java.util.Map.Entry<K, V>> entrySet = this.entrySet;\n        if (entrySet == null) {\n            entrySet = new ConcurrentReferenceHashMap.EntrySet();\n            this.entrySet = (Set)entrySet;\n        }\n\n        return (Set)entrySet;\n    }\n\n    @Nullable\n    private <T> T doTask(@Nullable Object key, ConcurrentReferenceHashMap<K, V>.Task<T> task) {\n        int hash = this.getHash(key);\n        return this.getSegmentForHash(hash).doTask(hash, key, task);\n    }\n\n    @Override\n    public boolean hasOwnProperty(Object elm) {\n        return this.containsKey( elm );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    private ConcurrentReferenceHashMap<K, V>.Segment getSegmentForHash(int hash) {\n        return this.segments[hash >>> 32 - this.shift & this.segments.length - 1];\n    }\n\n    protected static int calculateShift(int minimumValue, int maximumValue) {\n        int shift = 0;\n\n        for(int value = 1; value < minimumValue && value < maximumValue; ++shift) {\n            value <<= 1;\n        }\n\n        return shift;\n    }\n\n    static {\n        DEFAULT_REFERENCE_TYPE = ConcurrentReferenceHashMap.ReferenceType.SOFT;\n    }\n\n    private static final class WeakEntryReference<K, V> extends 
WeakReference<ConcurrentReferenceHashMap.Entry<K, V>> implements ConcurrentReferenceHashMap.Reference<K, V> {\n        private final int hash;\n        @Nullable\n        private final ConcurrentReferenceHashMap.Reference<K, V> nextReference;\n\n        public WeakEntryReference(ConcurrentReferenceHashMap.Entry<K, V> entry, int hash, @Nullable ConcurrentReferenceHashMap.Reference<K, V> next, ReferenceQueue<ConcurrentReferenceHashMap.Entry<K, V>> queue) {\n            super(entry, queue);\n            this.hash = hash;\n            this.nextReference = next;\n        }\n\n        public int getHash() {\n            return this.hash;\n        }\n\n        @Nullable\n        public ConcurrentReferenceHashMap.Reference<K, V> getNext() {\n            return this.nextReference;\n        }\n\n        public void release() {\n            this.enqueue();\n            this.clear();\n        }\n    }\n\n    private static final class SoftEntryReference<K, V> extends SoftReference<ConcurrentReferenceHashMap.Entry<K, V>> implements ConcurrentReferenceHashMap.Reference<K, V> {\n        private final int hash;\n        @Nullable\n        private final ConcurrentReferenceHashMap.Reference<K, V> nextReference;\n\n        public SoftEntryReference(ConcurrentReferenceHashMap.Entry<K, V> entry, int hash, @Nullable ConcurrentReferenceHashMap.Reference<K, V> next, ReferenceQueue<ConcurrentReferenceHashMap.Entry<K, V>> queue) {\n            super(entry, queue);\n            this.hash = hash;\n            this.nextReference = next;\n        }\n\n        public int getHash() {\n            return this.hash;\n        }\n\n        @Nullable\n        public ConcurrentReferenceHashMap.Reference<K, V> getNext() {\n            return this.nextReference;\n        }\n\n        public void release() {\n            this.enqueue();\n            this.clear();\n        }\n    }\n\n    protected class ReferenceManager {\n        private final ReferenceQueue<ConcurrentReferenceHashMap.Entry<K, V>> queue 
= new ReferenceQueue();\n\n        protected ReferenceManager() {\n        }\n\n        public ConcurrentReferenceHashMap.Reference<K, V> createReference(ConcurrentReferenceHashMap.Entry<K, V> entry, int hash, @Nullable ConcurrentReferenceHashMap.Reference<K, V> next) {\n            return (ConcurrentReferenceHashMap.Reference)(ConcurrentReferenceHashMap.this.referenceType == ConcurrentReferenceHashMap.ReferenceType.WEAK ? new ConcurrentReferenceHashMap.WeakEntryReference(entry, hash, next, this.queue) : new ConcurrentReferenceHashMap.SoftEntryReference(entry, hash, next, this.queue));\n        }\n\n        @Nullable\n        public ConcurrentReferenceHashMap.Reference<K, V> pollForPurge() {\n            return (ConcurrentReferenceHashMap.Reference)this.queue.poll();\n        }\n    }\n\n    protected static enum Restructure {\n        WHEN_NECESSARY,\n        NEVER;\n\n        private Restructure() {\n        }\n    }\n\n    private class EntryIterator implements Iterator<java.util.Map.Entry<K, V>> {\n        private int segmentIndex;\n        private int referenceIndex;\n        @Nullable\n        private ConcurrentReferenceHashMap.Reference<K, V>[] references;\n        @Nullable\n        private ConcurrentReferenceHashMap.Reference<K, V> reference;\n        @Nullable\n        private ConcurrentReferenceHashMap.Entry<K, V> next;\n        @Nullable\n        private ConcurrentReferenceHashMap.Entry<K, V> last;\n\n        public EntryIterator() {\n            this.moveToNextSegment();\n        }\n\n        public boolean hasNext() {\n            this.getNextIfNecessary();\n            return this.next != null;\n        }\n\n        public ConcurrentReferenceHashMap.Entry<K, V> next() {\n            this.getNextIfNecessary();\n            if (this.next == null) {\n                throw new NoSuchElementException();\n            } else {\n                this.last = this.next;\n                this.next = null;\n                return this.last;\n            }\n       
 }\n\n        private void getNextIfNecessary() {\n            while(this.next == null) {\n                this.moveToNextReference();\n                if (this.reference == null) {\n                    return;\n                }\n\n                this.next = this.reference.get();\n            }\n\n        }\n\n        private void moveToNextReference() {\n            if (this.reference != null) {\n                this.reference = this.reference.getNext();\n            }\n\n            while(this.reference == null && this.references != null) {\n                if (this.referenceIndex >= this.references.length) {\n                    this.moveToNextSegment();\n                    this.referenceIndex = 0;\n                } else {\n                    this.reference = this.references[this.referenceIndex];\n                    ++this.referenceIndex;\n                }\n            }\n\n        }\n\n        private void moveToNextSegment() {\n            this.reference = null;\n            this.references = null;\n            if (this.segmentIndex < ConcurrentReferenceHashMap.this.segments.length) {\n                this.references = ConcurrentReferenceHashMap.this.segments[this.segmentIndex].references;\n                ++this.segmentIndex;\n            }\n\n        }\n\n        public void remove() {\n            Assert.state(this.last != null, \"No element to remove\");\n            ConcurrentReferenceHashMap.this.remove(this.last.getKey());\n        }\n    }\n\n    private class EntrySet extends AbstractSet<java.util.Map.Entry<K, V>> {\n        private EntrySet() {\n        }\n\n        public Iterator<java.util.Map.Entry<K, V>> iterator() {\n            return ConcurrentReferenceHashMap.this.new EntryIterator();\n        }\n\n        public boolean contains(@Nullable Object o) {\n            if (o instanceof java.util.Map.Entry) {\n                java.util.Map.Entry<?, ?> entry = (java.util.Map.Entry)o;\n                ConcurrentReferenceHashMap.Reference<K, V> 
ref = ConcurrentReferenceHashMap.this.getReference(entry.getKey(), ConcurrentReferenceHashMap.Restructure.NEVER);\n                ConcurrentReferenceHashMap.Entry<K, V> otherEntry = ref != null ? ref.get() : null;\n                if (otherEntry != null) {\n                    return ObjectUtils.nullSafeEquals(otherEntry.getValue(), otherEntry.getValue());\n                }\n            }\n\n            return false;\n        }\n\n        public boolean remove(Object o) {\n            if (o instanceof java.util.Map.Entry) {\n                java.util.Map.Entry<?, ?> entry = (java.util.Map.Entry)o;\n                return ConcurrentReferenceHashMap.this.remove(entry.getKey(), entry.getValue());\n            } else {\n                return false;\n            }\n        }\n\n        public int size() {\n            return ConcurrentReferenceHashMap.this.size();\n        }\n\n        public void clear() {\n            ConcurrentReferenceHashMap.this.clear();\n        }\n    }\n\n    private interface Entries<V> {\n        void add(@Nullable V var1);\n    }\n\n    private static enum TaskOption {\n        RESTRUCTURE_BEFORE,\n        RESTRUCTURE_AFTER,\n        SKIP_IF_EMPTY,\n        RESIZE;\n\n        private TaskOption() {\n        }\n    }\n\n    private abstract class Task<T> {\n        private final EnumSet<ConcurrentReferenceHashMap.TaskOption> options;\n\n        public Task(ConcurrentReferenceHashMap.TaskOption... options) {\n            this.options = options.length == 0 ? 
EnumSet.noneOf(ConcurrentReferenceHashMap.TaskOption.class) : EnumSet.of(options[0], options);\n        }\n\n        public boolean hasOption(ConcurrentReferenceHashMap.TaskOption option) {\n            return this.options.contains(option);\n        }\n\n        @Nullable\n        protected T execute(@Nullable ConcurrentReferenceHashMap.Reference<K, V> ref, @Nullable ConcurrentReferenceHashMap.Entry<K, V> entry, @Nullable ConcurrentReferenceHashMap.Entries<V> entries) {\n            return this.execute(ref, entry);\n        }\n\n        @Nullable\n        protected T execute(@Nullable ConcurrentReferenceHashMap.Reference<K, V> ref, @Nullable ConcurrentReferenceHashMap.Entry<K, V> entry) {\n            return null;\n        }\n    }\n\n    protected static final class Entry<K, V> implements java.util.Map.Entry<K, V> {\n        @Nullable\n        private final K key;\n        @Nullable\n        private volatile V value;\n\n        public Entry(@Nullable K key, @Nullable V value) {\n            this.key = key;\n            this.value = value;\n        }\n\n        @Nullable\n        public K getKey() {\n            return this.key;\n        }\n\n        @Nullable\n        public V getValue() {\n            return this.value;\n        }\n\n        @Nullable\n        public V setValue(@Nullable V value) {\n            V previous = this.value;\n            this.value = value;\n            return previous;\n        }\n\n        public String toString() {\n            return this.key + \"=\" + this.value;\n        }\n\n        public final boolean equals(@Nullable Object other) {\n            if (this == other) {\n                return true;\n            } else if (!(other instanceof java.util.Map.Entry)) {\n                return false;\n            } else {\n                java.util.Map.Entry otherEntry = (java.util.Map.Entry)other;\n                return ObjectUtils.nullSafeEquals(this.getKey(), otherEntry.getKey()) && ObjectUtils.nullSafeEquals(this.getValue(), 
otherEntry.getValue());\n            }\n        }\n\n        public final int hashCode() {\n            return ObjectUtils.nullSafeHashCode(this.key) ^ ObjectUtils.nullSafeHashCode(this.value);\n        }\n    }\n\n    protected interface Reference<K, V> {\n        @Nullable\n        ConcurrentReferenceHashMap.Entry<K, V> get();\n\n        int getHash();\n\n        @Nullable\n        ConcurrentReferenceHashMap.Reference<K, V> getNext();\n\n        void release();\n    }\n\n    protected final class Segment extends ReentrantLock {\n        private final ConcurrentReferenceHashMap<K, V>.ReferenceManager referenceManager = ConcurrentReferenceHashMap.this.createReferenceManager();\n        private final int initialSize;\n        private volatile ConcurrentReferenceHashMap.Reference<K, V>[] references;\n        private final AtomicInteger count = new AtomicInteger();\n        private int resizeThreshold;\n\n        public Segment(int initialSize, int resizeThreshold) {\n            this.initialSize = initialSize;\n            this.references = this.createReferenceArray(initialSize);\n            this.resizeThreshold = resizeThreshold;\n        }\n\n        @Nullable\n        public ConcurrentReferenceHashMap.Reference<K, V> getReference(@Nullable Object key, int hash, ConcurrentReferenceHashMap.Restructure restructure) {\n            if (restructure == ConcurrentReferenceHashMap.Restructure.WHEN_NECESSARY) {\n                this.restructureIfNecessary(false);\n            }\n\n            if (this.count.get() == 0) {\n                return null;\n            } else {\n                ConcurrentReferenceHashMap.Reference<K, V>[] references = this.references;\n                int index = this.getIndex(hash, references);\n                ConcurrentReferenceHashMap.Reference<K, V> head = references[index];\n                return this.findInChain(head, key, hash);\n            }\n        }\n\n        @Nullable\n        public <T> T doTask(int hash, @Nullable Object key, 
ConcurrentReferenceHashMap<K, V>.Task<T> task) {\n            boolean resize = task.hasOption(ConcurrentReferenceHashMap.TaskOption.RESIZE);\n            if (task.hasOption(ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_BEFORE)) {\n                this.restructureIfNecessary(resize);\n            }\n\n            if (task.hasOption(ConcurrentReferenceHashMap.TaskOption.SKIP_IF_EMPTY) && this.count.get() == 0) {\n                return (T)task.execute((ConcurrentReferenceHashMap.Reference)null, (ConcurrentReferenceHashMap.Entry)null, (ConcurrentReferenceHashMap.Entries)null);\n            }\n            else {\n                this.lock();\n\n                Object var10;\n                try {\n                    int index = this.getIndex(hash, this.references);\n                    ConcurrentReferenceHashMap.Reference<K, V> head = this.references[index];\n                    ConcurrentReferenceHashMap.Reference<K, V> ref = this.findInChain(head, key, hash);\n                    ConcurrentReferenceHashMap.Entry<K, V> entry = ref != null ? 
ref.get() : null;\n                    ConcurrentReferenceHashMap.Entries<V> entries = (value) -> {\n                        ConcurrentReferenceHashMap.Entry<K, V> newEntry = new ConcurrentReferenceHashMap.Entry(key, value);\n                        ConcurrentReferenceHashMap.Reference<K, V> newReference = this.referenceManager.createReference(newEntry, hash, head);\n                        this.references[index] = newReference;\n                        this.count.incrementAndGet();\n                    };\n                    var10 = task.execute(ref, entry, entries);\n                }\n                finally {\n                    this.unlock();\n                    if (task.hasOption(ConcurrentReferenceHashMap.TaskOption.RESTRUCTURE_AFTER)) {\n                        this.restructureIfNecessary(resize);\n                    }\n\n                }\n\n                return (T)var10;\n            }\n        }\n\n        public void clear() {\n            if (this.count.get() != 0) {\n                this.lock();\n\n                try {\n                    this.references = this.createReferenceArray(this.initialSize);\n                    this.resizeThreshold = (int)((float)this.references.length * ConcurrentReferenceHashMap.this.getLoadFactor());\n                    this.count.set(0);\n                } finally {\n                    this.unlock();\n                }\n\n            }\n        }\n\n        protected final void restructureIfNecessary(boolean allowResize) {\n            int currCount = this.count.get();\n            boolean needsResize = allowResize && currCount > 0 && currCount >= this.resizeThreshold;\n            ConcurrentReferenceHashMap.Reference<K, V> ref = this.referenceManager.pollForPurge();\n            if (ref != null || needsResize) {\n                this.restructure(allowResize, ref);\n            }\n\n        }\n\n        private void restructure(boolean allowResize, @Nullable ConcurrentReferenceHashMap.Reference<K, V> ref) {\n   
         this.lock();\n\n            try {\n                int countAfterRestructure = this.count.get();\n                Set<ConcurrentReferenceHashMap.Reference<K, V>> toPurge = Collections.emptySet();\n                if (ref != null) {\n                    for(toPurge = new HashSet(); ref != null; ref = this.referenceManager.pollForPurge()) {\n                        ((Set)toPurge).add(ref);\n                    }\n                }\n\n                countAfterRestructure -= ((Set)toPurge).size();\n                boolean needsResize = countAfterRestructure > 0 && countAfterRestructure >= this.resizeThreshold;\n                boolean resizing = false;\n                int restructureSize = this.references.length;\n                if (allowResize && needsResize && restructureSize < 1073741824) {\n                    restructureSize <<= 1;\n                    resizing = true;\n                }\n\n                ConcurrentReferenceHashMap.Reference<K, V>[] restructured = resizing ? 
this.createReferenceArray(restructureSize) : this.references;\n\n                for(int i = 0; i < this.references.length; ++i) {\n                    ref = this.references[i];\n                    if (!resizing) {\n                        restructured[i] = null;\n                    }\n\n                    for(; ref != null; ref = ref.getNext()) {\n                        if (!((Set)toPurge).contains(ref)) {\n                            ConcurrentReferenceHashMap.Entry<K, V> entry = ref.get();\n                            if (entry != null) {\n                                int index = this.getIndex(ref.getHash(), restructured);\n                                restructured[index] = this.referenceManager.createReference(entry, ref.getHash(), restructured[index]);\n                            }\n                        }\n                    }\n                }\n\n                if (resizing) {\n                    this.references = restructured;\n                    this.resizeThreshold = (int)((float)this.references.length * ConcurrentReferenceHashMap.this.getLoadFactor());\n                }\n\n                this.count.set(Math.max(countAfterRestructure, 0));\n            } finally {\n                this.unlock();\n            }\n\n        }\n\n        @Nullable\n        private ConcurrentReferenceHashMap.Reference<K, V> findInChain(ConcurrentReferenceHashMap.Reference<K, V> ref, @Nullable Object key, int hash) {\n            for(ConcurrentReferenceHashMap.Reference currRef = ref; currRef != null; currRef = currRef.getNext()) {\n                if (currRef.getHash() == hash) {\n                    ConcurrentReferenceHashMap.Entry<K, V> entry = currRef.get();\n                    if (entry != null) {\n                        K entryKey = entry.getKey();\n                        if (ObjectUtils.nullSafeEquals(entryKey, key)) {\n                            return currRef;\n                        }\n                    }\n                }\n            
}\n\n            return null;\n        }\n\n        private ConcurrentReferenceHashMap.Reference<K, V>[] createReferenceArray(int size) {\n            return new ConcurrentReferenceHashMap.Reference[size];\n        }\n\n        private int getIndex(int hash, ConcurrentReferenceHashMap.Reference<K, V>[] references) {\n            return hash & references.length - 1;\n        }\n\n        public final int getSize() {\n            return this.references.length;\n        }\n\n        public final int getCount() {\n            return this.count.get();\n        }\n    }\n\n    public static enum ReferenceType {\n        SOFT,\n        WEAK;\n\n        private ReferenceType() {\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/Dictionary.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.util.json.JSONUtils;\n\nimport java.util.List;\nimport java.util.Map;\n\n/**\n *  Dictionary\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  PHP Array / Python Dictionary Style\n *  *****************************************************************************************\n *  Dragon King, the undefined\n */\npublic interface Dictionary<V > extends Dictium<V > {\n    default void reset () {\n        this.resetAsList();\n    }\n\n    @Override\n    default void clear () {\n        if( this.isMap() ) {\n            this.getMap().clear();\n        }\n        else {\n            this.getList().clear();\n        }\n    }\n\n    default void reduce () {\n        if( this.isMap() ) {\n            this.resetAsList();\n        }\n        else {\n            this.getList().clear();\n        }\n    }\n\n    @Override\n    default V get( Object key ) {\n        if( this.isMap() ) {\n            return this.getMap().get( key );\n        }\n\n        int index = JSONUtils.asInt32Key( key );\n        return this.getList().get( index );\n    }\n\n    @Override\n    default V erase( Object key ) {\n        if( this.isMap() ) {\n            return this.getMap().remove( key );\n        }\n\n        int index = JSONUtils.asInt32Key( key );\n        return this.getList().remove( index );\n    }\n\n    boolean isMap();\n\n    boolean isList();\n\n    Map<?, V > affirmMap() ;\n\n    List<V >   affirmList() ;\n\n    Map<?, V > resetAsMap() ;\n\n    List<V >   resetAsList() ;\n\n    Dictionary<V > convertToMap();\n\n    Dictionary<V > convertToList();\n\n    Map<?, V > getMap() throws ClassCastException ;\n\n    List<V >   getList() throws ClassCastException ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/Dictium.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic interface Dictium<V > extends PineUnit {\n    int size();\n\n    boolean isEmpty();\n\n    void clear();\n\n    @Override\n    boolean containsKey( Object key );\n\n    boolean containsValue( Object value );\n\n    V get( Object key );\n\n    V insertIfAbsent( Object key, V value );\n\n    V insert( Object key, V value );\n\n    V erase( Object key );\n\n    Set<? > entrySet();\n\n    Collection<V > values();\n\n    Map<?, V > toMap();\n\n    List<V > toList();\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/DummyMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\npublic class DummyMap<K, V > implements Map<K, V > , PineUnit {\n    @Override\n    public V put(K key, V value) {\n        return null;\n    }\n\n    @Override\n    public V get(Object key) {\n        return null;\n    }\n\n    @Override\n    public int size() {\n        return 0;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return true;\n    }\n\n    @Override\n    public boolean remove(Object key, Object value) {\n        return false;\n    }\n\n    @Override\n    public V remove(Object key) {\n        return null;\n    }\n\n    @Override\n    public void putAll(Map<? extends K, ? extends V> m) {\n\n    }\n\n    @Override\n    public void clear() {\n\n    }\n\n    @Override\n    public boolean containsKey(Object key) {\n        return false;\n    }\n\n    @Override\n    public boolean containsValue(Object value) {\n        return false;\n    }\n\n    @Override\n    public boolean hasOwnProperty(Object elm) {\n        return false;\n    }\n\n    @Override\n    public Set<Entry<K, V>> entrySet() {\n        return Set.of();\n    }\n\n    @Override\n    public Set<K> keySet() {\n        return Set.of();\n    }\n\n    @Override\n    public Collection<V> values() {\n        return List.of();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/KeyValue.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.system.prototype.TypeIndex;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.util.Map;\n\npublic class KeyValue<K, V > implements Map.Entry<K, V >, Pinenut {\n    protected K key;\n\n    protected V value;\n\n    public KeyValue( K key, V value ) {\n        this.key = key;\n        this.value = value;\n    }\n\n    public KeyValue( Map.Entry<K, V > other ) {\n        this( other.getKey(), other.getValue() );\n    }\n\n\n    @Override\n    public K getKey() {\n        return this.key;\n    }\n\n    @Override\n    public V getValue() {\n        return this.value;\n    }\n\n    @Override\n    public V setValue( V value ) {\n        V oldValue = this.value;\n        this.value = value;\n        return oldValue;\n    }\n\n    @Override\n    public boolean equals( Object o ) {\n        if ( !(o instanceof Map.Entry) ) {\n            return false;\n        }\n        Map.Entry<?,?> e = (Map.Entry<?,?>)o;\n\n        return valEquals( this.key,e.getKey()) && valEquals( this.value,e.getValue() );\n    }\n\n    @Override\n    public int hashCode() {\n        int keyHash = (this.key==null ? 0 : this.key.hashCode());\n        int valueHash = (this.value==null ? 0 : this.value.hashCode());\n        return keyHash ^ valueHash;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"{\" + StringUtils.jsonQuote( this.key.toString() ) + \":\" + JSON.stringify( this.value ) + \"}\";\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this );\n    }\n\n\n    static final boolean valEquals( Object o1, Object o2 ) {\n        return (o1==null ? 
o2==null : o1.equals(o2));\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedCaseInsensitiveMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.Locale;\nimport java.util.Map;\nimport java.util.Map.Entry;\n\npublic class LinkedCaseInsensitiveMap<V> extends LinkedHashMap<String, V> {\n    private final Map<String, String> caseInsensitiveKeys;\n    private final Locale locale;\n\n    public LinkedCaseInsensitiveMap() {\n        this((Locale)null);\n    }\n\n    public LinkedCaseInsensitiveMap(Locale locale) {\n        this.caseInsensitiveKeys = new HashMap();\n        this.locale = locale != null ? locale : Locale.getDefault();\n    }\n\n    public LinkedCaseInsensitiveMap(int initialCapacity) {\n        this(initialCapacity, (Locale)null);\n    }\n\n    public LinkedCaseInsensitiveMap(int initialCapacity, Locale locale) {\n        super(initialCapacity);\n        this.caseInsensitiveKeys = new HashMap<>(initialCapacity);\n        this.locale = locale != null ? locale : Locale.getDefault();\n    }\n\n    @Override\n    public V put(String key, V value) {\n        String oldKey = (String)this.caseInsensitiveKeys.put(this.convertKey(key), key);\n        if (oldKey != null && !oldKey.equals(key)) {\n            super.remove(oldKey);\n        }\n\n        return super.put(key, value);\n    }\n\n    @Override\n    public void putAll(Map<? extends String, ? extends V> map) {\n        if (!map.isEmpty()) {\n            Iterator var2 = map.entrySet().iterator();\n\n            while(var2.hasNext()) {\n                Entry<? extends String, ? extends V> entry = (Entry)var2.next();\n                this.put((String)entry.getKey(), entry.getValue());\n            }\n\n        }\n    }\n\n    @Override\n    public boolean containsKey(Object key) {\n        return key instanceof String && this.caseInsensitiveKeys.containsKey(this.convertKey((String)key));\n    }\n\n    @Override\n    public V get(Object key) {\n        return key instanceof String ? 
super.get(this.caseInsensitiveKeys.get(this.convertKey((String)key))) : null;\n    }\n\n    @Override\n    public V remove(Object key) {\n        return key instanceof String ? super.remove(this.caseInsensitiveKeys.remove(this.convertKey((String)key))) : null;\n    }\n\n    @Override\n    public void clear() {\n        this.caseInsensitiveKeys.clear();\n        super.clear();\n    }\n\n    protected String convertKey(String key) {\n        return key.toLowerCase(this.locale);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedMultiValueMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.LinkedList;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\n\npublic class LinkedMultiValueMap<K, V > extends MultiValueMaptron<K, V, List<V > > implements MultiValueMap<K, V > {\n    public LinkedMultiValueMap() {\n        this( new LinkedHashMap<>() );\n    }\n\n    public LinkedMultiValueMap( int initialCapacity ) {\n        this( new LinkedHashMap<>( initialCapacity ) );\n    }\n\n    public LinkedMultiValueMap( Map<K, List<V > > otherMap ) {\n        super( otherMap, false );\n    }\n\n    @Override\n    protected List<V > newCollection() {\n        return new LinkedList<>();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedTreeMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.system.prototype.TypeIndex;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.util.*;\nimport java.util.function.BiConsumer;\nimport java.util.function.BiFunction;\nimport java.util.function.Consumer;\n\npublic class LinkedTreeMap <K,V> extends TreeMap<K,V> implements PineUnit, ListedSortedMap<K, V >, Iterable<Map.Entry<K, V > > {\n    protected static class LinkedEntry<K,V> extends TreeMap.Entry<K,V> {\n        LinkedEntry<K,V> before, after;\n\n        LinkedEntry( K key, V value, TreeMap.Entry<K,V > parent ) {\n            super( key, value, parent );\n        }\n\n        public void extend( Map.Entry<K,V > entry ) {\n            this.key   = entry.getKey();\n            this.value = entry.getValue();\n        }\n\n        @Override\n        public String toString() {\n            return this.toJSONString();\n        }\n\n        @Override\n        public String toJSONString() {\n            return super.toJSONString();\n        }\n\n        @Override\n        public TypeIndex prototype() {\n            return Prototype.typeid( this );\n        }\n    }\n\n    protected transient LinkedTreeMap.LinkedEntry<K,V> head;\n\n    protected transient LinkedTreeMap.LinkedEntry<K,V> tail;\n\n    protected final boolean accessOrder;\n\n\n    // internal utilities\n\n    private void linkNodeFirst( LinkedTreeMap.LinkedEntry<K,V> p ) {\n        LinkedTreeMap.LinkedEntry<K,V> front = this.head;\n        this.head = p;\n        if ( front == null ) {\n            this.tail = p;\n        }\n        else {\n            p.after = front;\n            front.before = p;\n        }\n    }\n\n    private void linkNodeLast( LinkedTreeMap.LinkedEntry<K,V> p ) {\n        
LinkedTreeMap.LinkedEntry<K,V> last = this.tail;\n        this.tail = p;\n        if ( last == null ) {\n            this.head = p;\n        }\n        else {\n            p.before = last;\n            last.after = p;\n        }\n    }\n\n    private void linkBefore( LinkedTreeMap.LinkedEntry<K,V> newNode, LinkedTreeMap.LinkedEntry<K,V> succ ) {\n        // assert succ != null;\n        final LinkedEntry<K,V> pred = succ.before;\n        //final LinkedEntry<K,V> newNode = new Node<>(pred, e, succ);\n        newNode.before = pred;\n        newNode.after  = succ;\n        succ.before = newNode;\n        if ( pred == null ) {\n            this.head = newNode;\n        }\n        else {\n            pred.after = newNode;\n        }\n    }\n\n    private LinkedEntry<K,V> detachLastTailInsert() {\n        LinkedEntry<K,V> lastInserted = LinkedTreeMap.this.tail;\n        if( LinkedTreeMap.this.tail.before != null ){\n            LinkedTreeMap.this.tail.before.after = null;\n        }\n        LinkedTreeMap.this.tail = LinkedTreeMap.this.tail.before;\n        lastInserted.before = null;\n        lastInserted.after = null;\n\n        return lastInserted;\n    }\n\n\n    protected V putValFront( K key, V value, boolean onlyIfAbsent, boolean evict ) {\n        TreeMap.Entry<K,V> t = this.root;\n        if ( t == null ) {\n            this.compare( key, key ); // type (and possibly null) check\n\n            this.root = this.spawnNodeFront( key, value, null );\n            this.size = 1;\n            ++this.modCount;\n            return null;\n        }\n        int cmp;\n        TreeMap.Entry<K,V> parent;\n        // split comparator and comparable paths\n        Comparator<? 
super K> cpr = this.comparator;\n\n        TreeMap.Entry<K,V> legacy = null;\n        if ( cpr != null ) {\n            do {\n                parent = t;\n                cmp = cpr.compare(key, t.key);\n                if (cmp < 0) {\n                    t = t.left;\n                }\n                else if ( cmp > 0 ) {\n                    t = t.right;\n                }\n                else {\n                    legacy = t;\n                    break;\n                }\n            }\n            while ( t != null );\n        }\n        else {\n            if ( key == null ) {\n                throw new NullPointerException();\n            }\n            @SuppressWarnings(\"unchecked\")\n            Comparable<? super K> k = (Comparable<? super K>) key;\n            do {\n                parent = t;\n                cmp = k.compareTo(t.key);\n                if ( cmp < 0 ) {\n                    t = t.left;\n                }\n                else if ( cmp > 0 ) {\n                    t = t.right;\n                }\n                else {\n                    legacy = t;\n                    break;\n                }\n            }\n            while ( t != null );\n        }\n\n        if ( legacy != null ) { // existing mapping for key\n            V oldValue = legacy.value;\n            if ( !onlyIfAbsent || oldValue == null ) {\n                legacy.setValue( value );\n            }\n            this.afterNodeAccess( legacy );\n            return oldValue;\n        }\n\n        TreeMap.Entry<K,V> e = this.spawnNodeFront( key, value, parent );\n        if ( cmp < 0 ) {\n            parent.left = e;\n        }\n        else {\n            parent.right = e;\n        }\n        this.fixAfterInsertion(e);\n        ++this.size;\n        ++this.modCount;\n        this.afterNodeInsertion( evict );\n        return null;\n    }\n\n    @Override\n    protected TreeMap.Entry<K,V > spawnNode( K key, V value, TreeMap.Entry<K,V > parent ) {\n        
LinkedTreeMap.LinkedEntry<K,V> p = new LinkedTreeMap.LinkedEntry<>( key, value, parent );\n        this.linkNodeLast(p);\n        return p;\n    }\n\n    protected TreeMap.Entry<K,V > spawnNodeFront( K key, V value, TreeMap.Entry<K,V > parent ) {\n        LinkedTreeMap.LinkedEntry<K,V> p = new LinkedTreeMap.LinkedEntry<>( key, value, parent );\n        this.linkNodeFirst(p);\n        return p;\n    }\n\n\n    protected void unlinkFirst( LinkedEntry<K,V> f ) {\n        f = (LinkedEntry<K,V>)this.onlyDeleteEntry( f );\n        this.unlink( f );\n\n//        // assert f == first && f != null;\n//        final LinkedEntry<K,V> next = f.after;\n//        f.after = null; // help GC\n//        this.head = next;\n//        if ( next == null ) {\n//            this.tail = null;\n//        }\n//        else {\n//            next.before = null;\n//        }\n    }\n\n    private void unlinkLast( LinkedEntry<K,V> l ) {\n        l = (LinkedEntry<K,V>)this.onlyDeleteEntry( l );\n        this.unlink( l );\n        // assert l == last && l != null;\n//        final LinkedEntry<K,V> prev = l.before;\n//        l.before = null; // help GC\n//        this.tail = prev;\n//        if ( prev == null ) {\n//            this.head = null;\n//        }\n//        else {\n//            prev.after = null;\n//        }\n    }\n\n    protected void unlink( TreeMap.Entry<K,V> e ) {\n        LinkedTreeMap.LinkedEntry<K,V> p = ( LinkedTreeMap.LinkedEntry<K,V> )e, b = p.before, a = p.after;\n        p.before = p.after = null;\n        if ( b == null ) {\n            this.head = a;\n        }\n        else {\n            b.after = a;\n        }\n        if ( a == null ) {\n            this.tail = b;\n        }\n        else {\n            a.before = b;\n        }\n    }\n\n    @Override\n    protected void afterNodeRemoval( TreeMap.Entry<K,V> e ) { // unlink\n        this.unlink( e );\n    }\n\n    protected void afterNodeInsertion( boolean evict ) { // possibly remove eldest\n        
LinkedEntry<K,V> first;\n        if ( evict && (first = this.head) != null && this.removeEldestEntry(first) ) {\n            K key = first.key;\n            TreeMap.Entry<K,V> candidate = getEntry( key );\n            this.deleteEntry( candidate );\n        }\n    }\n\n    protected void afterNodeAccess( TreeMap.Entry<K,V> e ) { // move node to last\n        LinkedTreeMap.LinkedEntry<K,V> last;\n        if ( this.accessOrder && (last = this.tail) != e ) {\n            LinkedTreeMap.LinkedEntry<K,V> p = (LinkedTreeMap.LinkedEntry<K,V>)e, b = p.before, a = p.after;\n            p.after = null;\n            if ( b == null ) {\n                this.head = a;\n            }\n            else {\n                b.after = a;\n            }\n            if ( a != null ) {\n                a.before = b;\n            }\n            else {\n                last = b;\n            }\n            if ( last == null ) {\n                this.head = p;\n            }\n            else {\n                p.before = last;\n                last.after = p;\n            }\n            this.tail = p;\n            ++this.modCount;\n        }\n    }\n\n    @Override\n    protected void internalWriteEntries( ObjectOutputStream s ) throws IOException {\n        for ( LinkedTreeMap.LinkedEntry<K,V> e = head; e != null; e = e.after ) {\n            s.writeObject(e.key);\n            s.writeObject(e.value);\n        }\n    }\n\n    @Override\n    protected void internalReadEntries( int size, final ObjectInputStream s ) throws IOException, ClassNotFoundException {\n        for ( int i = 0; i < size; i++ ) {\n            @SuppressWarnings(\"unchecked\")\n            K key = (K) s.readObject();\n            @SuppressWarnings(\"unchecked\")\n            V value = (V) s.readObject();\n            this.putVal(  key, value, false, false );\n        }\n    }\n\n    public LinkedTreeMap() {\n        super();\n        this.accessOrder = false;\n    }\n\n    public LinkedTreeMap( Comparator<? 
super K> comparator ) {\n        this( comparator, false );\n    }\n\n    public LinkedTreeMap( boolean accessOrder ) {\n        super();\n        this.accessOrder = accessOrder;\n    }\n\n    public LinkedTreeMap( Comparator<? super K> comparator, boolean accessOrder ) {\n        super( comparator );\n        this.accessOrder = accessOrder;\n    }\n\n    public LinkedTreeMap( Map<? extends K, ? extends V> m ) {\n        super();\n        this.accessOrder = false;\n        this.putMapEntries( m, false );\n    }\n\n    public LinkedTreeMap( SortedMap<K, ? extends V> m ) {\n        super( m );\n        this.accessOrder = false;\n    }\n\n    public boolean containsValue( Object value ) {\n        for ( LinkedTreeMap.LinkedEntry<K,V> e = head; e != null; e = e.after ) {\n            V v = e.value;\n            if ( v == value || (value != null && value.equals(v)) ) {\n                return true;\n            }\n        }\n        return false;\n    }\n\n    public V get( Object key ) {\n        TreeMap.Entry<K,V> e;\n        if ( (e = this.getEntry( key )) == null ) {\n            return null;\n        }\n        if ( this.accessOrder ) {\n            this.afterNodeAccess(e);\n        }\n        return e.value;\n    }\n\n    public V getOrDefault( Object key, V defaultValue ) {\n        TreeMap.Entry<K,V> e;\n        if ( (e = this.getEntry( key ) ) == null ) {\n            return defaultValue;\n        }\n        if ( this.accessOrder ) {\n            this.afterNodeAccess(e);\n        }\n        return super.getOrDefault( key, defaultValue );\n    }\n\n    public void clear() {\n        super.clear();\n        this.head = this.tail = null;\n    }\n\n    protected boolean removeEldestEntry( Map.Entry<K,V> eldest ) {\n        return false;\n    }\n\n    // Linked & Deque operations\n    public boolean contains( Object o ) {\n        if( o instanceof Map.Entry<?, ? 
> ) {\n            @SuppressWarnings(\"unchecked\")\n            Map.Entry<K,V > kv = (Map.Entry<K,V >) o;\n            TreeMap.Entry<K,V > treeEntry = this.getEntry( kv.getKey() );\n            return treeEntry != null && kv.getValue().equals( treeEntry.value );\n        }\n        return false;\n    }\n\n    public boolean add( Map.Entry<K,V > e ) {\n        this.addLast( e );\n        return true;\n    }\n\n    public boolean addAll( Collection<? extends Map.Entry<K, V> > c ) {\n        if( c.size() == 0 ) {\n            return false;\n        }\n        for ( Map.Entry<? extends K, ? extends V> e : c ) {\n            this.addLast( e.getKey(), e.getValue() );\n        }\n        return true;\n    }\n\n    public V addFirst( K key, V value ) {\n        return this.putValFront( key, value, false, true );\n    }\n\n    public V addLast( K key, V value ) {\n        return this.put( key, value );\n    }\n\n    //@Override\n    public void addFirst( Map.Entry<K,V > e ) {\n        this.addFirst( e.getKey(), e.getValue() );\n    }\n\n    //@Override\n    public void addLast( Map.Entry<K,V > e ) {\n        this.addLast( e.getKey(), e.getValue() );\n    }\n\n    public Map.Entry<K,V > getFirst() {\n        final Map.Entry<K,V > f = this.head;\n        if ( f == null ) {\n            throw new NoSuchElementException();\n        }\n        return f;\n    }\n\n    public Map.Entry<K,V > getLast() {\n        final Map.Entry<K,V > l = this.tail;\n        if ( l == null ) {\n            throw new NoSuchElementException();\n        }\n        return l;\n    }\n\n    public Map.Entry<K,V > removeFirst() {\n        final LinkedEntry<K,V > f = this.head;\n        if ( f == null ) {\n            throw new NoSuchElementException();\n        }\n        this.unlinkFirst( f );\n        return f;\n    }\n\n    public Map.Entry<K,V > removeLast() {\n        final LinkedEntry<K,V > l = this.tail;\n        if ( l == null ) {\n            throw new NoSuchElementException();\n        }\n      
  this.unlinkLast( l );\n        return l;\n    }\n\n    // Queue operations.\n\n    public Map.Entry<K,V > peek() {\n        return this.head;\n    }\n\n    public Map.Entry<K,V > element() {\n        return this.getFirst();\n    }\n\n    public Map.Entry<K,V > poll() {\n        final LinkedEntry<K,V > f = this.head;\n        if( f != null ) {\n            this.unlinkFirst(f);\n        }\n        return f;\n    }\n\n    public Map.Entry<K,V > remove() {\n        return this.removeFirst();\n    }\n\n    public boolean offer( Map.Entry<K,V > e ) {\n        return this.add(e);\n    }\n\n    // Deque operations\n    public boolean offerFirst( Map.Entry<K,V > e ) {\n        this.addFirst( e );\n        return true;\n    }\n\n    public boolean offerLast( Map.Entry<K,V > e ) {\n        this.addLast(e);\n        return true;\n    }\n\n    public Map.Entry<K,V > peekFirst() {\n        return this.head;\n    }\n\n    public Map.Entry<K,V > peekLast() {\n        return this.tail;\n    }\n\n    public Map.Entry<K,V > pollFirst() {\n        final LinkedEntry<K,V > f = this.head;\n        if( f != null ) {\n            this.unlinkFirst(f);\n        }\n        return f;\n    }\n\n    public Map.Entry<K,V > pollLast() {\n        final LinkedEntry<K,V > l = this.tail;\n        if( l != null ) {\n            this.unlinkLast(l);\n        }\n        return l;\n    }\n\n    public void push( Map.Entry<K,V > e ) {\n        this.addFirst(e);\n    }\n\n    public Map.Entry<K,V > pop() {\n        return this.removeFirst();\n    }\n\n    public boolean removeFirstOccurrence( Object o ) {\n        if( o instanceof Map.Entry<?, ? 
> ) {\n            @SuppressWarnings(\"unchecked\")\n            Map.Entry<K,V > kv = (Map.Entry<K,V >) o;\n            TreeMap.Entry<K,V > treeEntry = this.getEntry( kv.getKey() );\n            if( treeEntry != null && kv.getValue().equals( treeEntry.value ) ) {\n                this.deleteEntry( treeEntry );\n                return true;\n            }\n        }\n        return false;\n    }\n\n    public boolean removeLastOccurrence( Object o ) {\n        return this.removeFirstOccurrence( o ); // This is a map, all keys are unique.\n    }\n\n    private boolean isElementIndex( int index ) {\n        return index >= 0 && index < this.size;\n    }\n\n    private boolean isPositionIndex( int index ) {\n        return index >= 0 && index <= this.size;\n    }\n\n    private String outOfBoundsMsg( int index ) {\n        return \"Index: \"+index+\", Size: \"+ this.size;\n    }\n\n    private void checkElementIndex( int index ) {\n        if ( !this.isElementIndex(index) ) {\n            throw new IndexOutOfBoundsException(this.outOfBoundsMsg(index));\n        }\n    }\n\n    private void checkPositionIndex(int index) {\n        if ( !this.isPositionIndex(index) ) {\n            throw new IndexOutOfBoundsException(this.outOfBoundsMsg(index));\n        }\n    }\n\n    public ListIterator<Map.Entry<K, V> > listIterator( int index ) {\n        this.checkPositionIndex(index);\n        return new LinkedListIterator( index );\n    }\n\n    public Iterator<Map.Entry<K, V> > iterator() {\n        return new LinkedEntryIterator();\n    }\n\n    public Iterator<Map.Entry<K, V> > descendingIterator(){\n        return new DescendingIterator();\n    }\n\n    public Object[] toArray() {\n        Object[] result = new Object[ this.size ];\n        int i = 0;\n        for ( LinkedEntry<K, V > x = this.head; x != null; x = x.after ) {\n            result[ i++ ] = x;\n        }\n        return result;\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    public <T> T[] toArray( T[] a ) 
{\n        if ( a.length < this.size ) {\n            a = (T[])java.lang.reflect.Array.newInstance( a.getClass().getComponentType(), this.size );\n        }\n        int i = 0;\n        Object[] result = a;\n        for ( LinkedEntry<K, V > x = this.head; x != null; x = x.after ) {\n            result[i++] = x;\n        }\n\n        if ( a.length > this.size ) {\n            a[ this.size ] = null;\n        }\n\n        return a;\n    }\n\n    public boolean containsAll( Collection<?> c ) {\n        for ( Object e : c ) {\n            if ( !this.contains(e) ) {\n                return false;\n            }\n        }\n        return true;\n    }\n\n    public boolean removeAll( Collection<?> c ) {\n        Objects.requireNonNull(c);\n        boolean modified = false;\n        Iterator<?> it = this.iterator();\n        while ( it.hasNext() ) {\n            if ( c.contains(it.next()) ) {\n                it.remove();\n                modified = true;\n            }\n        }\n        return modified;\n    }\n\n    public boolean retainAll( Collection<?> c ) {\n        Objects.requireNonNull(c);\n        boolean modified = false;\n        Iterator<Map.Entry<K,V > > it = this.iterator();\n        while (it.hasNext()) {\n            if (!c.contains(it.next())) {\n                it.remove();\n                modified = true;\n            }\n        }\n        return modified;\n    }\n\n    @Override\n    public Set<K > keySet() {\n        Set<K> ks = this.keySet;\n        if ( ks == null ) {\n            ks = new LinkedKeySet();\n            this.keySet = ks;\n        }\n        return ks;\n    }\n\n    final class LinkedKeySet extends AbstractSet<K> {\n        public final int size()                 { return size; }\n\n        public final void clear()               { LinkedTreeMap.this.clear(); }\n\n        public final Iterator<K> iterator() {\n            return new LinkedKeyIterator();\n        }\n\n        public final boolean contains( Object o ) { return 
containsKey(o); }\n\n        public final boolean remove( Object key ) {\n            TreeMap.Entry<K,V> candidate = getEntry(  key );\n            boolean b = candidate != null;\n            deleteEntry( candidate );\n            return b;\n        }\n\n        public final Spliterator<K> spliterator()  {\n            return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT );\n        }\n\n        public final void forEach( Consumer<? super K> action ) {\n            if ( action == null ) {\n                throw new NullPointerException();\n            }\n            int mc = modCount;\n            for ( LinkedTreeMap.LinkedEntry<K,V> e = head; e != null; e = e.after ) {\n                action.accept(e.key);\n            }\n\n            if ( modCount != mc ) {\n                throw new ConcurrentModificationException();\n            }\n        }\n    }\n\n    @Override\n    public Collection<V> values() {\n        Collection<V> vs = this.values;\n        if ( vs == null ) {\n            vs = new LinkedValues();\n            this.values = vs;\n        }\n        return vs;\n    }\n\n    final class LinkedValues extends AbstractCollection<V> {\n        public final int size()                 { return size; }\n\n        public final void clear()               { LinkedTreeMap.this.clear(); }\n\n        public final Iterator<V> iterator() {\n            return new LinkedValueIterator();\n        }\n\n        public final boolean contains( Object o ) { return containsValue(o); }\n\n        public final Spliterator<V> spliterator() {\n            return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED );\n        }\n\n        public final void forEach( Consumer<? 
super V> action ) {\n            if (action == null) {\n                throw new NullPointerException();\n            }\n            int mc = modCount;\n            for ( LinkedTreeMap.LinkedEntry<K,V> e = head; e != null; e = e.after ) {\n                action.accept(e.value);\n            }\n            if ( modCount != mc ) {\n                throw new ConcurrentModificationException();\n            }\n        }\n    }\n\n    @Override\n    public Set<Map.Entry<K,V> > entrySet() {\n        Set<Map.Entry<K,V>> es;\n        return (es = this.entrySet) == null ? (this.entrySet = new LinkedEntrySet()) : es;\n    }\n\n    public Set<Map.Entry<K,V > > treeEntrySet() {\n        return new EntrySet();\n    }\n\n    protected final class LinkedEntrySet extends AbstractSet<Map.Entry<K,V> > {\n        public final int size()                 { return size; }\n\n        public final void clear()               { LinkedTreeMap.this.clear(); }\n\n        public final Iterator<Map.Entry<K,V> > iterator() {\n            return new LinkedEntryIterator();\n        }\n\n        public final boolean contains( Object o ) {\n            if ( !(o instanceof Map.Entry) ) {\n                return false;\n            }\n            Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n            Object key = e.getKey();\n            TreeMap.Entry<K,V> candidate = getEntry(  key );\n            return candidate != null && candidate.equals(e);\n        }\n\n        public final boolean remove( Object o ) {\n            if ( o instanceof Map.Entry ) {\n                Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n                Object key = e.getKey();\n                TreeMap.Entry<K,V> candidate = getEntry(  key );\n                boolean b = candidate != null;\n                deleteEntry( candidate );\n                return b;\n            }\n            return false;\n        }\n\n        public final Spliterator<Map.Entry<K,V>> spliterator() {\n            return Spliterators.spliterator( this, 
Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT );\n        }\n\n        public final void forEach(Consumer<? super Map.Entry<K,V>> action) {\n            if ( action == null ) {\n                throw new NullPointerException();\n            }\n            int mc = modCount;\n            for ( LinkedTreeMap.LinkedEntry<K,V> e = head; e != null; e = e.after ) {\n                action.accept(e);\n            }\n            if ( modCount != mc ) {\n                throw new ConcurrentModificationException();\n            }\n        }\n    }\n\n    // Map overrides\n\n    @Override\n    public void forEach( BiConsumer<? super K, ? super V> action ) {\n        if ( action == null ) {\n            throw new NullPointerException();\n        }\n        int mc = modCount;\n        for ( LinkedTreeMap.LinkedEntry<K,V> e = head; e != null; e = e.after ) {\n            action.accept(e.key, e.value);\n        }\n        if ( modCount != mc ) {\n            throw new ConcurrentModificationException();\n        }\n    }\n\n    @Override\n    public void replaceAll( BiFunction<? super K, ? super V, ? 
extends V> function ) {\n        if ( function == null ) {\n            throw new NullPointerException();\n        }\n        int mc = modCount;\n        for ( LinkedTreeMap.LinkedEntry<K,V> e = head; e != null; e = e.after ) {\n            e.value = function.apply(e.key, e.value);\n        }\n        if ( modCount != mc ) {\n            throw new ConcurrentModificationException();\n        }\n    }\n\n    // Iterators\n\n    protected LinkedEntry<K, V > queryNodeByIndex( int index ) {\n        // assert isElementIndex(index);\n\n        if ( index < (this.size >> 1) ) {\n            LinkedEntry<K, V > x = this.head;\n            for ( int i = 0; i < index; i++ ) {\n                x = x.after;\n            }\n            return x;\n        }\n        else {\n            LinkedEntry<K, V > x = this.tail;\n            for ( int i = size - 1; i > index; i-- ){\n                x = x.before;\n            }\n            return x;\n        }\n    }\n\n    protected abstract class LinkedTreeIterator {\n        LinkedTreeMap.LinkedEntry<K,V> next;\n        LinkedTreeMap.LinkedEntry<K,V> current;\n        int expectedModCount;\n\n        LinkedTreeIterator() {\n            this.next = head;\n            this.expectedModCount = modCount;\n            this.current = null;\n        }\n\n        public final boolean hasNext() {\n            return this.next != null;\n        }\n\n        final LinkedTreeMap.LinkedEntry<K,V> nextNode() {\n            LinkedTreeMap.LinkedEntry<K,V> e = next;\n            if ( modCount != this.expectedModCount ) {\n                throw new ConcurrentModificationException();\n            }\n            if ( e == null ) {\n                throw new NoSuchElementException();\n            }\n            this.current = e;\n            this.next = e.after;\n            return e;\n        }\n\n        public final void remove() {\n            TreeMap.Entry<K,V> p = this.current;\n            if ( p == null ) {\n                throw new 
IllegalStateException();\n            }\n            if ( modCount != this.expectedModCount ) {\n                throw new ConcurrentModificationException();\n            }\n\n            this.current = null;\n            deleteEntry( p );\n            this.expectedModCount = modCount;\n        }\n    }\n\n    protected final class LinkedKeyIterator extends LinkedTreeIterator implements Iterator<K> {\n        public final K next() { return nextNode().getKey(); }\n    }\n\n    protected final class LinkedValueIterator extends LinkedTreeIterator implements Iterator<V> {\n        public final V next() { return nextNode().value; }\n    }\n\n    protected final class LinkedEntryIterator extends LinkedTreeIterator implements Iterator<Map.Entry<K,V>> {\n        public final Map.Entry<K,V> next() { return nextNode(); }\n    }\n\n    protected class LinkedListIterator implements ListIterator<Map.Entry<K,V> > {\n        private LinkedEntry<K,V> lastReturned;\n        private LinkedEntry<K,V> next;\n        private int nextIndex;\n        private int expectedModCount = modCount;\n\n        LinkedListIterator( int index ) {\n            // assert isPositionIndex(index);\n            next = (index == size) ? 
null : LinkedTreeMap.this.queryNodeByIndex( index );\n            nextIndex = index;\n        }\n\n        public boolean hasNext() {\n            return nextIndex < size;\n        }\n\n        public Map.Entry<K,V> next() {\n            checkForComodification();\n            if (!hasNext())\n                throw new NoSuchElementException();\n\n            lastReturned = next;\n            next = next.after;\n            nextIndex++;\n            return lastReturned;\n        }\n\n        public boolean hasPrevious() {\n            return nextIndex > 0;\n        }\n\n        public Map.Entry<K,V> previous() {\n            checkForComodification();\n            if (!hasPrevious())\n                throw new NoSuchElementException();\n\n            lastReturned = next = (next == null) ? tail : next.before;\n            nextIndex--;\n            return lastReturned;\n        }\n\n        public int nextIndex() {\n            return nextIndex;\n        }\n\n        public int previousIndex() {\n            return nextIndex - 1;\n        }\n\n        public void remove() {\n            this.checkForComodification();\n            if ( this.lastReturned == null ) {\n                throw new IllegalStateException();\n            }\n\n            LinkedEntry<K,V> lastNext = this.lastReturned.after;\n            deleteEntry( this.lastReturned );\n            if ( this.next == this.lastReturned ) {\n                this.next = lastNext;\n            }\n            else {\n                this.nextIndex--;\n            }\n            this.lastReturned = null;\n            this.expectedModCount++;\n        }\n\n        public void set( Map.Entry<K,V> e ) {\n            if ( this.lastReturned == null ) {\n                throw new IllegalStateException();\n            }\n            checkForComodification();\n            this.lastReturned.extend( e );\n        }\n\n        public void add( Map.Entry<K,V> e ) {\n            this.checkForComodification();\n            
this.lastReturned = null;\n            if ( this.next == null ) {\n                //linkLast(e);\n                LinkedTreeMap.this.addLast( e );\n            }\n            else {\n                LinkedTreeMap.this.addLast( e );\n                LinkedEntry<K, V > lastInserted = LinkedTreeMap.this.detachLastTailInsert();\n                LinkedTreeMap.this.linkBefore( lastInserted, next );\n            }\n            this.nextIndex++;\n            this.expectedModCount++;\n        }\n\n        public void forEachRemaining( Consumer<? super Map.Entry<K,V>> action ) {\n            Objects.requireNonNull(action);\n            while ( modCount == this.expectedModCount && this.nextIndex < size ) {\n                action.accept( this.next );\n                this.lastReturned = this.next;\n                this.next = this.next.after;\n                this.nextIndex++;\n            }\n            this.checkForComodification();\n        }\n\n        final void checkForComodification() {\n            if ( modCount != this.expectedModCount ) {\n                throw new ConcurrentModificationException();\n            }\n        }\n    }\n\n    protected class DescendingIterator implements Iterator<Map.Entry<K,V> > {\n        private final LinkedListIterator itr = new LinkedListIterator(size());\n\n        public boolean hasNext() {\n            return this.itr.hasPrevious();\n        }\n\n        public Map.Entry<K,V> next() {\n            return this.itr.previous();\n        }\n\n        public void remove() {\n            this.itr.remove();\n        }\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    @Override\n    public Object clone() {\n        LinkedTreeMap<K,V > clone = (LinkedTreeMap<K,V> ) super.superClone();\n\n        clone.head = null;\n        clone.tail = null;\n        clone.putMapEntries( this, false );\n\n        return clone;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    
public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    public Deque<Map.Entry<K, V > > toQueue() {\n        return new LinkedTreeMapList<>( this );\n    }\n\n    public List<Map.Entry<K, V > > toList() {\n        return new LinkedTreeMapList<>( this );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedTreeMapList.java",
    "content": "package com.pinecone.framework.unit;\n\n\nimport java.util.Map;\nimport java.util.AbstractSequentialList;\nimport java.util.List;\nimport java.util.Deque;\nimport java.util.Comparator;\nimport java.util.SortedMap;\nimport java.util.Collection;\nimport java.util.ListIterator;\nimport java.util.Iterator;\n\npublic class LinkedTreeMapList<K,V > extends AbstractSequentialList<Map.Entry<K, V > > implements List<Map.Entry<K, V > >, Deque<Map.Entry<K, V > > {\n    static final long serialVersionUID = -5024789606714721619L;\n\n    private transient LinkedTreeMap<K,V > map;\n\n    public LinkedTreeMapList( LinkedTreeMap<K,V > map ) {\n        super();\n        this.map = map;\n    }\n\n    public LinkedTreeMapList() {\n        super();\n        this.map = new LinkedTreeMap<>();\n    }\n\n    public LinkedTreeMapList( Comparator<? super K> comparator ) {\n        super();\n        this.map = new LinkedTreeMap<>( comparator );\n    }\n\n    public LinkedTreeMapList( Comparator<? super K> comparator, boolean accessOrder ) {\n        super();\n        this.map = new LinkedTreeMap<>( comparator, accessOrder );\n    }\n\n    public LinkedTreeMapList( boolean accessOrder ) {\n        super();\n        this.map = new LinkedTreeMap<>( accessOrder );\n    }\n\n    public LinkedTreeMapList( Map<? extends K, ? extends V> m ) {\n        super();\n        this.map = new LinkedTreeMap<>( m );\n    }\n\n    public LinkedTreeMapList( SortedMap<K, ? 
extends V> m ) {\n        super();\n        this.map = new LinkedTreeMap<>( m );\n    }\n\n    public LinkedTreeMap<K,V > getMap() {\n        return this.map;\n    }\n\n    public int size() {\n        return this.map.size();\n    }\n\n    public boolean isEmpty() {\n        return this.map.isEmpty();\n    }\n\n\n\n    public boolean remove(Object o) {\n        return this.map.remove(o) != null;\n    }\n\n    public void clear() {\n        this.map.clear();\n    }\n\n\n\n    // Linked & Deque operations\n\n    public boolean contains( Object o ) {\n        return this.map.contains(o);\n    }\n\n    public boolean add( Map.Entry<K, V > e ) {\n        return this.map.put(e.getKey(), e.getValue())==null;\n    }\n\n    public boolean addAll( Collection<? extends Map.Entry<K, V> > c ) {\n        return this.map.addAll( c );\n    }\n\n    public V addFirst( K key, V value ) {\n        return this.map.addFirst( key, value );\n    }\n\n    public V addLast( K key, V value ) {\n        return this.map.addLast( key, value );\n    }\n\n    //@Override\n    public void addFirst( Map.Entry<K,V > e ) {\n        this.map.addFirst( e );\n    }\n\n    //@Override\n    public void addLast( Map.Entry<K,V > e ) {\n        this.map.addLast( e );\n    }\n\n    public Map.Entry<K,V > getFirst() {\n        return this.map.getFirst();\n    }\n\n    public Map.Entry<K,V > getLast() {\n        return this.map.getLast();\n    }\n\n    public Map.Entry<K,V > removeFirst() {\n        return this.map.removeFirst();\n    }\n\n    public Map.Entry<K,V > removeLast() {\n        return this.map.removeLast();\n    }\n\n    // Queue operations.\n\n    public Map.Entry<K,V > peek() {\n        return this.map.peek();\n    }\n\n    public Map.Entry<K,V > element() {\n        return this.map.element();\n    }\n\n    public Map.Entry<K,V > poll() {\n        return this.map.poll();\n    }\n\n    public Map.Entry<K,V > remove() {\n        return this.map.remove();\n    }\n\n    public boolean offer( 
Map.Entry<K,V > e ) {\n        return this.map.offer(e);\n    }\n\n    // Deque operations\n    public boolean offerFirst( Map.Entry<K,V > e ) {\n        return this.map.offerFirst(e);\n    }\n\n    public boolean offerLast( Map.Entry<K,V > e ) {\n        return this.map.offerLast(e);\n    }\n\n    public Map.Entry<K,V > peekFirst() {\n        return this.map.peekFirst();\n    }\n\n    public Map.Entry<K,V > peekLast() {\n        return this.map.peekLast();\n    }\n\n    public Map.Entry<K,V > pollFirst() {\n        return this.map.pollFirst();\n    }\n\n    public Map.Entry<K,V > pollLast() {\n        return this.map.pollLast();\n    }\n\n    public void push( Map.Entry<K,V > e ) {\n        this.map.push(e);\n    }\n\n    public Map.Entry<K,V > pop() {\n        return this.map.pop();\n    }\n\n    public boolean removeFirstOccurrence( Object o ) {\n        return this.map.removeFirstOccurrence(o);\n    }\n\n    public boolean removeLastOccurrence( Object o ) {\n        return this.map.removeLastOccurrence(o);\n    }\n\n\n    public ListIterator<Map.Entry<K, V> > listIterator( int index ) {\n        return this.map.listIterator( index );\n    }\n\n    public Iterator<Map.Entry<K, V> > iterator() {\n        return this.map.iterator();\n    }\n\n    public Iterator<Map.Entry<K, V> > descendingIterator(){\n        return this.map.descendingIterator();\n    }\n\n    public Object[] toArray() {\n        return this.map.toArray();\n    }\n\n    public <T> T[] toArray( T[] a ) {\n        return this.map.toArray(a);\n    }\n\n    public boolean containsAll( Collection<?> c ) {\n        return this.map.containsAll(c);\n    }\n\n    public boolean removeAll( Collection<?> c ) {\n        return this.map.removeAll(c);\n    }\n\n    public boolean retainAll( Collection<?> c ) {\n        return this.map.retainAll(c);\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/LinkedTreeSet.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.io.InvalidObjectException;\nimport java.io.Serializable;\nimport java.util.*;\n\npublic class LinkedTreeSet<E> extends AbstractSet<E> implements Set<E>, Cloneable, Serializable {\n    static final long serialVersionUID = -5024744406713321676L;\n\n    private transient LinkedTreeMap<E,Object > map;\n\n    private static final Object PRESENT = new Object();\n\n    public LinkedTreeSet() {\n        this.map = new LinkedTreeMap<>();\n    }\n\n    public LinkedTreeSet( Collection<? extends E> c ) {\n        this.map = new LinkedTreeMap<>();\n        this.addAll(c);\n    }\n\n    public LinkedTreeSet( Comparator<? super E> comparator ) {\n        this.map = new LinkedTreeMap<>( comparator );\n    }\n\n    public LinkedTreeSet( Comparator<? super E> comparator, boolean accessOrder ) {\n        super();\n        this.map = new LinkedTreeMap<>( comparator, accessOrder );\n    }\n\n    public LinkedTreeSet( boolean accessOrder ) {\n        super();\n        this.map = new LinkedTreeMap<>( accessOrder );\n    }\n\n\n    public Iterator<E > iterator() {\n        return this.map.keySet().iterator();\n    }\n\n    public int size() {\n        return this.map.size();\n    }\n\n    public boolean isEmpty() {\n        return this.map.isEmpty();\n    }\n\n    public boolean contains( Object o ) {\n        return this.map.containsKey(o);\n    }\n\n    public boolean add(E e) {\n        return this.map.put(e, PRESENT)==null;\n    }\n\n    public boolean remove(Object o) {\n        return this.map.remove(o)==PRESENT;\n    }\n\n    public void clear() {\n        this.map.clear();\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    public Object clone() {\n        try {\n            LinkedTreeSet<E> newSet = (LinkedTreeSet<E>) super.clone();\n            newSet.map = (LinkedTreeMap<E, Object > ) this.map.clone();\n            return newSet;\n        }\n        catch ( CloneNotSupportedException e ) {\n            throw 
new InternalError(e);\n        }\n    }\n\n    private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {\n        s.defaultWriteObject();\n\n        s.writeInt( this.map.size() );\n\n        // Write out all elements in the proper order.\n        for ( E e : this.map.keySet() ) {\n            s.writeObject(e);\n        }\n    }\n\n    private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {\n        s.defaultReadObject();\n\n        int size = s.readInt();\n        if ( size < 0 ) {\n            throw new InvalidObjectException(\"Illegal size: \" + size);\n        }\n\n        this.map = new LinkedTreeMap<>();\n\n        for ( int i = 0; i < size; i++ ) {\n            @SuppressWarnings(\"unchecked\")\n            E e = (E) s.readObject();\n            this.map.put( e, PRESENT );\n        }\n    }\n\n    public Spliterator<E> spliterator() {\n        return LinkedTreeMap.keySpliteratorFor( this.map );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ListDictium.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.system.prototype.TypeIndex;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.math.BigDecimal;\nimport java.util.List;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.ListIterator;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.AbstractSet;\nimport java.util.Spliterator;\nimport java.util.Spliterators;\nimport java.util.NoSuchElementException;\n\npublic class ListDictium<V > implements Dictium<V >, List<V > {\n    private List<V > mTargetList;\n\n    public ListDictium( List<V > target ) {\n        this.mTargetList = target;\n    }\n\n    public ListDictium() {\n        this( new ArrayList<>() );\n    }\n\n    public static int asInt32Key( Object key ) {\n        if ( key instanceof Integer ) {\n            return (int) key;\n        }\n        else if ( key instanceof Float || key instanceof Double || key instanceof BigDecimal ) {\n            throw new IllegalArgumentException( \"Array does not allow float as key.\" );\n        }\n        else if ( key instanceof Number ) {\n            return ((Number) key).intValue();\n        }\n        else if ( key instanceof String ) {\n            return Integer.parseInt((String) key);\n        }\n\n        throw new IllegalArgumentException( \"Key of Array should be integer or integer-fmt-string.\" );\n    }\n\n    @Override\n    public int size() {\n        return this.mTargetList.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mTargetList.isEmpty();\n    }\n\n    @Override\n    public void clear() {\n        this.mTargetList.clear();\n    }\n\n    @Override\n    public boolean containsKey( Object 
key ) {\n        try {\n            int index = ListDictium.asInt32Key( key );\n            return index >= 0 && index < this.mTargetList.size();\n        }\n        catch ( IllegalArgumentException e ) {\n            return false;\n        }\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.contains( value );\n    }\n\n    @Override\n    public boolean contains( Object o ) {\n        return this.mTargetList.contains( o );\n    }\n\n    @Override\n    public boolean add( V v ) {\n        return this.mTargetList.add( v );\n    }\n\n    @Override\n    public boolean containsAll( Collection<?> c ) {\n        return this.mTargetList.containsAll( c );\n    }\n\n    @Override\n    public boolean addAll( int index, Collection<? extends V> c ) {\n        return this.mTargetList.addAll( index, c );\n    }\n\n    @Override\n    public boolean addAll( Collection<? extends V> c ) {\n        return this.mTargetList.addAll( c );\n    }\n\n    @Override\n    public boolean removeAll( Collection<?> c ) {\n        return this.mTargetList.removeAll( c );\n    }\n\n    @Override\n    public boolean retainAll( Collection<?> c ) {\n        return this.mTargetList.retainAll( c );\n    }\n\n    @Override\n    public V get( Object key ) {\n        try {\n            int index = ListDictium.asInt32Key( key );\n            if ( index >= 0 && index < this.mTargetList.size() ) {\n                return this.mTargetList.get( index );\n            }\n        }\n        catch ( IllegalArgumentException e ) {\n            // Do nothing\n        }\n        return null;\n    }\n\n    @Override\n    public V get( int index ) {\n        return this.mTargetList.get( index );\n    }\n\n    @Override\n    public V set( int index, V value ) {\n        while ( this.mTargetList.size() <= index ) {\n            this.mTargetList.add( null );\n        }\n        return this.mTargetList.set( index, value );\n    }\n\n    @Override\n    public void add( int index, 
V value ) {\n        while ( this.mTargetList.size() <= index ) {\n            this.mTargetList.add( null );\n        }\n        this.mTargetList.add( index, value );\n    }\n\n\n    @Override\n    public V insert( Object key, V value ) {\n        int index = ListDictium.asInt32Key( key );\n        return this.set( index, value );\n    }\n\n    @Override\n    public V insertIfAbsent( Object key, V value ) {\n        if( !this.containsKey( ListDictium.asInt32Key( key ) ) ){\n            this.insert( key, value );\n        }\n        return null;\n    }\n\n    @Override\n    public V erase( Object key ) {\n        try {\n            int index = ListDictium.asInt32Key( key );\n            if ( index >= 0 && index < this.mTargetList.size() ) {\n                return this.mTargetList.remove(index);\n            }\n        }\n        catch ( IllegalArgumentException e ) {\n            // Do nothing\n        }\n        return null;\n    }\n\n    @Override\n    public boolean remove( Object key ) {\n        this.erase( key );\n        return true;\n    }\n\n    @Override\n    public V remove( int index ) {\n        return this.mTargetList.remove( index );\n    }\n\n    @Override\n    public int indexOf( Object o ) {\n        return this.mTargetList.indexOf( o );\n    }\n\n    @Override\n    public int lastIndexOf( Object o ) {\n        return this.mTargetList.lastIndexOf( o );\n    }\n\n    @Override\n    public ListIterator<V> listIterator() {\n        return this.mTargetList.listIterator();\n    }\n\n    @Override\n    public ListIterator<V> listIterator( int index ) {\n        return this.mTargetList.listIterator( index );\n    }\n\n    @Override\n    public List<V> subList( int fromIndex, int toIndex ) {\n        return this.mTargetList.subList( fromIndex, toIndex );\n    }\n\n    @Override\n    public Iterator<V > iterator() {\n        return this.mTargetList.iterator();\n    }\n\n    @Override\n    public Set<Map.Entry<Integer, V > > entrySet() {\n        return new 
ListEntrySet();\n    }\n\n    public Set<Integer > keySet() {\n        return new ListKeyEntrySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return this.mTargetList;\n    }\n\n    @Override\n    public Map<Integer, V > toMap() {\n        return new ListMap();\n    }\n\n    @Override\n    public List<V > toList() {\n        return this.mTargetList;\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object index ) {\n        return this.containsKey( index );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    @Override\n    public <T> T[] toArray( T[] a ) {\n        return this.mTargetList.<T>toArray( a );\n    }\n\n    @Override\n    public Object[] toArray() {\n        return this.mTargetList.toArray();\n    }\n\n\n    protected class ListEntrySet extends AbstractSet<Map.Entry<Integer, V > > {\n        @Override\n        public Iterator<Map.Entry<Integer, V > > iterator() {\n            return new ListEntryIterator();\n        }\n\n        @Override\n        public int size() {\n            return ListDictium.this.mTargetList.size();\n        }\n\n        @Override\n        public void clear() {\n            ListDictium.this.clear();\n        }\n\n        @Override\n        public boolean contains(Object o) {\n            if (!(o instanceof Map.Entry)) {\n                return false;\n            }\n            Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;\n            Object key = e.getKey();\n            Object v = ListDictium.this.get(key);\n            return v != null && v.equals(e.getValue());\n        }\n\n        @Override\n        public boolean remove(Object o) {\n            if (this.contains(o)) {\n                Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;\n                Object key = e.getKey();\n                return ListDictium.this.erase(key) != null;\n          
  }\n            return false;\n        }\n\n        @Override\n        public Spliterator<Map.Entry<Integer, V>> spliterator() {\n            return Spliterators.spliterator(this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT);\n        }\n    }\n\n    protected abstract class DiListEntryIterator {\n        protected Iterator<V> currentIterator;\n        protected int index;\n        protected ListEntry<V> dummyEntry;\n\n        public DiListEntryIterator() {\n            this.index = 0;\n            this.currentIterator = ListDictium.this.mTargetList.iterator();\n            this.dummyEntry = new ListEntry<>( this.index, null );\n        }\n\n        public boolean hasNext() {\n            return this.currentIterator.hasNext();\n        }\n\n        protected Map.Entry<Integer, V> nextNode() {\n            if ( !this.hasNext() ) {\n                throw new NoSuchElementException();\n            }\n\n            this.dummyEntry.setKey( this.index++ );\n            this.dummyEntry.setValue( this.currentIterator.next() );\n            return this.dummyEntry;\n        }\n\n        public void remove() {\n            this.currentIterator.remove();\n        }\n    }\n\n    protected class ListEntryIterator extends DiListEntryIterator implements Iterator<Map.Entry<Integer, V>> {\n        @Override\n        public Map.Entry<Integer, V> next() {\n            return this.nextNode();\n        }\n    }\n\n    protected static boolean valEquals( Object o1, Object o2 ) {\n        return (o1==null ? 
o2==null : o1.equals(o2));\n    }\n\n    protected static class ListEntry<V > implements Map.Entry<Integer, V >, Pinenut {\n        Integer key;\n        V  value;\n\n        ListEntry( Integer key, V value ) {\n            this.key   = key;\n            this.value = value;\n        }\n\n        @Override\n        public Integer getKey() {\n            return this.key;\n        }\n\n        @Override\n        public V getValue() {\n            return this.value;\n        }\n\n        @Override\n        public V setValue( V value ) {\n            V oldValue = this.value;\n            this.value = value;\n            return oldValue;\n        }\n\n        public void setKey( Integer key ) {\n            this.key = key;\n        }\n\n        @Override\n        public boolean equals( Object o ) {\n            if (!(o instanceof Map.Entry))\n                return false;\n            Map.Entry<?,?> e = (Map.Entry<?,?>)o;\n\n            return valEquals(key,e.getKey()) && valEquals(value,e.getValue());\n        }\n\n        @Override\n        public int hashCode() {\n            int keyHash = (key==null ? 0 : key.hashCode());\n            int valueHash = (value==null ? 
0 : value.hashCode());\n            return keyHash ^ valueHash;\n        }\n\n        @Override\n        public String toString() {\n            return this.toJSONString();\n        }\n\n        @Override\n        public String toJSONString() {\n            return \"{\" + StringUtils.jsonQuote( this.key.toString() ) + \":\" + JSON.stringify( this.value ) + \"}\";\n        }\n\n        @Override\n        public TypeIndex prototype() {\n            return Prototype.typeid( this );\n        }\n    }\n\n\n\n\n    protected class ListKeyIterator extends DiListEntryIterator implements Iterator<Integer > {\n        @Override\n        public Integer next() {\n            return this.nextNode().getKey();\n        }\n    }\n\n    protected class ListKeyEntrySet extends AbstractSet<Integer > {\n        @Override\n        public Iterator<Integer > iterator() {\n            return new ListKeyIterator();\n        }\n\n        @Override\n        public int size() {\n            return ListDictium.this.mTargetList.size();\n        }\n\n        @Override\n        public void clear() {\n            ListDictium.this.clear();\n        }\n\n        @Override\n        public boolean contains( Object o ) {\n            if (!(o instanceof Map.Entry)) {\n                return false;\n            }\n            Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;\n            Object key = e.getKey();\n            Object v = ListDictium.this.get(key);\n            return v != null && v.equals(e.getValue());\n        }\n\n        @Override\n        public boolean remove( Object o ) {\n            if (this.contains(o)) {\n                Map.Entry<?, ?> e = (Map.Entry<?, ?>) o;\n                Object key = e.getKey();\n                return ListDictium.this.erase(key) != null;\n            }\n            return false;\n        }\n\n        @Override\n        public Spliterator<Integer > spliterator() {\n            return Spliterators.spliterator(this, Spliterator.SIZED | Spliterator.ORDERED | 
Spliterator.DISTINCT);\n        }\n    }\n\n    protected class ListMap implements Map<Integer, V >, PineUnit {\n        @Override\n        public int size() {\n            return ListDictium.this.size();\n        }\n\n        @Override\n        public boolean isEmpty() {\n            return ListDictium.this.isEmpty();\n        }\n\n        @Override\n        public void clear() {\n            ListDictium.this.clear();\n        }\n\n        @Override\n        public boolean containsKey( Object key ) {\n            return ListDictium.this.containsKey( key );\n        }\n\n        @Override\n        public boolean containsValue( Object value ) {\n            return ListDictium.this.containsValue( value );\n        }\n\n        @Override\n        public V get( Object key ) {\n            return ListDictium.this.get( key );\n        }\n\n        @Override\n        public V put( Integer index, V value ) {\n            return ListDictium.this.set( index, value );\n        }\n\n        @Override\n        public V remove( Object key ) {\n            return ListDictium.this.erase( key );\n        }\n\n        @Override\n        public void putAll( Map<? extends Integer, ? extends V> m ) {\n            for( Map.Entry<? extends Integer, ? 
extends V > kv: m.entrySet() ) {\n                this.put( kv.getKey(), kv.getValue() );\n            }\n        }\n\n        @Override\n        public Set<Map.Entry<Integer, V > > entrySet() {\n            return new ListEntrySet();\n        }\n\n        @Override\n        public Set<Integer > keySet() {\n            return new ListKeyEntrySet();\n        }\n\n        @Override\n        public Collection<V > values() {\n            return ListDictium.this.values();\n        }\n\n        @Override\n        public boolean hasOwnProperty( Object index ) {\n            return this.containsKey( index );\n        }\n\n        @Override\n        public String toString() {\n            return this.toJSONString();\n        }\n\n        @Override\n        public String toJSONString() {\n            return JSON.stringify( this );\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ListedSortedMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.ListIterator;\nimport java.util.SortedMap;\n\npublic interface ListedSortedMap<K,V> extends SortedMap<K,V> {\n    ListIterator<Entry<K, V> > listIterator(int index );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MapDictium.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\n\npublic class MapDictium<V > implements Dictium<V >, Map<Object, V > {\n    private Map<Object, V > mTargetMap;\n\n    public MapDictium( Map<Object, V > map ) {\n        this.mTargetMap = map;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public MapDictium( Map map, boolean bUnchecked ) {\n        this( map );\n    }\n\n    public MapDictium() {\n        this( new LinkedHashMap<>() );\n    }\n\n    @Override\n    public int size() {\n        return this.mTargetMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mTargetMap.isEmpty();\n    }\n\n    @Override\n    public void clear() {\n        this.mTargetMap.clear();\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.mTargetMap.containsKey(key);\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.mTargetMap.containsValue( value );\n    }\n\n    @Override\n    public V get( Object key ) {\n        return this.mTargetMap.get( key );\n    }\n\n    @Override\n    public V put( Object key, V value ) {\n        return this.insert( key, value );\n    }\n\n    @Override\n    public void putAll( Map<?, ? 
extends V> m ) {\n        this.mTargetMap.putAll( m );\n    }\n\n    @Override\n    public V insert( Object key, V value ) {\n        return this.mTargetMap.put( key, value );\n    }\n\n    @Override\n    public V insertIfAbsent( Object key, V value ) {\n        return this.mTargetMap.putIfAbsent( key, value );\n    }\n\n    @Override\n    public V erase( Object key ) {\n        return this.mTargetMap.remove( key );\n    }\n\n    @Override\n    public V remove( Object key ) {\n        return this.erase( key );\n    }\n\n    @Override\n    public boolean remove( Object key, Object value ) {\n        return this.mTargetMap.remove( key, value );\n    }\n\n    @Override\n    public Set<Map.Entry<Object, V > > entrySet() {\n        return this.mTargetMap.entrySet();\n    }\n\n    @Override\n    public Set<Object > keySet() {\n        return this.mTargetMap.keySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return this.mTargetMap.values();\n    }\n\n    @Override\n    public Map<Object, V > toMap() {\n        return this.mTargetMap;\n    }\n\n    @Override\n    public List<V > toList() {\n        return new ArrayList<>( this.mTargetMap.values() );\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object index ) {\n        return this.containsKey( index );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.mTargetMap );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/Mapnut.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\nimport java.util.Map;\n\npublic interface Mapnut<K, V > extends PineUnit, Map<K, V > {\n    // WARNING, Modified outside will provokes unpredictable results. [ Readonly for performance purpose, in principle ]\n    // Java has not the const function, this inconvenient...\n    // Equals `Map::Entry<K, V > getEntryByKey( Object compatibleKey )`\n    Map.Entry<K, V > getEntryByKey( Object compatibleKey ); // Jesus christ... Even it is not full-safe outside, but we need this!\n\n    // Equals `const Map::Entry<K, V > getEntryByKey( Object compatibleKey ) const`\n    Map.Entry<K, V > getEntryCopyByKey( Object compatibleKey ); // Ah, this one is more safer.\n\n    default long megaSize(){\n        return this.size();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiScopeMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.*;\n\npublic interface MultiScopeMap<K, V > extends ScopeMap<K, V > {\n    List<MultiScopeMap<K, V > >    getParents    ();\n\n    Map<K, V >                     thisScope  ();\n\n    MultiScopeMap<K, V >           setParents    ( List<MultiScopeMap<K, V > > that );\n\n    MultiScopeMap<K, V >           setThisScope  ( Map<K, V > that );\n\n    MultiScopeMap<K, V >           addParent     ( MultiScopeMap<K, V > that );\n\n    String                         getName       ();\n\n    MultiScopeMap<K, V >           setName       ( String name );\n\n    default boolean                isAnonymous   () {\n        return this.getName().isEmpty();\n    }\n\n    @Override\n    default boolean                isProgenitor  () {\n        return this.getParents() == null;\n    }\n\n    @Override\n    default void                   purge() {\n        this.setParents( null );\n        this.clear();\n    }\n\n    @Override\n    default void                   depurate() {\n        List<MultiScopeMap<K, V > > p = this.getParents();\n        if( p != null ) {\n            for ( MultiScopeMap<K, V > m : p ) {\n                m.depurate();\n            }\n        }\n\n        this.clear();\n    }\n\n    @Override\n    default void                   overrideTo ( Map<K, V > neo ) {\n        Map<K, V > self = this.thisScope();\n        for ( Map.Entry<? extends K, ? 
extends V> e : self.entrySet() ){\n            neo.putIfAbsent( e.getKey(), e.getValue() );\n        }\n\n        List<MultiScopeMap<K, V > > p = this.getParents();\n        if( p != null ) {\n            for ( MultiScopeMap<K, V > m : p ) {\n                m.overrideTo( neo );\n            }\n        }\n    }\n\n    @Override\n    default boolean                isScopeEmpty () {\n        boolean b = this.isEmpty();\n\n        if( b ) {\n            List<MultiScopeMap<K, V > > p = this.getParents();\n            if( p != null ) {\n                for ( MultiScopeMap<K, V > m : p ) {\n                    b = m.isScopeEmpty();\n                    if( !b ) {\n                        break;\n                    }\n                }\n            }\n        }\n\n        return b;\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default ScopeMap<K, V >[]      ancestors  (){\n        List<ScopeMap<K, V>> l = new ArrayList<>();\n        ScopeTrees.groupByNodes( this, l );\n        return l.toArray( (ScopeMap<K, V >[]) new MultiScopeMap[0] );\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default ScopeMap<K, V >[]      scopes     (){\n        ArrayList<ScopeMap<K, V > > l = new ArrayList<>();\n        l.add( this );\n        ScopeTrees.groupByNodes( this, l );\n        return l.toArray( (ScopeMap<K, V >[]) new MultiScopeMap[0] );\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default ScopeMap<K, V >        getAll        ( Object key, List<V > ret ) {\n        V v = this.thisScope().get( key );\n        if( v != null ) {\n            ret.add( v );\n        }\n\n        ScopeTrees.search( this, ( Object...args )->{\n            MultiScopeMap<K, V> currentMap = (MultiScopeMap<K, V>) args[0];\n            if( currentMap != this ) {\n                V t = currentMap.thisScope().get( key );\n                if( t != null ) {\n                    ret.add( t );\n                }\n            }\n            return false;\n  
      } );\n\n        return this;\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    default V                      query         ( Object key, String szParentNS ) {\n        final V[] v = (V[]) new Object[1];\n        v[0] = this.thisScope().get(key);\n        if (v[0] != null) {\n            return v[0];\n        }\n\n        ScopeTrees.search(this, (Object... args) -> {\n            MultiScopeMap<K, V> currentMap = (MultiScopeMap<K, V>) args[0];\n            if (currentMap != this) {\n                V t = currentMap.thisScope().get(key);\n                if (t != null && currentMap.getName().equals(szParentNS)) {\n                    v[0] = t;\n                    return true;\n                }\n            }\n            return false;\n        });\n\n        return v[0];\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    default MultiScopeMap<K, V >   getScopeByNS  ( String szParentNS ) {\n        final Object[] v = new Object[1];\n\n        ScopeTrees.search(this, (Object... args) -> {\n            MultiScopeMap<K, V> currentMap = (MultiScopeMap<K, V>) args[0];\n            if ( currentMap != this ) {\n                if ( currentMap.getName().equals(szParentNS) ) {\n                    v[0] = currentMap;\n                    return true;\n                }\n            }\n            return false;\n        });\n\n        return (MultiScopeMap<K, V > )v[0];\n    }\n\n    @Override\n    default MultiScopeMap<K, V >   removeAll  ( Object key ) {\n        this.thisScope().remove( key );\n\n        List<MultiScopeMap<K, V > > p = this.getParents();\n        if( p != null ) {\n            for ( MultiScopeMap<K, V > m : p ) {\n                m.removeAll( key );\n            }\n        }\n\n        return this;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiScopeMaptron.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.io.Serializable;\nimport java.util.*;\n\npublic class MultiScopeMaptron<K, V > implements PineUnit, Map<K, V >, MultiScopeMap<K, V >, Cloneable, Serializable, Iterable<Map.Entry<K, V > > {\n    protected String                            mszName    ;\n\n    protected List<MultiScopeMap<K, V > >       mParents ;\n\n    protected Map<K, V>                         mThisMap ;\n\n    protected transient Set<Map.Entry<K,V> >    entrySet ;\n    protected transient Set<K>                  scKeySet ;\n    protected transient Collection<V>           scValues ;\n\n\n    public MultiScopeMaptron() {\n        this( true, null );\n    }\n\n    public MultiScopeMaptron( String name ) {\n        this( true, null );\n        this.setName( name );\n    }\n\n    public MultiScopeMaptron( Map<K, V > thisMap, List<MultiScopeMap<K, V > > prototypes, String name ){\n        this.mThisMap  = thisMap;\n        this.mParents  = prototypes;\n        this.mszName   = name;\n\n        if( this.mThisMap == null ) {\n            this.mThisMap = new LinkedHashMap<>();\n        }\n    }\n\n    public MultiScopeMaptron( Map<K, V > thisMap, List<MultiScopeMap<K, V > > prototypes ){\n        this( thisMap, prototypes, \"\" );\n    }\n\n    public MultiScopeMaptron( boolean bLinked, List<MultiScopeMap<K, V > > prototypes ){\n        this( bLinked ? 
new LinkedHashMap<>() : new HashMap<>(), prototypes );\n    }\n\n    public MultiScopeMaptron( Map<K, V > thisMap ){\n        this( thisMap, null );\n    }\n\n\n\n\n\n    @Override\n    public String                         getName       () {\n        return this.mszName;\n    }\n\n    @Override\n    public MultiScopeMap<K, V >           setName       ( String name ) {\n        this.mszName = name;\n        return this;\n    }\n\n    @Override\n    public List<MultiScopeMap<K, V > >    getParents    () {\n        return this.mParents;\n    }\n\n    @Override\n    public Map<K, V >                     thisScope  () {\n        return this.mThisMap;\n    }\n\n    @Override\n    public MultiScopeMap<K, V >           setParents    ( List<MultiScopeMap<K, V > > that ) {\n        this.mParents = that;\n        return this;\n    }\n\n    @Override\n    public MultiScopeMap<K, V >           setThisScope  ( Map<K, V > that ) {\n        this.mThisMap = that;\n        return this;\n    }\n\n    @Override\n    public MultiScopeMap<K, V >           addParent     ( MultiScopeMap<K, V > that ) {\n        if( this.getParents() == null ) {\n            this.mParents = new ArrayList<>();\n        }\n        this.mParents.add( that );\n        return this;\n    }\n\n    @Override\n    public ScopeMap<K, V >                elevate       ( Map<K, V > child ) {\n        MultiScopeMaptron<K, V > sup = new MultiScopeMaptron<>( this.mThisMap, this.mParents );\n        this.setThisScope ( child );\n        ArrayList<MultiScopeMap<K, V > > a = new ArrayList<>();\n        a.add( sup );\n        this.setParents   ( a     );\n        return this;\n    }\n\n\n\n\n    /** Basic Map **/\n    @Override\n    public int size() {\n        return this.mThisMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mThisMap.isEmpty();\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        boolean result = this.mThisMap.containsKey(key);\n        if 
( !result && this.mParents != null ) {\n            for ( MultiScopeMap<K, V > m : this.mParents ) {\n                result = m.containsKey( key );\n                if( result ) {\n                    break;\n                }\n            }\n        }\n\n        return result;\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        boolean result = this.mThisMap.containsValue(value);\n        if ( !result && this.mParents != null ) {\n            for ( MultiScopeMap<K, V > m : this.mParents ) {\n                result = m.containsValue( value );\n                if( result ) {\n                    break;\n                }\n            }\n        }\n\n        return result;\n    }\n\n\n    // For Multiple-Inheritance Scope, for ambiguous sibling-super-key, it will only find in the nearest parent.\n    @Override\n    public V get( Object key ) {\n        V val = this.mThisMap.get( key );\n        if ( val == null && this.mParents != null ) {\n            for ( MultiScopeMap<K, V > m : this.mParents ) {\n                val = m.get( key );\n                if( val != null ) {\n                    break;\n                }\n            }\n        }\n\n        return val;\n    }\n\n    @Override\n    public void putAll( Map<? extends K, ? extends V> m ) {\n        this.mThisMap.putAll( m );\n    }\n\n    public MultiScopeMaptron xPutAll(Map<? extends K, ? 
extends V> m ) {\n        this.putAll(m);\n        return this;\n    }\n\n    @Override\n    public void clear() {\n        this.mThisMap.clear();\n    }\n\n    public MultiScopeMaptron xClear() {\n        this.clear();\n        return this;\n    }\n\n    @Override\n    public V remove( Object key ) {\n        V v = this.mThisMap.remove(key);\n        if ( v == null && this.mParents != null ) {\n            for ( MultiScopeMap<K, V > m : this.mParents ) {\n                v = m.remove( key );\n                if( v != null ) {\n                    break;\n                }\n            }\n        }\n\n        return v;\n    }\n\n    public MultiScopeMaptron xRemove( Object key ) {\n        this.remove(key);\n        return this;\n    }\n\n    @Override\n    public Set<K > keySet() {\n        return this.mThisMap.keySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return this.mThisMap.values();\n    }\n\n    @Override\n    public Set<Map.Entry<K, V >> entrySet() {\n        return this.mThisMap.entrySet();\n    }\n\n    @Override\n    public Iterator<Map.Entry<K, V> > iterator() {\n        return this.mThisMap.entrySet().iterator();\n    }\n\n    @Override\n    public V put( K key, V value ) {\n        return this.mThisMap.put( key, value );\n    }\n\n    @Override\n    public V putIfAbsent( K key, V value ) {\n        return this.mThisMap.putIfAbsent( key, value );\n    }\n\n    @Override\n    public boolean hasOwnProperty ( Object key ) {\n        return this.mThisMap.containsKey( key );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.mThisMap );\n    }\n\n\n\n\n\n\n\n\n\n\n\n    @Override\n    public Iterator<Map.Entry<K, V> > scopeIterator() {\n        return null;\n    }\n\n    @Override\n    public Set<Map.Entry<K,V> > scopeEntrySet() {\n        Set<Map.Entry<K,V>> es;\n        return (es = 
this.entrySet) == null ? (this.entrySet = new ScopeEntrySet()) : es;\n    }\n\n    @Override\n    public Set<K > scopeKeySet() {\n        return null;\n    }\n\n    @Override\n    public Collection<V > scopeValues() {\n        return null;\n    }\n\n\n\n\n\n\n\n    protected final class ScopeEntrySet extends AbstractSet<Map.Entry<K,V> > {\n        public final int size()                 { throw new UnsupportedOperationException(\"Iterator only.\"); }\n\n        public final void clear()               { MultiScopeMaptron.this.clear(); }\n\n        public final Iterator<Map.Entry<K,V> > iterator() {\n            return new ScopeEntryIterator();\n        }\n\n        public final boolean contains( Object o ) {\n            if ( !(o instanceof Map.Entry) ) {\n                return false;\n            }\n            Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n            Object key = e.getKey();\n\n            V v = MultiScopeMaptron.this.get(key);\n            return v != null && v.equals(e.getValue());\n        }\n\n        public final boolean remove( Object o ) {\n            if ( this.contains(o) ) {\n                Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n                Object key = e.getKey();\n\n                return MultiScopeMaptron.this.remove(key) != null;\n            }\n            return false;\n        }\n\n        public final Spliterator<Map.Entry<K,V>> spliterator() {\n            return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT );\n        }\n    }\n\n    protected abstract class ScopeIterator {\n        protected MultiScopeMap<K, V >[]            parentsStack;\n        protected int                               stackAt;\n        protected Iterator<Map.Entry<K, V> >        currentIterator;\n\n        public ScopeIterator() {\n            this.parentsStack     = (MultiScopeMap<K, V >[]) MultiScopeMaptron.this.ancestors();\n            this.stackAt          = 0;\n            this.currentIterator  = 
MultiScopeMaptron.this.mThisMap.entrySet().iterator();\n        }\n\n        private boolean parentsHasNext() {\n            boolean b = this.stackAt < this.parentsStack.length;\n            if( b ) {\n                MultiScopeMap<K, V> parentMap = this.parentsStack[ this.stackAt ];\n                if( parentMap != null && parentMap.thisScope() != null ) {\n                    return true;\n                }\n            }\n            return b;\n        }\n\n        public boolean hasNext() {\n            if ( this.currentIterator.hasNext() ) {\n                return true;\n            }\n            else {\n                while ( parentsHasNext() ) {\n                    MultiScopeMap<K, V> parentMap = this.parentsStack[ this.stackAt ];\n                    ++this.stackAt;\n                    if ( parentMap != null ) {\n                        this.currentIterator = parentMap.thisScope().entrySet().iterator();\n                        if ( this.currentIterator.hasNext() ) {\n                            return true;\n                        }\n                    }\n                }\n                return false;\n            }\n        }\n\n        protected Map.Entry<K, V> nextNode() {\n            if (!hasNext()) {\n                throw new NoSuchElementException();\n            }\n            return this.currentIterator.next();\n        }\n\n        public void remove() {\n            this.currentIterator.remove();\n        }\n    }\n\n    final class ScopeKeySet extends AbstractSet<K> {\n        public final int size()                 { throw new UnsupportedOperationException(\"Iterator only.\"); }\n\n        public final void clear()               { MultiScopeMaptron.this.clear(); }\n\n        public final Iterator<K> iterator() {\n            return new ScopeKeyIterator();\n        }\n\n        public final boolean contains( Object o ) { return containsKey(o); }\n\n        public final boolean remove( Object key ) {\n            return 
MultiScopeMaptron.this.remove(key) != null;\n        }\n\n        public final Spliterator<K> spliterator()  {\n            return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT );\n        }\n    }\n\n    protected final class ScopeKeyIterator extends ScopeIterator implements Iterator<K> {\n        public final K next() { return nextNode().getKey(); }\n    }\n\n    final class ScopeValues extends AbstractCollection<V> {\n        public final int size()                 { throw new IllegalStateException(\"Iterator only.\"); }\n\n        public final void clear()               { MultiScopeMaptron.this.clear(); }\n\n        public final Iterator<V> iterator() {\n            return new ScopeValueIterator();\n        }\n\n        public final boolean contains( Object o ) { return containsValue(o); }\n\n        public final Spliterator<V> spliterator() {\n            return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED );\n        }\n    }\n\n    protected final class ScopeValueIterator extends ScopeIterator implements Iterator<V> {\n        public final V next() { return (V)nextNode().getValue(); }\n    }\n\n    protected final class ScopeEntryIterator extends ScopeIterator implements Iterator<Map.Entry<K,V>> {\n        public final Map.Entry<K,V> next() { return nextNode(); }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiValueMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.unit.multi.MultiCollectionProxyMap;\n\nimport java.util.List;\n\npublic interface MultiValueMap<K, V > extends MultiCollectionProxyMap<K, V, List<V > > {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiValueMapper.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.Set;\n\n\npublic interface MultiValueMapper<K, V > extends PineUnit {\n    V getFirst( K k );\n\n    V add( K k, V v );\n\n    V set( K k, V v );\n\n    void setAll( Map<K, V > m );\n\n    V erase( Object key, V value );\n\n    Map<K, V > toSingleValueMap();\n\n    V get( Object k, V v );\n\n    @Override\n    default boolean hasOwnProperty( Object key ) {\n        return this.containsKey( key );\n    }\n\n    int size();\n\n    boolean isEmpty();\n\n    boolean containsKey( Object key );\n\n    boolean containsValue( Object value );\n\n    Collection<V > get(Object key );\n\n    Collection<V > puts( K key, Collection<V > value );\n\n    Collection<V > remove( Object key );\n\n    void putsAll( Map<? extends K, ? extends Collection<V > > m );\n\n    void clear();\n\n    Set<K > keySet();\n\n    Collection<? extends Collection<V > > values();\n\n    Set<? extends Map.Entry<K, ? extends Collection<V > > > entrySet();\n\n    Collection<Map.Entry<K, V > > collection() ;\n\n    Collection<V > collectionValues();\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/MultiValueMaptron.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.unit.multi.MultiCollectionProxyMap;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.io.Serializable;\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.LinkedHashMap;\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.Set;\n\npublic class MultiValueMaptron<K, V, U extends Collection<V > > extends AbstractMultiValueMap<K, V > implements MultiCollectionProxyMap<K, V, U >, Serializable {\n    private static final long serialVersionUID = 3801124242820219131L;\n    private final Map<K, U >    mTargetMap;\n\n    public MultiValueMaptron( Map<K, U > otherMap, boolean bAssimilate ) {\n        if( bAssimilate ) {\n            this.mTargetMap = otherMap;\n        }\n        else {\n            this.mTargetMap = new LinkedHashMap<>( otherMap );\n        }\n    }\n\n    public MultiValueMaptron() {\n        this( new LinkedHashMap<>(), true );\n    }\n\n    protected Map<K, U > getTargetMap(){\n        return this.mTargetMap;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected U newCollection() {\n        return (U) new ArrayList<V >();\n    }\n\n    @Override\n    public V add( K key, V value ) {\n        U values = (U)this.mTargetMap.get( key );\n        if ( values == null ) {\n            values = this.newCollection();\n            this.mTargetMap.put( key, values );\n        }\n\n        if( ((U)values).add( value ) ){\n            return value;\n        }\n        return null;\n    }\n\n    @Override\n    public V getFirst( K key ) {\n        U values = (U)this.mTargetMap.get( key );\n        return values != null ? 
values.iterator().next() : null;\n    }\n\n    @Override\n    public V set( K key, V value ) {\n        U values = this.newCollection();\n        boolean b = values.add( value );\n        this.mTargetMap.put( key, values );\n        if( b ) {\n            return value;\n        }\n        return null;\n    }\n\n    @Override\n    public void setAll( Map<K, V > values ) {\n        Iterator<Entry<K, V > > iter = values.entrySet().iterator();\n\n        while( iter.hasNext() ) {\n            Entry<K, V > entry = (Entry<K, V>)iter.next();\n            this.set( entry.getKey(), entry.getValue() );\n        }\n    }\n\n    @Override\n    public Map<K, V > toSingleValueMap() {\n        LinkedHashMap<K, V> singleValueMap = new LinkedHashMap<>(this.mTargetMap.size());\n        Iterator<Entry<K, U > > iter = this.mTargetMap.entrySet().iterator();\n\n        while( iter.hasNext() ) {\n            Entry<K, U > entry = (Entry<K, U >)iter.next();\n            singleValueMap.put( entry.getKey(), ((U)entry.getValue()).iterator().next() );\n        }\n\n        return singleValueMap;\n    }\n\n    @Override\n    public int size() {\n        return this.mTargetMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mTargetMap.isEmpty();\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.mTargetMap.containsKey(key);\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.mTargetMap.containsValue(value);\n    }\n\n    @Override\n    public U get( Object key ) {\n        return (U)this.mTargetMap.get(key);\n    }\n\n    @Override\n    public U put( K key, U value ) {\n        return (U)this.mTargetMap.put( key, value );\n    }\n\n    @Override\n    public U remove( Object key ) {\n        return (U)this.mTargetMap.remove(key);\n    }\n\n    @Override\n    public void putAll( Map<? extends K, ? 
extends U > m ) {\n        this.mTargetMap.putAll(m);\n    }\n\n    @Override\n    public void clear() {\n        this.mTargetMap.clear();\n    }\n\n    @Override\n    public Set<K > keySet() {\n        return this.mTargetMap.keySet();\n    }\n\n    @Override\n    public Collection<U > values() {\n        return this.mTargetMap.values();\n    }\n\n    @Override\n    public Set<Entry<K, U > > entrySet() {\n        return this.mTargetMap.entrySet();\n    }\n\n    @Override\n    public boolean equals( Object obj ) {\n        return this.mTargetMap.equals(obj);\n    }\n\n    @Override\n    public int hashCode() {\n        return this.mTargetMap.hashCode();\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.mTargetMap );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/PrecedeMultiMaptron.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.*;\n\npublic class PrecedeMultiMaptron<K, V > extends MultiScopeMaptron<K, V > implements PrecedeMultiScopeMap<K, V > {\n\n    protected MultiScopeMap<K, V > mPrecedeScope;\n\n    public PrecedeMultiMaptron() {\n        this( true, null );\n    }\n\n    public PrecedeMultiMaptron( String name ) {\n        this( true, null );\n        this.setName( name );\n    }\n\n    public PrecedeMultiMaptron( Map<K, V > thisMap, List<MultiScopeMap<K, V > > prototypes, String name, MultiScopeMap<K, V > precedeScope ){\n        super( thisMap, prototypes, name );\n        this.mPrecedeScope = precedeScope;\n        if( this.mPrecedeScope == null ) {\n            this.mPrecedeScope = new MultiScopeMaptron<>();\n        }\n    }\n\n    public PrecedeMultiMaptron( Map<K, V > thisMap, List<MultiScopeMap<K, V > > prototypes, String name, Map<K, V > precedeScope ){\n        super( thisMap, prototypes, name );\n        this.mPrecedeScope = new MultiScopeMaptron<>( precedeScope );\n    }\n\n    public PrecedeMultiMaptron( Map<K, V > thisMap, List<MultiScopeMap<K, V > > prototypes, String name ){\n        this( thisMap, prototypes, name, (MultiScopeMap<K, V > ) null );\n    }\n\n    public PrecedeMultiMaptron( Map<K, V > thisMap, List<MultiScopeMap<K, V > > prototypes ){\n        this( thisMap, prototypes, \"\" );\n    }\n\n    public PrecedeMultiMaptron( boolean bLinked, List<MultiScopeMap<K, V > > prototypes ){\n        this( bLinked ? 
new LinkedHashMap<>() : new HashMap<>(), prototypes );\n    }\n\n    public PrecedeMultiMaptron( Map<K, V > thisMap ){\n        this( thisMap, null );\n    }\n\n    @Override\n    public MultiScopeMap<K, V> getPrecedeScope() {\n        return this.mPrecedeScope;\n    }\n\n    @Override\n    public MultiScopeMap<K, V > setPrecedeScope  ( MultiScopeMap<K, V > that ){\n        this.mPrecedeScope = that;\n        return this;\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    public ScopeMap<K, V >[]    scopes     (){\n        ArrayList<ScopeMap<K, V > > l = new ArrayList<>();\n        l.add( this.getPrecedeScope() );\n        l.add( this );\n        ScopeTrees.groupByNodes( this, l );\n        return l.toArray( (ScopeMap<K, V >[]) new MultiScopeMap[0] );\n    }\n\n\n    @Override\n    public boolean containsKey( Object key ) {\n        boolean result = this.getPrecedeScope().containsKey( key );\n        if( !result ) {\n            result = super.containsKey( key );\n        }\n\n        return result;\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        boolean result = this.getPrecedeScope().containsValue( value );\n        if( !result ) {\n            result = super.containsKey( value );\n        }\n\n        return result;\n    }\n\n    @Override\n    public V get( Object key ) {\n        V val = this.getPrecedeScope().get( key );\n        if( val == null ) {\n            val = super.get( key );\n        }\n\n        return val;\n    }\n\n    @Override\n    public MultiScopeMap<K, V >  removeAll  ( Object key ) {\n        this.getPrecedeScope().removeAll( key );\n        return super.removeAll( key );\n    }\n\n    @Override\n    public void                  purge() {\n        this.getPrecedeScope().clear();\n        super.purge();\n    }\n\n    @Override\n    public void                  depurate() {\n        this.getPrecedeScope().clear();\n        super.depurate();\n    }\n\n    @Override\n    public void          
        overrideTo ( Map<K, V > neo ) {\n        Map<K, V > p = this.getPrecedeScope();\n        for ( Map.Entry<? extends K, ? extends V> e : p.entrySet() ){\n            neo.putIfAbsent( e.getKey(), e.getValue() );\n        }\n\n        super.overrideTo( neo );\n    }\n\n    @Override\n    public void                  onlyOverrideFamilyTo ( Map<K, V > neo ) {\n        super.overrideTo( neo );\n    }\n\n    @Override\n    public boolean               isScopeEmpty () {\n        boolean b = this.getPrecedeScope().isScopeEmpty();\n        if( b ) {\n            return super.isScopeEmpty();\n        }\n\n        return b;\n    }\n\n    @Override\n    public MultiScopeMap<K, V >  getAll        ( Object key, List<V > ret ) {\n        V v = this.getPrecedeScope().get( key );\n        if( v != null ) {\n            ret.add( v );\n        }\n        super.getAll( key, ret );\n\n        return this;\n    }\n\n    @Override\n    public V                     query         ( Object key, String szParentNS ) {\n        V v = this.getPrecedeScope().query( key, szParentNS );\n        if ( v != null ) {\n            return v;\n        }\n\n        return super.query( key, szParentNS );\n    }\n\n    @Override\n    public MultiScopeMap<K, V >  getScopeByNS  ( String szParentNS ) {\n        MultiScopeMap<K, V > s = this.getPrecedeScope().getScopeByNS( szParentNS );\n        if ( s != null ) {\n            return s;\n        }\n\n        return super.getScopeByNS( szParentNS );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/PrecedeMultiScopeMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.Map;\n\npublic interface PrecedeMultiScopeMap<K, V > extends MultiScopeMap<K, V > {\n    MultiScopeMap<K, V>            getPrecedeScope();\n\n    MultiScopeMap<K, V >           setPrecedeScope  ( MultiScopeMap<K, V > that );\n\n    default MultiScopeMap<K, V >   setPrecedeScope  ( Map<K, V > that ) {\n        return this.setPrecedeScope( new MultiScopeMaptron<>( that ) );\n    }\n\n    void                           onlyOverrideFamilyTo ( Map<K, V > neo ) ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ScopeMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\nimport java.util.*;\n\npublic interface ScopeMap<K, V > extends PineUnit, Map<K, V > {\n\n    Iterator<Entry<K, V> >         scopeIterator() ;\n\n    Set<Entry<K,V> >               scopeEntrySet() ;\n\n    Set<K >                        scopeKeySet()   ;\n\n    Collection<V >                 scopeValues()   ;\n\n    boolean                        isProgenitor()     ;\n\n    // [肃清] Clear itself and its ancestors, nothing left.\n    void                           purge();\n\n    // [净化] Clear itself and its ancestors's elements, reserving its ancestors tree.\n    void                           depurate();\n\n    // Override and apply all ancestors and itself to a new map.\n    void                           overrideTo ( Map<K, V > neo ) ;\n\n    boolean                        isScopeEmpty();\n\n    // Elevate self to a new super class as a new parent, append this child to current self this-scope\n    ScopeMap<K, V >                elevate    ( Map<K, V > child ) ;\n\n    ScopeMap<K, V >[]              ancestors  ();\n\n    ScopeMap<K, V >[]              scopes     ();\n\n    ScopeMap<K, V >                getAll     ( Object key, List<V> ret );\n\n    @SuppressWarnings(\"unchecked\")\n    default V[]                    getAll     ( Object key ) {\n        ArrayList<V> a = new ArrayList<>();\n        this.getAll( key, a );\n        return (V[]) a.toArray();\n    }\n\n    ScopeMap<K, V >                removeAll  ( Object key ) ;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/ScopeTrees.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.PineRuntimeException;\nimport com.pinecone.framework.system.functions.Function;\n\nimport java.util.ArrayDeque;\nimport java.util.Deque;\nimport java.util.List;\n\npublic abstract class ScopeTrees {\n    public static <K, V> void search ( MultiScopeMap<K, V > that, Function fn ) {\n        Deque<MultiScopeMap<K, V>> stack = new ArrayDeque<>();\n        stack.push( that );\n        while ( !stack.isEmpty() ) {\n            MultiScopeMap<K, V> currentMap = stack.pop();\n\n            try{\n                if( (boolean) fn.invoke( currentMap ) ) {\n                    break;\n                }\n            }\n            catch ( Exception e ) {\n                throw new PineRuntimeException( e );\n            }\n\n            List<MultiScopeMap<K, V> > parents = currentMap.getParents();\n            if( parents != null ) {\n                for ( MultiScopeMap<K, V> parent : parents ) {\n                    stack.push( parent );\n                }\n            }\n        }\n    }\n\n    public static <K, V> void groupByNodes( MultiScopeMap<K, V > that, List<ScopeMap<K, V> > list ) {\n        Deque<MultiScopeMap<K, V>> stack = new ArrayDeque<>();\n        stack.push( that );\n        while ( !stack.isEmpty() ) {\n            MultiScopeMap<K, V> currentMap = stack.pop();\n            if( currentMap != that ) {\n                list.add( currentMap );\n            }\n\n            List<MultiScopeMap<K, V> > parents = currentMap.getParents();\n            if( parents != null ) {\n                for ( MultiScopeMap<K, V> parent : parents ) {\n                    stack.push( parent );\n                }\n            }\n        }\n    }\n\n    public static <K, V> void groupByNodes( UniScopeMap<K, V > that, List<ScopeMap<K, V> > list ) {\n        UniScopeMap<K, V > p = that.parent();\n        while ( p != null ) {\n            list.add( p );\n            p = p.parent();\n        }\n 
   }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/SharedList.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.io.Serializable;\nimport java.util.*;\nimport java.util.stream.Collectors;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\n/**\n *  Pinecone Ursus For Java SharedList\n *  SharedList Author: Genius (https://geniusay.com)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Thanks for genius`s contribution.\n *  **********************************************************\n */\npublic class SharedList<T> extends AbstractList<T> implements List<T>, Serializable, PineUnit {\n\n    public interface SharedListBuilder{\n\n        static <T> SharedList<T> slice(int length, List<T> list){\n            return slice(0, length, list);\n        }\n\n        static <T> SharedList<T> slice(int start, int length, List<T> list){\n            checkStartAndLength(start, length, list);\n            return freezeList(start, length, list);\n        }\n\n        static <T> SharedList<T> merge(List<T> list){\n            listNotNull(list);\n            return freedomList(list);\n        }\n\n        @SafeVarargs\n        static <T> SharedList<T> merge(List<T>...lists){\n            List<SharedList<T>> sharedLists = Arrays.stream(lists).map(SharedListBuilder::freezeList).collect(Collectors.toList());\n            return new SharedList<>(0,new ArrayList<>(), sharedLists);\n        }\n\n        static <T> SharedList<T> freedomList(List<T> list){\n            return freedomList(0, freezeList(list));\n        }\n        static <T> SharedList<T> freedomList(int start, List<T> list){\n            return new SharedList<>(0, freezeList(start, list));\n        }\n\n        private static <T> SharedList<T> freezeList(List<T> list){\n            return freezeList(0, list);\n        }\n\n        private static <T> SharedList<T> freezeList(int start, List<T> list){\n            return freezeList(start, list.size() - start, list);\n      
  }\n\n        private static <T> SharedList<T> freezeList(int start, int length, List<T> list){\n            checkStartAndLength(start, length, list);\n            return new SharedList<>(start, length, list);\n        }\n    }\n\n    private static final int MAX_END_OFFSET = Integer.MAX_VALUE;\n\n    private final int endPtr;\n\n    private final int startPtr;\n\n    private int size;\n    private final List<T> elementData;\n\n    private final List<Integer> sharedSizeList;\n\n    private final List<SharedList<T>> sharedLists;\n\n\n    private SharedList(int startPtr, SharedList<T> sharedList) {\n        this(startPtr, MAX_END_OFFSET, new ArrayList<>(), new ArrayList<>(List.of(sharedList)));\n    }\n    private SharedList(int startPtr, List<T> elementData) {\n        this(startPtr, MAX_END_OFFSET, elementData, new ArrayList<>());\n    }\n\n    private SharedList(int startPtr, List<T> elementData, List<SharedList<T>> sharedLists) {\n        this(startPtr, MAX_END_OFFSET, elementData, sharedLists);\n    }\n\n    private SharedList(int startPtr, int endPtr, List<T> elementData) {\n        this(startPtr, endPtr, elementData, new ArrayList<>());\n    }\n\n    private SharedList(int startPtr, int endPtr, List<T> elementData, List<SharedList<T>> sharedLists) {\n        this.endPtr = endPtr;\n        this.startPtr = startPtr;\n        this.elementData = elementData;\n        this.sharedSizeList = new ArrayList<>();\n        this.sharedLists = new ArrayList<>();\n        this.size = 0;\n\n        initSharedList(sharedLists);\n        incrementSizeAndCheckExceed(elementData.size());\n    }\n\n    // 初始化 共享 list的时候，如果某个共享list超出当前的endPtr，则停止统计后续size以及sharedSizeList\n    private void initSharedList(List<SharedList<T>> sharedLists){\n        for (SharedList<T> sharedList : sharedLists) {\n            if(sharedList.isEmpty()){\n                continue;\n            }\n\n            this.sharedLists.add(sharedList);\n            int sharedSize = sharedList.size();\n            
addSharedSizeList(sharedSize);\n            if(incrementSizeAndCheckExceed(sharedSize)){\n                break;\n            }\n        }\n    }\n\n    private void addSharedSizeList(int sharedSize){\n        int size = Math.min(endPtr, sharedSizeList.isEmpty()?sharedSize:sharedSize+sharedSizeList.get(sharedSizeList.size()-1));\n        sharedSizeList.add(size);\n    }\n\n    private boolean incrementSizeAndCheckExceed(int num){\n        int nextSize;\n        if((nextSize = size + num) > endPtr){\n            size = endPtr;\n            return true;\n        }\n        size = nextSize;\n        return false;\n    }\n\n    private int selfSizeIndex(){\n        return sharedSizeList.size();\n    }\n\n    /**\n     * 基于startPtr的相对index\n     * @param index\n     * @return\n     */\n    private int offsetIndex(int index){\n        return startPtr + index;\n    }\n\n    /**\n     * 映射到对应共享列表的下标\n     * @param index\n     * @return\n     */\n    private int invokeIndex(int index, int sharedIndex){\n        return quickInvokeIndex(index, (sharedIndex == 0?0:sharedSizeList.get(sharedIndex-1)));\n    }\n\n\n    private int quickInvokeIndex(int index, int offset){\n        return offsetIndex(index) - offset;\n    }\n\n    /**\n     * 当为第一个元素时，查看index是否在 [0, sharedSizeList.get(index)] 范围内\n     * 当为最后一个元素时，查看index是否在 [sharedSizeList.get(index-1), selfSize()] 范围内\n     * @param index\n     * @param sharedIndex\n     * @return\n     */\n    private boolean inSizeRange(int index, int sharedIndex){\n        int l = sharedIndex == 0?0:sharedSizeList.get(sharedIndex-1);\n        int r = sharedIndex == sharedSizeList.size()?selfSize():sharedSizeList.get(sharedIndex);\n        return index >= l && index < r;\n    }\n\n    // 弱缓存\n    private int lastAccess = -1;\n    private int findListIndex(int index){\n\n        indexOutOfSizeThrow(index);\n\n        int selfIndex = selfSizeIndex();\n        if(sharedLists.isEmpty() && inSizeRange(index, selfIndex)){\n            return 
selfIndex;\n        }\n\n        if(lastAccess != -1 && inSizeRange(index, lastAccess)){\n            return lastAccess;\n        }\n        lastAccess = -1;\n\n        int l = 0;\n        int r = sharedSizeList.size()-1;\n        int mid = (l + r) >> 1;\n        while(l < r){\n            if(inSizeRange(index, mid)){\n                lastAccess = mid;\n                return mid;\n            }else if(index >= sharedSizeList.get(mid)){\n                l = mid + 1;\n            }else{\n                r = mid - 1;\n            }\n        }\n        lastAccess = l;\n        return l;\n    }\n\n    private List<T> findList(int listIndex){\n\n        SharedList<T> sharedList = listIndex == selfSizeIndex()?this:sharedLists.get(listIndex);\n        if(sharedList == this){\n            return this.elementData;\n        }\n        return sharedList;\n    }\n\n    @Override\n    public T get(int index) {\n        int listIndex = findListIndex(index);\n        int ptr = invokeIndex(index, listIndex);\n        return findList(listIndex).get(ptr);\n    }\n\n    @Override\n    public T set(int index, T element) {\n        int listIndex = findListIndex(index);\n        int ptr = invokeIndex(index, listIndex);\n        return findList(listIndex).set(ptr, element);\n    }\n\n    @Override\n    public boolean add(T t) {\n        incrementSizeThrowOutOfSize(1);\n        return this.elementData.add(t);\n    }\n\n    @Override\n    public int indexOf(Object o) {\n        return super.indexOf(o);\n    }\n\n    @Override\n    public int lastIndexOf(Object o) {\n        return super.lastIndexOf(o);\n    }\n\n    public SharedList<T> subList(){\n        return SharedListBuilder.slice(0, this.size(), this);\n    }\n\n    public SharedList<T> subList(int start, int length){\n        return SharedListBuilder.slice(start, length, this);\n    }\n\n    @Override\n    public int size() {\n        return size;\n    }\n\n    public int selfSize(){\n        return this.elementData.size();\n    
}\n\n    private boolean indexOutOfSize(int index){\n        return index < 0 || index >= size();\n    }\n\n    private void indexOutOfSizeThrow(int index){\n        if(indexOutOfSize(index)){\n            throw new IndexOutOfBoundsException(String.format(\"index %s out of range [0, %s]\", index, size()));\n        }\n    }\n    private void incrementSizeThrowOutOfSize(int num){\n        if (incrementSizeAndCheckExceed(num)) {\n            throw new IndexOutOfBoundsException(String.format(\"size %s + %s out of end ptr %s\", size(), num, endPtr));\n        }\n    }\n\n    private static <T> void checkStartAndLength(int start, int length, List<T> list){\n\n        listNotNull(list);\n\n        if(start < 0 || length < 0){\n            throw new IndexOutOfBoundsException(\"start and length must >= 0\");\n        }\n\n        int totalLength;\n        if((totalLength = start + length) > list.size()){\n            throw new IndexOutOfBoundsException(String.format(\"start + length %s must in range [0, %s)\", totalLength, list.size()));\n        }\n    }\n\n    private static <T> void listNotNull(List<T> list){\n        if(Objects.isNull(list)){\n            throw new NullPointerException(\"list is nullptr\");\n        }\n    }\n    public Iterator<T> iterator(){\n        return new Itr();\n    }\n\n    private Iterator<T> skipIterator(int skipNum){\n        return new Itr(skipNum);\n    }\n\n    private class Itr implements Iterator<T> {\n\n        private int cursor;\n\n        private int nowSharedListIndex = -1;\n\n        private Iterator<T> currentSharedItr = null;\n\n        private Iterator<T> selfItr = null;\n\n        int sum = 0;\n\n        private boolean selfFlag = false;\n\n        private final boolean selfElementIsShared = elementData instanceof SharedList;\n\n        private int lastCursor = -1;\n\n        public Itr() {\n        }\n\n        public Itr(int cursor) {\n            this.cursor = cursor;\n        }\n\n        @Override\n        public 
boolean hasNext() {\n            return !indexOutOfSize(cursor);\n        }\n\n        /**\n         * 采用优化遍历法，不再采用二分的方式去一个个寻找对应的index\n         * 而是采用遍历的方式，遍历过程中，如果发现当前的index已经大于等于endPtr，则需要切换到下一个sharedList\n         * @return T\n         */\n        @Override\n        public T next() {\n            int i = cursor++;\n\n            if(currentSharedItr!=null && currentSharedItr.hasNext()){\n                return currentSharedItr.next();\n            }\n\n            for(;;){\n                if(selfFlag || sharedLists.isEmpty()){\n                    int ptr = quickInvokeIndex(i, sum);\n                    if(selfElementIsShared){\n                        if(selfItr == null) {\n                            selfItr = ((SharedList<T>) elementData).skipIterator(ptr);\n                        }\n                        return selfItr.next();\n                    }else{\n                        return elementData.get(lastCursor = ptr);\n                    }\n                }\n\n                nowSharedListIndex++;\n                if(nowSharedListIndex>=sharedLists.size()){\n                    selfFlag = true;\n                    continue;\n                }\n                SharedList<T> sharedList = sharedLists.get(nowSharedListIndex);\n                currentSharedItr = sharedList.skipIterator(quickInvokeIndex(i, sum));\n                sum += sharedList.size();\n                return currentSharedItr.next();\n            }\n\n        }\n    }\n\n    @Override\n    public String toString() {\n        StringBuilder sb = new StringBuilder();\n\n        sb.append(\"[\");\n        for (T t : this) {\n            sb.append(t).append(\",\");\n        }\n        sb.deleteCharAt(sb.length()-1);\n        sb.append(\"]\");\n\n        return sb.toString();\n    }\n\n    @Override\n    public boolean containsKey( Object elm ) {\n        try {\n            if( elm instanceof Number ) {\n                int nElm = ( (Number)elm ).intValue();\n                int nLength = 
this.size();\n                if( nElm < 0 || nLength == 0 ){\n                    return false;\n                }\n                return nLength > nElm;\n            }\n            return this.containsKey( Integer.parseInt(elm.toString()) );\n        }\n        catch ( NumberFormatException e ){\n            return false;\n        }\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object elm ) {\n        return this.containsKey( elm );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/SingletonSupplier.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.function.Supplier;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.Assert;\n\npublic class SingletonSupplier<T> implements Supplier<T> {\n    @Nullable\n    private final Supplier<? extends T> instanceSupplier;\n    @Nullable\n    private final Supplier<? extends T> defaultSupplier;\n    @Nullable\n    private volatile T singletonInstance;\n\n    public SingletonSupplier(@Nullable T instance, Supplier<? extends T> defaultSupplier) {\n        this.instanceSupplier = null;\n        this.defaultSupplier = defaultSupplier;\n        this.singletonInstance = instance;\n    }\n\n    public SingletonSupplier(@Nullable Supplier<? extends T> instanceSupplier, Supplier<? extends T> defaultSupplier) {\n        this.instanceSupplier = instanceSupplier;\n        this.defaultSupplier = defaultSupplier;\n    }\n\n    private SingletonSupplier(Supplier<? extends T> supplier) {\n        this.instanceSupplier = supplier;\n        this.defaultSupplier = null;\n    }\n\n    private SingletonSupplier(T singletonInstance) {\n        this.instanceSupplier = null;\n        this.defaultSupplier = null;\n        this.singletonInstance = singletonInstance;\n    }\n\n    @Nullable\n    @Override\n    public T get() {\n        T instance = this.singletonInstance;\n        if (instance == null) {\n            synchronized(this) {\n                instance = this.singletonInstance;\n                if (instance == null) {\n                    if (this.instanceSupplier != null) {\n                        instance = this.instanceSupplier.get();\n                    }\n\n                    if (instance == null && this.defaultSupplier != null) {\n                        instance = this.defaultSupplier.get();\n                    }\n\n                    this.singletonInstance = instance;\n                }\n            }\n        }\n\n        return instance;\n    }\n\n    public T obtain() 
{\n        T instance = this.get();\n        Assert.state(instance != null, \"No instance from Supplier\");\n        return instance;\n    }\n\n    public static <T> SingletonSupplier<T> of(T instance) {\n        return new SingletonSupplier<>(instance);\n    }\n\n    @Nullable\n    public static <T> SingletonSupplier<T> ofNullable( @Nullable T instance ) {\n        return instance != null ? new SingletonSupplier<>(instance) : null;\n    }\n\n    public static <T> SingletonSupplier<T> of( Supplier<T> supplier ) {\n        return new SingletonSupplier<>(supplier);\n    }\n\n    @Nullable\n    public static <T> SingletonSupplier<T> ofNullable( @Nullable Supplier<T> supplier ) {\n        return supplier != null ? new SingletonSupplier<>(supplier) : null;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/TreeMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.system.prototype.TypeIndex;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.io.IOException;\nimport java.io.ObjectInputStream;\nimport java.io.ObjectOutputStream;\nimport java.util.*;\nimport java.util.function.Consumer;\nimport java.io.Serializable;\nimport java.util.function.BiConsumer;\nimport java.util.function.BiFunction;\n\n\npublic class TreeMap<K,V> extends AbstractMap<K,V> implements NavigableMap<K,V>, Cloneable, Serializable, Mapnut<K, V> {\n    protected final Comparator<? super K> comparator;\n\n    protected transient Entry<K,V> root;\n\n    protected transient int size = 0;\n\n    protected transient int modCount = 0;\n\n    public TreeMap() {\n        this.comparator = null;\n    }\n\n    public TreeMap( Comparator<? super K> comparator ) {\n        this.comparator = comparator;\n    }\n\n    public TreeMap( Map<? extends K, ? extends V> m ) {\n        this.comparator = null;\n        this.putAll(m);\n    }\n\n    public TreeMap( SortedMap<K, ? 
extends V> m ) {\n        this.comparator = m.comparator();\n        try {\n            this.buildFromSorted(m.size(), m.entrySet().iterator(), null, null);\n        }\n        catch ( IOException | ClassNotFoundException cannotHappen ) {\n\n        }\n    }\n\n\n    // Query Operations\n    public int size() {\n        return this.size;\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object key ){\n        return this.containsKey( key );\n    }\n\n    public boolean containsKey( Object key ) {\n        return this.getEntry(key) != null;\n    }\n\n    public boolean containsValue( Object value ) {\n        for ( Entry<K,V> e = this.getFirstEntry(); e != null; e = successor(e) )\n            if ( valEquals(value, e.value) ) {\n                return true;\n            }\n        return false;\n    }\n\n    public V get( Object key ) {\n        Entry<K,V> p = this.getEntry(key);\n        return (p==null ? null : p.value);\n    }\n\n    @Override\n    public Map.Entry<K, V > getEntryByKey( Object compatibleKey ) {\n        return this.getEntry( compatibleKey );\n    }\n\n    @Override\n    public Map.Entry<K, V > getEntryCopyByKey( Object compatibleKey ) {\n        Map.Entry<K, V > auth = this.getEntryByKey( compatibleKey );\n        if( auth != null ) {\n            return new KeyValue<>( auth.getKey(), auth.getValue() );\n        }\n        return null;\n    }\n\n    public Comparator<? 
super K> comparator() {\n        return comparator;\n    }\n\n    /**\n     * @throws NoSuchElementException {@inheritDoc}\n     */\n    public K firstKey() {\n        return key(getFirstEntry());\n    }\n\n    /**\n     * @throws NoSuchElementException {@inheritDoc}\n     */\n    public K lastKey() {\n        return key(getLastEntry());\n    }\n\n    /**\n     * Copies all of the mappings from the specified map to this map.\n     * These mappings replace any mappings that this map had for any\n     * of the keys currently in the specified map.\n     *\n     * @param  map mappings to be stored in this map\n     * @throws ClassCastException if the class of a key or value in\n     *         the specified map prevents it from being stored in this map\n     * @throws NullPointerException if the specified map is null or\n     *         the specified map contains a null key and this map does not\n     *         permit null keys\n     */\n    public void putAll( Map<? extends K, ? extends V> map ) {\n        this.putMapEntries( map, true );\n    }\n\n    protected void putMapEntries( Map<? extends K, ? extends V> map, boolean evict ) {\n        int mapSize = map.size();\n        if ( this.size == 0 && mapSize != 0 && map instanceof SortedMap && !( map instanceof ListedSortedMap ) ) {\n            Comparator<?> c = ((SortedMap<?,?>)map).comparator();\n            if ( c == this.comparator || (c != null && c.equals(this.comparator)) ) {\n                ++this.modCount;\n                try {\n                    this.buildFromSorted( mapSize, map.entrySet().iterator(), null, null );\n                }\n                catch (IOException | ClassNotFoundException cannotHappen) {\n                }\n                return;\n            }\n        }\n\n        for ( Map.Entry<? extends K, ? 
extends V> e : map.entrySet() ) {\n            K key = e.getKey();\n            V value = e.getValue();\n            this.putVal( key, value, false, evict );\n        }\n    }\n\n    /**\n     * Returns this map's entry for the given key, or {@code null} if the map\n     * does not contain an entry for the key.\n     *\n     * @return this map's entry for the given key, or {@code null} if the map\n     *         does not contain an entry for the key\n     * @throws ClassCastException if the specified key cannot be compared\n     *         with the keys currently in the map\n     * @throws NullPointerException if the specified key is null\n     *         and this map uses natural ordering, or its comparator\n     *         does not permit null keys\n     */\n    protected final Entry<K,V> getEntry(Object key) {\n        // Offload comparator-based version for sake of performance\n        if ( this.comparator != null ) {\n            return this.getEntryUsingComparator(key);\n        }\n        if ( key == null ) {\n            throw new NullPointerException();\n        }\n        @SuppressWarnings(\"unchecked\")\n        Comparable<? super K> k = (Comparable<? super K>) key;\n        Entry<K,V> p = this.root;\n        while ( p != null ) {\n            int cmp = k.compareTo(p.key);\n            if ( cmp < 0 ) {\n                p = p.left;\n            }\n            else if ( cmp > 0 ) {\n                p = p.right;\n            }\n            else {\n                return p;\n            }\n        }\n        return null;\n    }\n\n    protected final Entry<K,V> getEntryUsingComparator( Object key ) {\n        @SuppressWarnings(\"unchecked\")\n        K k = (K) key;\n        Comparator<? 
super K> cpr = comparator;\n        if ( cpr != null ) {\n            Entry<K,V> p = this.root;\n            while ( p != null ) {\n                int cmp = cpr.compare(k, p.key);\n                if ( cmp < 0 ) {\n                    p = p.left;\n                }\n                else if ( cmp > 0 ) {\n                    p = p.right;\n                }\n                else {\n                    return p;\n                }\n            }\n        }\n        return null;\n    }\n\n    /**\n     * Gets the entry corresponding to the specified key; if no such entry\n     * exists, returns the entry for the least key greater than the specified\n     * key; if no such entry exists (i.e., the greatest key in the Tree is less\n     * than the specified key), returns {@code null}.\n     */\n    protected final Entry<K,V> getCeilingEntry(K key) {\n        Entry<K,V> p = root;\n        while (p != null) {\n            int cmp = compare(key, p.key);\n            if (cmp < 0) {\n                if (p.left != null)\n                    p = p.left;\n                else\n                    return p;\n            } else if (cmp > 0) {\n                if (p.right != null) {\n                    p = p.right;\n                } else {\n                    Entry<K,V> parent = p.parent;\n                    Entry<K,V> ch = p;\n                    while (parent != null && ch == parent.right) {\n                        ch = parent;\n                        parent = parent.parent;\n                    }\n                    return parent;\n                }\n            } else\n                return p;\n        }\n        return null;\n    }\n\n    /**\n     * Gets the entry corresponding to the specified key; if no such entry\n     * exists, returns the entry for the greatest key less than the specified\n     * key; if no such entry exists, returns {@code null}.\n     */\n    protected final Entry<K,V> getFloorEntry(K key) {\n        Entry<K,V> p = root;\n        while (p != 
null) {\n            int cmp = compare(key, p.key);\n            if (cmp > 0) {\n                if (p.right != null)\n                    p = p.right;\n                else\n                    return p;\n            } else if (cmp < 0) {\n                if (p.left != null) {\n                    p = p.left;\n                } else {\n                    Entry<K,V> parent = p.parent;\n                    Entry<K,V> ch = p;\n                    while (parent != null && ch == parent.left) {\n                        ch = parent;\n                        parent = parent.parent;\n                    }\n                    return parent;\n                }\n            } else\n                return p;\n\n        }\n        return null;\n    }\n\n    /**\n     * Gets the entry for the least key greater than the specified\n     * key; if no such entry exists, returns the entry for the least\n     * key greater than the specified key; if no such entry exists\n     * returns {@code null}.\n     */\n    protected final Entry<K,V> getHigherEntry(K key) {\n        Entry<K,V> p = root;\n        while (p != null) {\n            int cmp = compare(key, p.key);\n            if (cmp < 0) {\n                if (p.left != null)\n                    p = p.left;\n                else\n                    return p;\n            } else {\n                if (p.right != null) {\n                    p = p.right;\n                } else {\n                    Entry<K,V> parent = p.parent;\n                    Entry<K,V> ch = p;\n                    while (parent != null && ch == parent.right) {\n                        ch = parent;\n                        parent = parent.parent;\n                    }\n                    return parent;\n                }\n            }\n        }\n        return null;\n    }\n\n    /**\n     * Returns the entry for the greatest key less than the specified key; if\n     * no such entry exists (i.e., the least key in the Tree is greater than\n     * the 
specified key), returns {@code null}.\n     */\n    protected final Entry<K,V> getLowerEntry(K key) {\n        Entry<K,V> p = root;\n        while (p != null) {\n            int cmp = compare(key, p.key);\n            if (cmp > 0) {\n                if (p.right != null)\n                    p = p.right;\n                else\n                    return p;\n            } else {\n                if (p.left != null) {\n                    p = p.left;\n                } else {\n                    Entry<K,V> parent = p.parent;\n                    Entry<K,V> ch = p;\n                    while (parent != null && ch == parent.left) {\n                        ch = parent;\n                        parent = parent.parent;\n                    }\n                    return parent;\n                }\n            }\n        }\n        return null;\n    }\n\n    /** Overridable Method, for unified customize.*/\n    protected Entry<K,V> spawnNode( K key, V value, Entry<K,V > parent ) {\n        return new Entry<>( key, value, parent );\n    }\n\n    protected void afterNodeAccess( Entry<K,V> p ) { }\n\n    protected void afterNodeInsertion( boolean evict ) { }\n\n    protected void afterNodeRemoval( Entry<K,V> p ) { }\n\n    protected V putVal( K key, V value, boolean onlyIfAbsent, boolean evict ) {\n        Entry<K,V> t = this.root;\n        if ( t == null ) {\n            this.compare( key, key ); // type (and possibly null) check\n\n            this.root = this.spawnNode( key, value, null );\n            this.size = 1;\n            ++this.modCount;\n            return null;\n        }\n        int cmp;\n        Entry<K,V> parent;\n        // split comparator and comparable paths\n        Comparator<? 
super K> cpr = this.comparator;\n\n        Entry<K,V> legacy = null;\n        if ( cpr != null ) {\n            do {\n                parent = t;\n                cmp = cpr.compare(key, t.key);\n                if (cmp < 0) {\n                    t = t.left;\n                }\n                else if ( cmp > 0 ) {\n                    t = t.right;\n                }\n                else {\n                    legacy = t;\n                    break;\n                }\n            }\n            while ( t != null );\n        }\n        else {\n            if ( key == null ) {\n                throw new NullPointerException();\n            }\n            @SuppressWarnings(\"unchecked\")\n            Comparable<? super K> k = (Comparable<? super K>) key;\n            do {\n                parent = t;\n                cmp = k.compareTo(t.key);\n                if ( cmp < 0 ) {\n                    t = t.left;\n                }\n                else if ( cmp > 0 ) {\n                    t = t.right;\n                }\n                else {\n                    legacy = t;\n                    break;\n                }\n            }\n            while ( t != null );\n        }\n\n        if ( legacy != null ) { // existing mapping for key\n            V oldValue = legacy.value;\n            if ( !onlyIfAbsent || oldValue == null ) {\n                legacy.setValue( value );\n            }\n            this.afterNodeAccess( legacy );\n            return oldValue;\n        }\n\n        Entry<K,V> e = this.spawnNode( key, value, parent );\n        if ( cmp < 0 ) {\n            parent.left = e;\n        }\n        else {\n            parent.right = e;\n        }\n        this.fixAfterInsertion(e);\n        ++this.size;\n        ++this.modCount;\n        this.afterNodeInsertion( evict );\n        return null;\n    }\n\n    public V put( K key, V value ) {\n        return this.putVal( key, value, false, true );\n    }\n\n    @Override\n    public V putIfAbsent(K key, V 
value) {\n        return this.putVal( key, value, true, true );\n    }\n\n    public V remove( Object key ) {\n        Entry<K,V > p = this.getEntry(key);\n        if ( p == null ) {\n            return null;\n        }\n\n        V oldValue = p.value;\n        this.deleteEntry(p);\n        return oldValue;\n    }\n\n    public void clear() {\n        this.modCount++;\n        this.size = 0;\n        this.root = null;\n    }\n\n\n    protected TreeMap<?,?> superClone() {\n        TreeMap<?,?> clone;\n        try {\n            clone = (TreeMap<?,?>) super.clone();\n        }\n        catch ( CloneNotSupportedException e ) {\n            throw new InternalError(e);\n        }\n\n        // Put clone into \"virgin\" state (except for comparator)\n        clone.root = null;\n        clone.size = 0;\n        clone.modCount = 0;\n        clone.entrySet = null;\n        clone.navigableKeySet = null;\n        clone.descendingMap = null;\n\n        return clone;\n    }\n\n    /**\n     * Returns a shallow copy of this {@code TreeMap} instance. 
(The keys and\n     * values themselves are not cloned.)\n     *\n     * @return a shallow copy of this map\n     */\n    public Object clone() {\n        TreeMap<?,?> clone = this.superClone();\n\n        // Initialize clone with our mappings\n        try {\n            clone.buildFromSorted(size, entrySet().iterator(), null, null);\n        }\n        catch ( IOException | ClassNotFoundException cannotHappen ) {\n        }\n\n        return clone;\n    }\n\n    // NavigableMap API methods\n\n    public Map.Entry<K,V> firstEntry() {\n        return exportEntry(getFirstEntry());\n    }\n\n    public Map.Entry<K,V> lastEntry() {\n        return exportEntry(getLastEntry());\n    }\n\n    public Map.Entry<K,V> pollFirstEntry() {\n        Entry<K,V> p = getFirstEntry();\n        Map.Entry<K,V> result = exportEntry(p);\n        if (p != null)\n            deleteEntry(p);\n        return result;\n    }\n\n    public Map.Entry<K,V> pollLastEntry() {\n        Entry<K,V> p = getLastEntry();\n        Map.Entry<K,V> result = exportEntry(p);\n        if (p != null)\n            deleteEntry(p);\n        return result;\n    }\n\n    public Map.Entry<K,V> lowerEntry(K key) {\n        return exportEntry(getLowerEntry(key));\n    }\n\n    public K lowerKey(K key) {\n        return keyOrNull(getLowerEntry(key));\n    }\n\n    public Map.Entry<K,V> floorEntry(K key) {\n        return exportEntry(getFloorEntry(key));\n    }\n\n    public K floorKey(K key) {\n        return keyOrNull(getFloorEntry(key));\n    }\n\n    public Map.Entry<K,V> ceilingEntry(K key) {\n        return exportEntry(getCeilingEntry(key));\n    }\n\n    public K ceilingKey(K key) {\n        return keyOrNull(getCeilingEntry(key));\n    }\n\n    public Map.Entry<K,V> higherEntry(K key) {\n        return exportEntry(getHigherEntry(key));\n    }\n\n    public K higherKey(K key) {\n        return keyOrNull(getHigherEntry(key));\n    }\n\n    // Views\n\n    /**\n     * Fields initialized to contain an instance of the 
entry set view\n     * the first time this view is requested.  Views are stateless, so\n     * there's no reason to create more than one.\n     */\n    protected transient Set<Map.Entry<K,V> > entrySet;\n    protected transient KeySet<K> navigableKeySet;\n    protected transient NavigableMap<K,V> descendingMap;\n\n    /**\n     * Returns a {@link Set} view of the keys contained in this map.\n     *\n     * <p>The set's iterator returns the keys in ascending order.\n     * The set's spliterator is\n     * <em><a href=\"Spliterator.html#binding\">late-binding</a></em>,\n     * <em>fail-fast</em>, and additionally reports {@link Spliterator#SORTED}\n     * and {@link Spliterator#ORDERED} with an encounter order that is ascending\n     * key order.  The spliterator's comparator (see\n     * {@link java.util.Spliterator#getComparator()}) is {@code null} if\n     * the tree map's comparator (see {@link #comparator()}) is {@code null}.\n     * Otherwise, the spliterator's comparator is the same as or imposes the\n     * same total ordering as the tree map's comparator.\n     *\n     * <p>The set is backed by the map, so changes to the map are\n     * reflected in the set, and vice-versa.  If the map is modified\n     * while an iteration over the set is in progress (except through\n     * the iterator's own {@code remove} operation), the results of\n     * the iteration are undefined.  The set supports element removal,\n     * which removes the corresponding mapping from the map, via the\n     * {@code Iterator.remove}, {@code Set.remove},\n     * {@code removeAll}, {@code retainAll}, and {@code clear}\n     * operations.  It does not support the {@code add} or {@code addAll}\n     * operations.\n     */\n    public Set<K> keySet() {\n        return navigableKeySet();\n    }\n\n    /**\n     * @since 1.6\n     */\n    public NavigableSet<K> navigableKeySet() {\n        KeySet<K> nks = navigableKeySet;\n        return (nks != null) ? 
nks : (navigableKeySet = new KeySet<>(this));\n    }\n\n    /**\n     * @since 1.6\n     */\n    public NavigableSet<K> descendingKeySet() {\n        return descendingMap().navigableKeySet();\n    }\n\n    /**\n     * Returns a {@link Collection} view of the values contained in this map.\n     *\n     * <p>The collection's iterator returns the values in ascending order\n     * of the corresponding keys. The collection's spliterator is\n     * <em><a href=\"Spliterator.html#binding\">late-binding</a></em>,\n     * <em>fail-fast</em>, and additionally reports {@link Spliterator#ORDERED}\n     * with an encounter order that is ascending order of the corresponding\n     * keys.\n     *\n     * <p>The collection is backed by the map, so changes to the map are\n     * reflected in the collection, and vice-versa.  If the map is\n     * modified while an iteration over the collection is in progress\n     * (except through the iterator's own {@code remove} operation),\n     * the results of the iteration are undefined.  The collection\n     * supports element removal, which removes the corresponding\n     * mapping from the map, via the {@code Iterator.remove},\n     * {@code Collection.remove}, {@code removeAll},\n     * {@code retainAll} and {@code clear} operations.  It does not\n     * support the {@code add} or {@code addAll} operations.\n     */\n    public Collection<V> values() {\n        Collection<V> vs = values;\n        if (vs == null) {\n            vs = new Values();\n            values = vs;\n        }\n        return vs;\n    }\n\n    @Override\n    public Set<Map.Entry<K,V> > entrySet() {\n        Set<Map.Entry<K,V>>  es = this.entrySet;\n        return (es != null) ? es : (this.entrySet = new EntrySet());\n    }\n\n    /**\n     * @since 1.6\n     */\n    public NavigableMap<K, V> descendingMap() {\n        NavigableMap<K, V> km = descendingMap;\n        return (km != null) ? 
km :\n                (descendingMap = new DescendingSubMap<>(this,\n                        true, null, true,\n                        true, null, true));\n    }\n\n    /**\n     * @throws ClassCastException       {@inheritDoc}\n     * @throws NullPointerException if {@code fromKey} or {@code toKey} is\n     *         null and this map uses natural ordering, or its comparator\n     *         does not permit null keys\n     * @throws IllegalArgumentException {@inheritDoc}\n     * @since 1.6\n     */\n    public NavigableMap<K,V> subMap(K fromKey, boolean fromInclusive,\n                                    K toKey,   boolean toInclusive) {\n        return new AscendingSubMap<>(this,\n                false, fromKey, fromInclusive,\n                false, toKey,   toInclusive);\n    }\n\n    /**\n     * @throws ClassCastException       {@inheritDoc}\n     * @throws NullPointerException if {@code toKey} is null\n     *         and this map uses natural ordering, or its comparator\n     *         does not permit null keys\n     * @throws IllegalArgumentException {@inheritDoc}\n     * @since 1.6\n     */\n    public NavigableMap<K,V> headMap(K toKey, boolean inclusive) {\n        return new AscendingSubMap<>(this,\n                true,  null,  true,\n                false, toKey, inclusive);\n    }\n\n    /**\n     * @throws ClassCastException       {@inheritDoc}\n     * @throws NullPointerException if {@code fromKey} is null\n     *         and this map uses natural ordering, or its comparator\n     *         does not permit null keys\n     * @throws IllegalArgumentException {@inheritDoc}\n     * @since 1.6\n     */\n    public NavigableMap<K,V> tailMap(K fromKey, boolean inclusive) {\n        return new AscendingSubMap<>(this,\n                false, fromKey, inclusive,\n                true,  null,    true);\n    }\n\n    /**\n     * @throws ClassCastException       {@inheritDoc}\n     * @throws NullPointerException if {@code fromKey} or {@code toKey} is\n     *      
   null and this map uses natural ordering, or its comparator\n     *         does not permit null keys\n     * @throws IllegalArgumentException {@inheritDoc}\n     */\n    public SortedMap<K,V> subMap(K fromKey, K toKey) {\n        return subMap(fromKey, true, toKey, false);\n    }\n\n    /**\n     * @throws ClassCastException       {@inheritDoc}\n     * @throws NullPointerException if {@code toKey} is null\n     *         and this map uses natural ordering, or its comparator\n     *         does not permit null keys\n     * @throws IllegalArgumentException {@inheritDoc}\n     */\n    public SortedMap<K,V> headMap(K toKey) {\n        return headMap(toKey, false);\n    }\n\n    /**\n     * @throws ClassCastException       {@inheritDoc}\n     * @throws NullPointerException if {@code fromKey} is null\n     *         and this map uses natural ordering, or its comparator\n     *         does not permit null keys\n     * @throws IllegalArgumentException {@inheritDoc}\n     */\n    public SortedMap<K,V> tailMap(K fromKey) {\n        return tailMap(fromKey, true);\n    }\n\n    @Override\n    public boolean replace( K key, V oldValue, V newValue ) {\n        Entry<K,V> p = this.getEntry(key);\n        if ( p != null && Objects.equals(oldValue, p.value) ) {\n            p.value = newValue;\n            this.afterNodeAccess( p );\n            return true;\n        }\n        return false;\n    }\n\n    @Override\n    public V replace( K key, V value ) {\n        Entry<K,V> p = getEntry(key);\n        if ( p != null ) {\n            V oldValue = p.value;\n            p.value = value;\n            this.afterNodeAccess( p );\n            return oldValue;\n        }\n        return null;\n    }\n\n    @Override\n    public void forEach(BiConsumer<? super K, ? 
super V> action) {\n        Objects.requireNonNull(action);\n        int expectedModCount = modCount;\n        for (Entry<K, V> e = getFirstEntry(); e != null; e = successor(e)) {\n            action.accept(e.key, e.value);\n\n            if (expectedModCount != modCount) {\n                throw new ConcurrentModificationException();\n            }\n        }\n    }\n\n    @Override\n    public void replaceAll(BiFunction<? super K, ? super V, ? extends V> function) {\n        Objects.requireNonNull(function);\n        int expectedModCount = modCount;\n\n        for (Entry<K, V> e = getFirstEntry(); e != null; e = successor(e)) {\n            e.value = function.apply(e.key, e.value);\n\n            if (expectedModCount != modCount) {\n                throw new ConcurrentModificationException();\n            }\n        }\n    }\n\n    // View class support\n\n    class Values extends AbstractCollection<V> {\n        public Iterator<V> iterator() {\n            return new ValueIterator(getFirstEntry());\n        }\n\n        public int size() {\n            return TreeMap.this.size();\n        }\n\n        public boolean contains(Object o) {\n            return TreeMap.this.containsValue(o);\n        }\n\n        public boolean remove(Object o) {\n            for (Entry<K,V> e = getFirstEntry(); e != null; e = successor(e)) {\n                if (valEquals(e.getValue(), o)) {\n                    deleteEntry(e);\n                    return true;\n                }\n            }\n            return false;\n        }\n\n        public void clear() {\n            TreeMap.this.clear();\n        }\n\n        public Spliterator<V> spliterator() {\n            return new ValueSpliterator<>(TreeMap.this, null, null, 0, -1, 0);\n        }\n    }\n\n    class EntrySet extends AbstractSet<Map.Entry<K,V>> {\n        public Iterator<Map.Entry<K,V>> iterator() {\n            return new EntryIterator(getFirstEntry());\n        }\n\n        public boolean contains(Object o) {\n     
       if (!(o instanceof Map.Entry))\n                return false;\n            Map.Entry<?,?> entry = (Map.Entry<?,?>) o;\n            Object value = entry.getValue();\n            Entry<K,V> p = getEntry(entry.getKey());\n            return p != null && valEquals(p.getValue(), value);\n        }\n\n        public boolean remove(Object o) {\n            if (!(o instanceof Map.Entry))\n                return false;\n            Map.Entry<?,?> entry = (Map.Entry<?,?>) o;\n            Object value = entry.getValue();\n            Entry<K,V> p = getEntry(entry.getKey());\n            if (p != null && valEquals(p.getValue(), value)) {\n                deleteEntry(p);\n                return true;\n            }\n            return false;\n        }\n\n        public int size() {\n            return TreeMap.this.size();\n        }\n\n        public void clear() {\n            TreeMap.this.clear();\n        }\n\n        public Spliterator<Map.Entry<K,V>> spliterator() {\n            return new EntrySpliterator<>(TreeMap.this, null, null, 0, -1, 0);\n        }\n    }\n\n    /*\n     * Unlike Values and EntrySet, the KeySet class is static,\n     * delegating to a NavigableMap to allow use by SubMaps, which\n     * outweighs the ugliness of needing type-tests for the following\n     * Iterator methods that are defined appropriately in main versus\n     * submap classes.\n     */\n\n    Iterator<K> keyIterator() {\n        return new KeyIterator(getFirstEntry());\n    }\n\n    Iterator<K> descendingKeyIterator() {\n        return new DescendingKeyIterator(getLastEntry());\n    }\n\n    static final class KeySet<E> extends AbstractSet<E> implements NavigableSet<E> {\n        private final NavigableMap<E, ?> m;\n        KeySet(NavigableMap<E,?> map) { m = map; }\n\n        public Iterator<E> iterator() {\n            if (m instanceof TreeMap)\n                return ((TreeMap<E,?>)m).keyIterator();\n            else\n                return 
((TreeMap.NavigableSubMap<E,?>)m).keyIterator();\n        }\n\n        public Iterator<E> descendingIterator() {\n            if (m instanceof TreeMap)\n                return ((TreeMap<E,?>)m).descendingKeyIterator();\n            else\n                return ((TreeMap.NavigableSubMap<E,?>)m).descendingKeyIterator();\n        }\n\n        public int size() { return m.size(); }\n        public boolean isEmpty() { return m.isEmpty(); }\n        public boolean contains(Object o) { return m.containsKey(o); }\n        public void clear() { m.clear(); }\n        public E lower(E e) { return m.lowerKey(e); }\n        public E floor(E e) { return m.floorKey(e); }\n        public E ceiling(E e) { return m.ceilingKey(e); }\n        public E higher(E e) { return m.higherKey(e); }\n        public E first() { return m.firstKey(); }\n        public E last() { return m.lastKey(); }\n        public Comparator<? super E> comparator() { return m.comparator(); }\n        public E pollFirst() {\n            Map.Entry<E,?> e = m.pollFirstEntry();\n            return (e == null) ? null : e.getKey();\n        }\n        public E pollLast() {\n            Map.Entry<E,?> e = m.pollLastEntry();\n            return (e == null) ? 
null : e.getKey();\n        }\n        public boolean remove(Object o) {\n            int oldSize = size();\n            m.remove(o);\n            return size() != oldSize;\n        }\n        public NavigableSet<E> subSet(E fromElement, boolean fromInclusive,\n                                      E toElement,   boolean toInclusive) {\n            return new KeySet<>(m.subMap(fromElement, fromInclusive,\n                    toElement,   toInclusive));\n        }\n        public NavigableSet<E> headSet(E toElement, boolean inclusive) {\n            return new KeySet<>(m.headMap(toElement, inclusive));\n        }\n        public NavigableSet<E> tailSet(E fromElement, boolean inclusive) {\n            return new KeySet<>(m.tailMap(fromElement, inclusive));\n        }\n        public SortedSet<E> subSet(E fromElement, E toElement) {\n            return subSet(fromElement, true, toElement, false);\n        }\n        public SortedSet<E> headSet(E toElement) {\n            return headSet(toElement, false);\n        }\n        public SortedSet<E> tailSet(E fromElement) {\n            return tailSet(fromElement, true);\n        }\n        public NavigableSet<E> descendingSet() {\n            return new KeySet<>(m.descendingMap());\n        }\n\n        public Spliterator<E> spliterator() {\n            return keySpliteratorFor(m);\n        }\n    }\n\n    /**\n     * Base class for TreeMap Iterators\n     */\n    abstract class PrivateEntryIterator<T> implements Iterator<T> {\n        Entry<K,V> next;\n        Entry<K,V> lastReturned;\n        int expectedModCount;\n\n        PrivateEntryIterator(Entry<K,V> first) {\n            expectedModCount = modCount;\n            lastReturned = null;\n            next = first;\n        }\n\n        public final boolean hasNext() {\n            return next != null;\n        }\n\n        final Entry<K,V> nextEntry() {\n            Entry<K,V> e = next;\n            if (e == null)\n                throw new NoSuchElementException();\n  
          if (modCount != expectedModCount)\n                throw new ConcurrentModificationException();\n            next = successor(e);\n            lastReturned = e;\n            return e;\n        }\n\n        final Entry<K,V> prevEntry() {\n            Entry<K,V> e = next;\n            if (e == null)\n                throw new NoSuchElementException();\n            if (modCount != expectedModCount)\n                throw new ConcurrentModificationException();\n            next = predecessor(e);\n            lastReturned = e;\n            return e;\n        }\n\n        public void remove() {\n            if (lastReturned == null)\n                throw new IllegalStateException();\n            if (modCount != expectedModCount)\n                throw new ConcurrentModificationException();\n            // deleted entries are replaced by their successors\n            if (lastReturned.left != null && lastReturned.right != null)\n                next = lastReturned;\n            deleteEntry(lastReturned);\n            expectedModCount = modCount;\n            lastReturned = null;\n        }\n    }\n\n    final class EntryIterator extends PrivateEntryIterator<Map.Entry<K,V>> {\n        EntryIterator(Entry<K,V> first) {\n            super(first);\n        }\n        public Map.Entry<K,V> next() {\n            return nextEntry();\n        }\n    }\n\n    final class ValueIterator extends PrivateEntryIterator<V> {\n        ValueIterator(Entry<K,V> first) {\n            super(first);\n        }\n        public V next() {\n            return nextEntry().value;\n        }\n    }\n\n    final class KeyIterator extends PrivateEntryIterator<K> {\n        KeyIterator(Entry<K,V> first) {\n            super(first);\n        }\n        public K next() {\n            return nextEntry().key;\n        }\n    }\n\n    final class DescendingKeyIterator extends PrivateEntryIterator<K> {\n        DescendingKeyIterator(Entry<K,V> first) {\n            super(first);\n        }\n        
public K next() {\n            return prevEntry().key;\n        }\n        public void remove() {\n            if (lastReturned == null)\n                throw new IllegalStateException();\n            if (modCount != expectedModCount)\n                throw new ConcurrentModificationException();\n            deleteEntry(lastReturned);\n            lastReturned = null;\n            expectedModCount = modCount;\n        }\n    }\n\n    // Little utilities\n\n    /**\n     * Compares two keys using the correct comparison method for this TreeMap.\n     */\n    @SuppressWarnings(\"unchecked\")\n    final int compare(Object k1, Object k2) {\n        return this.comparator == null ? ((Comparable<? super K>)k1).compareTo((K)k2) : this.comparator.compare((K)k1, (K)k2);\n    }\n\n    /**\n     * Test two values for equality.  Differs from o1.equals(o2) only in\n     * that it copes with {@code null} o1 properly.\n     */\n    static final boolean valEquals(Object o1, Object o2) {\n        return (o1==null ? o2==null : o1.equals(o2));\n    }\n\n    /**\n     * Return SimpleImmutableEntry for entry, or null if null\n     */\n    static <K,V> Map.Entry<K,V> exportEntry(TreeMap.Entry<K,V> e) {\n        return (e == null) ? null :\n                new AbstractMap.SimpleImmutableEntry<>(e);\n    }\n\n    /**\n     * Return key for entry, or null if null\n     */\n    static <K,V> K keyOrNull(TreeMap.Entry<K,V> e) {\n        return (e == null) ? 
null : e.key;\n    }\n\n    /**\n     * Returns the key corresponding to the specified Entry.\n     * @throws NoSuchElementException if the Entry is null\n     */\n    static <K> K key(Entry<K,?> e) {\n        if (e==null)\n            throw new NoSuchElementException();\n        return e.key;\n    }\n\n\n    // SubMaps\n\n    /**\n     * Dummy value serving as unmatchable fence key for unbounded\n     * SubMapIterators\n     */\n    private static final Object UNBOUNDED = new Object();\n\n    /**\n     * @serial include\n     */\n    abstract static class NavigableSubMap<K,V> extends AbstractMap<K,V> implements NavigableMap<K,V>, java.io.Serializable {\n        private static final long serialVersionUID = -2102997345730753016L;\n        /**\n         * The backing map.\n         */\n        final TreeMap<K,V> m;\n\n        /**\n         * Endpoints are represented as triples (fromStart, lo,\n         * loInclusive) and (toEnd, hi, hiInclusive). If fromStart is\n         * true, then the low (absolute) bound is the start of the\n         * backing map, and the other values are ignored. Otherwise,\n         * if loInclusive is true, lo is the inclusive bound, else lo\n         * is the exclusive bound. 
Similarly for the upper bound.\n         */\n        final K lo, hi;\n        final boolean fromStart, toEnd;\n        final boolean loInclusive, hiInclusive;\n\n        NavigableSubMap(TreeMap<K,V> m,\n                        boolean fromStart, K lo, boolean loInclusive,\n                        boolean toEnd, K hi, boolean hiInclusive) {\n            if (!fromStart && !toEnd) {\n                if (m.compare(lo, hi) > 0)\n                    throw new IllegalArgumentException(\"fromKey > toKey\");\n            } else {\n                if (!fromStart) // type check\n                    m.compare(lo, lo);\n                if (!toEnd)\n                    m.compare(hi, hi);\n            }\n\n            this.m = m;\n            this.fromStart = fromStart;\n            this.lo = lo;\n            this.loInclusive = loInclusive;\n            this.toEnd = toEnd;\n            this.hi = hi;\n            this.hiInclusive = hiInclusive;\n        }\n\n        // internal utilities\n\n        final boolean tooLow(Object key) {\n            if (!fromStart) {\n                int c = m.compare(key, lo);\n                if (c < 0 || (c == 0 && !loInclusive))\n                    return true;\n            }\n            return false;\n        }\n\n        final boolean tooHigh(Object key) {\n            if (!toEnd) {\n                int c = m.compare(key, hi);\n                if (c > 0 || (c == 0 && !hiInclusive))\n                    return true;\n            }\n            return false;\n        }\n\n        final boolean inRange(Object key) {\n            return !tooLow(key) && !tooHigh(key);\n        }\n\n        final boolean inClosedRange(Object key) {\n            return (fromStart || m.compare(key, lo) >= 0)\n                    && (toEnd || m.compare(hi, key) >= 0);\n        }\n\n        final boolean inRange(Object key, boolean inclusive) {\n            return inclusive ? 
inRange(key) : inClosedRange(key);\n        }\n\n        /*\n         * Absolute versions of relation operations.\n         * Subclasses map to these using like-named \"sub\"\n         * versions that invert senses for descending maps\n         */\n\n        final TreeMap.Entry<K,V> absLowest() {\n            TreeMap.Entry<K,V> e =\n                    (fromStart ?  m.getFirstEntry() :\n                            (loInclusive ? m.getCeilingEntry(lo) :\n                                    m.getHigherEntry(lo)));\n            return (e == null || tooHigh(e.key)) ? null : e;\n        }\n\n        final TreeMap.Entry<K,V> absHighest() {\n            TreeMap.Entry<K,V> e =\n                    (toEnd ?  m.getLastEntry() :\n                            (hiInclusive ?  m.getFloorEntry(hi) :\n                                    m.getLowerEntry(hi)));\n            return (e == null || tooLow(e.key)) ? null : e;\n        }\n\n        final TreeMap.Entry<K,V> absCeiling(K key) {\n            if (tooLow(key))\n                return absLowest();\n            TreeMap.Entry<K,V> e = m.getCeilingEntry(key);\n            return (e == null || tooHigh(e.key)) ? null : e;\n        }\n\n        final TreeMap.Entry<K,V> absHigher(K key) {\n            if (tooLow(key))\n                return absLowest();\n            TreeMap.Entry<K,V> e = m.getHigherEntry(key);\n            return (e == null || tooHigh(e.key)) ? null : e;\n        }\n\n        final TreeMap.Entry<K,V> absFloor(K key) {\n            if (tooHigh(key))\n                return absHighest();\n            TreeMap.Entry<K,V> e = m.getFloorEntry(key);\n            return (e == null || tooLow(e.key)) ? null : e;\n        }\n\n        final TreeMap.Entry<K,V> absLower(K key) {\n            if (tooHigh(key))\n                return absHighest();\n            TreeMap.Entry<K,V> e = m.getLowerEntry(key);\n            return (e == null || tooLow(e.key)) ? 
null : e;\n        }\n\n        /** Returns the absolute high fence for ascending traversal */\n        final TreeMap.Entry<K,V> absHighFence() {\n            return (toEnd ? null : (hiInclusive ?\n                    m.getHigherEntry(hi) :\n                    m.getCeilingEntry(hi)));\n        }\n\n        /** Return the absolute low fence for descending traversal  */\n        final TreeMap.Entry<K,V> absLowFence() {\n            return (fromStart ? null : (loInclusive ?\n                    m.getLowerEntry(lo) :\n                    m.getFloorEntry(lo)));\n        }\n\n        // Abstract methods defined in ascending vs descending classes\n        // These relay to the appropriate absolute versions\n\n        abstract TreeMap.Entry<K,V> subLowest();\n        abstract TreeMap.Entry<K,V> subHighest();\n        abstract TreeMap.Entry<K,V> subCeiling(K key);\n        abstract TreeMap.Entry<K,V> subHigher(K key);\n        abstract TreeMap.Entry<K,V> subFloor(K key);\n        abstract TreeMap.Entry<K,V> subLower(K key);\n\n        /** Returns ascending iterator from the perspective of this submap */\n        abstract Iterator<K> keyIterator();\n\n        abstract Spliterator<K> keySpliterator();\n\n        /** Returns descending iterator from the perspective of this submap */\n        abstract Iterator<K> descendingKeyIterator();\n\n        // public methods\n\n        public boolean isEmpty() {\n            return (fromStart && toEnd) ? m.isEmpty() : entrySet().isEmpty();\n        }\n\n        public int size() {\n            return (fromStart && toEnd) ? 
m.size() : entrySet().size();\n        }\n\n        public final boolean containsKey(Object key) {\n            return inRange(key) && m.containsKey(key);\n        }\n\n        public final V put(K key, V value) {\n            if (!inRange(key))\n                throw new IllegalArgumentException(\"key out of range\");\n            return m.put(key, value);\n        }\n\n        public final V get(Object key) {\n            return !inRange(key) ? null :  m.get(key);\n        }\n\n        public final V remove(Object key) {\n            return !inRange(key) ? null : m.remove(key);\n        }\n\n        public final Map.Entry<K,V> ceilingEntry(K key) {\n            return exportEntry(subCeiling(key));\n        }\n\n        public final K ceilingKey(K key) {\n            return keyOrNull(subCeiling(key));\n        }\n\n        public final Map.Entry<K,V> higherEntry(K key) {\n            return exportEntry(subHigher(key));\n        }\n\n        public final K higherKey(K key) {\n            return keyOrNull(subHigher(key));\n        }\n\n        public final Map.Entry<K,V> floorEntry(K key) {\n            return exportEntry(subFloor(key));\n        }\n\n        public final K floorKey(K key) {\n            return keyOrNull(subFloor(key));\n        }\n\n        public final Map.Entry<K,V> lowerEntry(K key) {\n            return exportEntry(subLower(key));\n        }\n\n        public final K lowerKey(K key) {\n            return keyOrNull(subLower(key));\n        }\n\n        public final K firstKey() {\n            return key(subLowest());\n        }\n\n        public final K lastKey() {\n            return key(subHighest());\n        }\n\n        public final Map.Entry<K,V> firstEntry() {\n            return exportEntry(subLowest());\n        }\n\n        public final Map.Entry<K,V> lastEntry() {\n            return exportEntry(subHighest());\n        }\n\n        public final Map.Entry<K,V> pollFirstEntry() {\n            TreeMap.Entry<K,V> e = subLowest();\n        
    Map.Entry<K,V> result = exportEntry(e);\n            if (e != null)\n                m.deleteEntry(e);\n            return result;\n        }\n\n        public final Map.Entry<K,V> pollLastEntry() {\n            TreeMap.Entry<K,V> e = subHighest();\n            Map.Entry<K,V> result = exportEntry(e);\n            if (e != null)\n                m.deleteEntry(e);\n            return result;\n        }\n\n        // Views\n        transient NavigableMap<K,V> descendingMapView;\n        transient EntrySetView entrySetView;\n        transient KeySet<K> navigableKeySetView;\n\n        public final NavigableSet<K> navigableKeySet() {\n            KeySet<K> nksv = navigableKeySetView;\n            return (nksv != null) ? nksv :\n                    (navigableKeySetView = new TreeMap.KeySet<>(this));\n        }\n\n        public final Set<K> keySet() {\n            return navigableKeySet();\n        }\n\n        public NavigableSet<K> descendingKeySet() {\n            return descendingMap().navigableKeySet();\n        }\n\n        public final SortedMap<K,V> subMap(K fromKey, K toKey) {\n            return subMap(fromKey, true, toKey, false);\n        }\n\n        public final SortedMap<K,V> headMap(K toKey) {\n            return headMap(toKey, false);\n        }\n\n        public final SortedMap<K,V> tailMap(K fromKey) {\n            return tailMap(fromKey, true);\n        }\n\n        // View classes\n\n        abstract class EntrySetView extends AbstractSet<Map.Entry<K,V>> {\n            private transient int size = -1, sizeModCount;\n\n            public int size() {\n                if (fromStart && toEnd)\n                    return m.size();\n                if (size == -1 || sizeModCount != m.modCount) {\n                    sizeModCount = m.modCount;\n                    size = 0;\n                    Iterator<?> i = iterator();\n                    while (i.hasNext()) {\n                        size++;\n                        i.next();\n                    
}\n                }\n                return size;\n            }\n\n            public boolean isEmpty() {\n                TreeMap.Entry<K,V> n = absLowest();\n                return n == null || tooHigh(n.key);\n            }\n\n            public boolean contains(Object o) {\n                if (!(o instanceof Map.Entry))\n                    return false;\n                Map.Entry<?,?> entry = (Map.Entry<?,?>) o;\n                Object key = entry.getKey();\n                if (!inRange(key))\n                    return false;\n                TreeMap.Entry<?,?> node = m.getEntry(key);\n                return node != null &&\n                        valEquals(node.getValue(), entry.getValue());\n            }\n\n            public boolean remove(Object o) {\n                if (!(o instanceof Map.Entry))\n                    return false;\n                Map.Entry<?,?> entry = (Map.Entry<?,?>) o;\n                Object key = entry.getKey();\n                if (!inRange(key))\n                    return false;\n                TreeMap.Entry<K,V> node = m.getEntry(key);\n                if (node!=null && valEquals(node.getValue(),\n                        entry.getValue())) {\n                    m.deleteEntry(node);\n                    return true;\n                }\n                return false;\n            }\n        }\n\n        /**\n         * Iterators for SubMaps\n         */\n        abstract class SubMapIterator<T> implements Iterator<T> {\n            TreeMap.Entry<K,V> lastReturned;\n            TreeMap.Entry<K,V> next;\n            final Object fenceKey;\n            int expectedModCount;\n\n            SubMapIterator(TreeMap.Entry<K,V> first,\n                           TreeMap.Entry<K,V> fence) {\n                expectedModCount = m.modCount;\n                lastReturned = null;\n                next = first;\n                fenceKey = fence == null ? 
UNBOUNDED : fence.key;\n            }\n\n            public final boolean hasNext() {\n                return next != null && next.key != fenceKey;\n            }\n\n            final TreeMap.Entry<K,V> nextEntry() {\n                TreeMap.Entry<K,V> e = next;\n                if (e == null || e.key == fenceKey)\n                    throw new NoSuchElementException();\n                if (m.modCount != expectedModCount)\n                    throw new ConcurrentModificationException();\n                next = successor(e);\n                lastReturned = e;\n                return e;\n            }\n\n            final TreeMap.Entry<K,V> prevEntry() {\n                TreeMap.Entry<K,V> e = next;\n                if (e == null || e.key == fenceKey)\n                    throw new NoSuchElementException();\n                if (m.modCount != expectedModCount)\n                    throw new ConcurrentModificationException();\n                next = predecessor(e);\n                lastReturned = e;\n                return e;\n            }\n\n            final void removeAscending() {\n                if (lastReturned == null)\n                    throw new IllegalStateException();\n                if (m.modCount != expectedModCount)\n                    throw new ConcurrentModificationException();\n                // deleted entries are replaced by their successors\n                if (lastReturned.left != null && lastReturned.right != null)\n                    next = lastReturned;\n                m.deleteEntry(lastReturned);\n                lastReturned = null;\n                expectedModCount = m.modCount;\n            }\n\n            final void removeDescending() {\n                if (lastReturned == null)\n                    throw new IllegalStateException();\n                if (m.modCount != expectedModCount)\n                    throw new ConcurrentModificationException();\n                m.deleteEntry(lastReturned);\n                lastReturned = 
null;\n                expectedModCount = m.modCount;\n            }\n\n        }\n\n        final class SubMapEntryIterator extends SubMapIterator<Map.Entry<K,V>> {\n            SubMapEntryIterator(TreeMap.Entry<K,V> first,\n                                TreeMap.Entry<K,V> fence) {\n                super(first, fence);\n            }\n            public Map.Entry<K,V> next() {\n                return nextEntry();\n            }\n            public void remove() {\n                removeAscending();\n            }\n        }\n\n        final class DescendingSubMapEntryIterator extends SubMapIterator<Map.Entry<K,V>> {\n            DescendingSubMapEntryIterator(TreeMap.Entry<K,V> last,\n                                          TreeMap.Entry<K,V> fence) {\n                super(last, fence);\n            }\n\n            public Map.Entry<K,V> next() {\n                return prevEntry();\n            }\n            public void remove() {\n                removeDescending();\n            }\n        }\n\n        // Implement minimal Spliterator as KeySpliterator backup\n        final class SubMapKeyIterator extends SubMapIterator<K>\n                implements Spliterator<K> {\n            SubMapKeyIterator(TreeMap.Entry<K,V> first,\n                              TreeMap.Entry<K,V> fence) {\n                super(first, fence);\n            }\n            public K next() {\n                return nextEntry().key;\n            }\n            public void remove() {\n                removeAscending();\n            }\n            public Spliterator<K> trySplit() {\n                return null;\n            }\n            public void forEachRemaining(Consumer<? super K> action) {\n                while (hasNext())\n                    action.accept(next());\n            }\n            public boolean tryAdvance(Consumer<? 
super K> action) {\n                if (hasNext()) {\n                    action.accept(next());\n                    return true;\n                }\n                return false;\n            }\n            public long estimateSize() {\n                return Long.MAX_VALUE;\n            }\n            public int characteristics() {\n                return Spliterator.DISTINCT | Spliterator.ORDERED |\n                        Spliterator.SORTED;\n            }\n            public final Comparator<? super K>  getComparator() {\n                return NavigableSubMap.this.comparator();\n            }\n        }\n\n        final class DescendingSubMapKeyIterator extends SubMapIterator<K>\n                implements Spliterator<K> {\n            DescendingSubMapKeyIterator(TreeMap.Entry<K,V> last,\n                                        TreeMap.Entry<K,V> fence) {\n                super(last, fence);\n            }\n            public K next() {\n                return prevEntry().key;\n            }\n            public void remove() {\n                removeDescending();\n            }\n            public Spliterator<K> trySplit() {\n                return null;\n            }\n            public void forEachRemaining(Consumer<? super K> action) {\n                while (hasNext())\n                    action.accept(next());\n            }\n            public boolean tryAdvance(Consumer<? 
super K> action) {\n                if (hasNext()) {\n                    action.accept(next());\n                    return true;\n                }\n                return false;\n            }\n            public long estimateSize() {\n                return Long.MAX_VALUE;\n            }\n            public int characteristics() {\n                return Spliterator.DISTINCT | Spliterator.ORDERED;\n            }\n        }\n    }\n\n    /**\n     * @serial include\n     */\n    static final class AscendingSubMap<K,V> extends NavigableSubMap<K,V> {\n        private static final long serialVersionUID = 912986545866124060L;\n\n        AscendingSubMap(TreeMap<K,V> m,\n                        boolean fromStart, K lo, boolean loInclusive,\n                        boolean toEnd, K hi, boolean hiInclusive) {\n            super(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive);\n        }\n\n        public Comparator<? super K> comparator() {\n            return m.comparator();\n        }\n\n        public NavigableMap<K,V> subMap(K fromKey, boolean fromInclusive,\n                                        K toKey,   boolean toInclusive) {\n            if (!inRange(fromKey, fromInclusive))\n                throw new IllegalArgumentException(\"fromKey out of range\");\n            if (!inRange(toKey, toInclusive))\n                throw new IllegalArgumentException(\"toKey out of range\");\n            return new AscendingSubMap<>(m,\n                    false, fromKey, fromInclusive,\n                    false, toKey,   toInclusive);\n        }\n\n        public NavigableMap<K,V> headMap(K toKey, boolean inclusive) {\n            if (!inRange(toKey, inclusive))\n                throw new IllegalArgumentException(\"toKey out of range\");\n            return new AscendingSubMap<>(m,\n                    fromStart, lo,    loInclusive,\n                    false,     toKey, inclusive);\n        }\n\n        public NavigableMap<K,V> tailMap(K fromKey, boolean inclusive) 
{\n            if (!inRange(fromKey, inclusive))\n                throw new IllegalArgumentException(\"fromKey out of range\");\n            return new AscendingSubMap<>(m,\n                    false, fromKey, inclusive,\n                    toEnd, hi,      hiInclusive);\n        }\n\n        public NavigableMap<K,V> descendingMap() {\n            NavigableMap<K,V> mv = descendingMapView;\n            return (mv != null) ? mv :\n                    (descendingMapView =\n                            new DescendingSubMap<>(m,\n                                    fromStart, lo, loInclusive,\n                                    toEnd,     hi, hiInclusive));\n        }\n\n        Iterator<K> keyIterator() {\n            return new SubMapKeyIterator(absLowest(), absHighFence());\n        }\n\n        Spliterator<K> keySpliterator() {\n            return new SubMapKeyIterator(absLowest(), absHighFence());\n        }\n\n        Iterator<K> descendingKeyIterator() {\n            return new DescendingSubMapKeyIterator(absHighest(), absLowFence());\n        }\n\n        final class AscendingEntrySetView extends EntrySetView {\n            public Iterator<Map.Entry<K,V>> iterator() {\n                return new SubMapEntryIterator(absLowest(), absHighFence());\n            }\n        }\n\n        public Set<Map.Entry<K,V>> entrySet() {\n            EntrySetView es = entrySetView;\n            return (es != null) ? 
es : (entrySetView = new AscendingEntrySetView());\n        }\n\n        TreeMap.Entry<K,V> subLowest()       { return absLowest(); }\n        TreeMap.Entry<K,V> subHighest()      { return absHighest(); }\n        TreeMap.Entry<K,V> subCeiling(K key) { return absCeiling(key); }\n        TreeMap.Entry<K,V> subHigher(K key)  { return absHigher(key); }\n        TreeMap.Entry<K,V> subFloor(K key)   { return absFloor(key); }\n        TreeMap.Entry<K,V> subLower(K key)   { return absLower(key); }\n    }\n\n    /**\n     * @serial include\n     */\n    static final class DescendingSubMap<K,V>  extends NavigableSubMap<K,V> {\n        private static final long serialVersionUID = 912986545866120460L;\n        DescendingSubMap(TreeMap<K,V> m,\n                         boolean fromStart, K lo, boolean loInclusive,\n                         boolean toEnd, K hi, boolean hiInclusive) {\n            super(m, fromStart, lo, loInclusive, toEnd, hi, hiInclusive);\n        }\n\n        private final Comparator<? super K> reverseComparator =\n                Collections.reverseOrder(m.comparator);\n\n        public Comparator<? 
super K> comparator() {\n            return reverseComparator;\n        }\n\n        public NavigableMap<K,V> subMap(K fromKey, boolean fromInclusive,\n                                        K toKey,   boolean toInclusive) {\n            if (!inRange(fromKey, fromInclusive))\n                throw new IllegalArgumentException(\"fromKey out of range\");\n            if (!inRange(toKey, toInclusive))\n                throw new IllegalArgumentException(\"toKey out of range\");\n            return new DescendingSubMap<>(m,\n                    false, toKey,   toInclusive,\n                    false, fromKey, fromInclusive);\n        }\n\n        public NavigableMap<K,V> headMap(K toKey, boolean inclusive) {\n            if (!inRange(toKey, inclusive))\n                throw new IllegalArgumentException(\"toKey out of range\");\n            return new DescendingSubMap<>(m,\n                    false, toKey, inclusive,\n                    toEnd, hi,    hiInclusive);\n        }\n\n        public NavigableMap<K,V> tailMap(K fromKey, boolean inclusive) {\n            if (!inRange(fromKey, inclusive))\n                throw new IllegalArgumentException(\"fromKey out of range\");\n            return new DescendingSubMap<>(m,\n                    fromStart, lo, loInclusive,\n                    false, fromKey, inclusive);\n        }\n\n        public NavigableMap<K,V> descendingMap() {\n            NavigableMap<K,V> mv = descendingMapView;\n            return (mv != null) ? 
mv :\n                    (descendingMapView =\n                            new AscendingSubMap<>(m,\n                                    fromStart, lo, loInclusive,\n                                    toEnd,     hi, hiInclusive));\n        }\n\n        Iterator<K> keyIterator() {\n            return new DescendingSubMapKeyIterator(absHighest(), absLowFence());\n        }\n\n        Spliterator<K> keySpliterator() {\n            return new DescendingSubMapKeyIterator(absHighest(), absLowFence());\n        }\n\n        Iterator<K> descendingKeyIterator() {\n            return new SubMapKeyIterator(absLowest(), absHighFence());\n        }\n\n        final class DescendingEntrySetView extends EntrySetView {\n            public Iterator<Map.Entry<K,V>> iterator() {\n                return new DescendingSubMapEntryIterator(absHighest(), absLowFence());\n            }\n        }\n\n        public Set<Map.Entry<K,V>> entrySet() {\n            EntrySetView es = entrySetView;\n            return (es != null) ? es : (entrySetView = new DescendingEntrySetView());\n        }\n\n        TreeMap.Entry<K,V> subLowest()       { return absHighest(); }\n        TreeMap.Entry<K,V> subHighest()      { return absLowest(); }\n        TreeMap.Entry<K,V> subCeiling(K key) { return absFloor(key); }\n        TreeMap.Entry<K,V> subHigher(K key)  { return absLower(key); }\n        TreeMap.Entry<K,V> subFloor(K key)   { return absCeiling(key); }\n        TreeMap.Entry<K,V> subLower(K key)   { return absHigher(key); }\n    }\n\n    /**\n     * This class exists solely for the sake of serialization\n     * compatibility with previous releases of TreeMap that did not\n     * support NavigableMap.  It translates an old-version SubMap into\n     * a new-version AscendingSubMap. 
This class is never otherwise\n     * used.\n     *\n     * @serial include\n     */\n    private class SubMap extends AbstractMap<K,V> implements SortedMap<K,V>, java.io.Serializable {\n        private static final long serialVersionUID = -6520786458950516097L;\n        private boolean fromStart = false, toEnd = false;\n        private K fromKey, toKey;\n        private Object readResolve() {\n            return new AscendingSubMap<>(TreeMap.this,\n                    fromStart, fromKey, true,\n                    toEnd, toKey, false);\n        }\n        public Set<Map.Entry<K,V>> entrySet() { throw new InternalError(); }\n        public K lastKey() { throw new InternalError(); }\n        public K firstKey() { throw new InternalError(); }\n        public SortedMap<K,V> subMap(K fromKey, K toKey) { throw new InternalError(); }\n        public SortedMap<K,V> headMap(K toKey) { throw new InternalError(); }\n        public SortedMap<K,V> tailMap(K fromKey) { throw new InternalError(); }\n        public Comparator<? 
super K> comparator() { throw new InternalError(); }\n    }\n\n\n    // Red-black mechanics\n\n    protected  static final boolean RED   = false;\n    protected  static final boolean BLACK = true;\n\n    protected static class Entry<K,V> implements Map.Entry<K,V>, Pinenut {\n        K key;\n        V value;\n        Entry<K,V> left;\n        Entry<K,V> right;\n        Entry<K,V> parent;\n        boolean color = BLACK;\n\n        Entry( K key, V value, Entry<K,V > parent ) {\n            this.key = key;\n            this.value = value;\n            this.parent = parent;\n        }\n\n        /**\n         * Returns the key.\n         *\n         * @return the key\n         */\n        public K getKey() {\n            return key;\n        }\n\n        /**\n         * Returns the value associated with the key.\n         *\n         * @return the value associated with the key\n         */\n        public V getValue() {\n            return value;\n        }\n\n        /**\n         * Replaces the value currently associated with the key with the given\n         * value.\n         *\n         * @return the value associated with the key before this method was\n         *         called\n         */\n        public V setValue(V value) {\n            V oldValue = this.value;\n            this.value = value;\n            return oldValue;\n        }\n\n        public boolean equals(Object o) {\n            if (!(o instanceof Map.Entry))\n                return false;\n            Map.Entry<?,?> e = (Map.Entry<?,?>)o;\n\n            return valEquals(key,e.getKey()) && valEquals(value,e.getValue());\n        }\n\n        @Override\n        public int hashCode() {\n            int keyHash = (key==null ? 0 : key.hashCode());\n            int valueHash = (value==null ? 
0 : value.hashCode());\n            return keyHash ^ valueHash;\n        }\n\n        @Override\n        public String toString() {\n            return this.toJSONString();\n        }\n\n        @Override\n        public String toJSONString() {\n            return \"{\" + StringUtils.jsonQuote( this.key.toString() ) + \":\" + JSON.stringify( this.value ) + \"}\";\n        }\n\n        @Override\n        public TypeIndex prototype() {\n            return Prototype.typeid( this );\n        }\n    }\n\n    /**\n     * Returns the first Entry in the TreeMap (according to the TreeMap's\n     * key-sort function).  Returns null if the TreeMap is empty.\n     */\n    final Entry<K,V> getFirstEntry() {\n        Entry<K,V> p = root;\n        if (p != null)\n            while (p.left != null)\n                p = p.left;\n        return p;\n    }\n\n    /**\n     * Returns the last Entry in the TreeMap (according to the TreeMap's\n     * key-sort function).  Returns null if the TreeMap is empty.\n     */\n    final Entry<K,V> getLastEntry() {\n        Entry<K,V> p = root;\n        if (p != null)\n            while (p.right != null)\n                p = p.right;\n        return p;\n    }\n\n    /**\n     * Returns the successor of the specified Entry, or null if no such.\n     */\n    static <K,V> TreeMap.Entry<K,V> successor(Entry<K,V> t) {\n        if (t == null)\n            return null;\n        else if (t.right != null) {\n            Entry<K,V> p = t.right;\n            while (p.left != null)\n                p = p.left;\n            return p;\n        } else {\n            Entry<K,V> p = t.parent;\n            Entry<K,V> ch = t;\n            while (p != null && ch == p.right) {\n                ch = p;\n                p = p.parent;\n            }\n            return p;\n        }\n    }\n\n    /**\n     * Returns the predecessor of the specified Entry, or null if no such.\n     */\n    static <K,V> Entry<K,V> predecessor(Entry<K,V> t) {\n        if (t == null)\n       
     return null;\n        else if (t.left != null) {\n            Entry<K,V> p = t.left;\n            while (p.right != null)\n                p = p.right;\n            return p;\n        } else {\n            Entry<K,V> p = t.parent;\n            Entry<K,V> ch = t;\n            while (p != null && ch == p.left) {\n                ch = p;\n                p = p.parent;\n            }\n            return p;\n        }\n    }\n\n    /**\n     * Balancing operations.\n     *\n     * Implementations of rebalancings during insertion and deletion are\n     * slightly different than the CLR version.  Rather than using dummy\n     * nilnodes, we use a set of accessors that deal properly with null.  They\n     * are used to avoid messiness surrounding nullness checks in the main\n     * algorithms.\n     */\n\n    private static <K,V> boolean colorOf(Entry<K,V> p) {\n        return (p == null ? BLACK : p.color);\n    }\n\n    private static <K,V> Entry<K,V> parentOf(Entry<K,V> p) {\n        return (p == null ? null: p.parent);\n    }\n\n    private static <K,V> void setColor(Entry<K,V> p, boolean c) {\n        if (p != null)\n            p.color = c;\n    }\n\n    private static <K,V> Entry<K,V> leftOf(Entry<K,V> p) {\n        return (p == null) ? null: p.left;\n    }\n\n    private static <K,V> Entry<K,V> rightOf(Entry<K,V> p) {\n        return (p == null) ? 
null: p.right;\n    }\n\n    /** From CLR */\n    private void rotateLeft(Entry<K,V> p) {\n        if (p != null) {\n            Entry<K,V> r = p.right;\n            p.right = r.left;\n            if (r.left != null)\n                r.left.parent = p;\n            r.parent = p.parent;\n            if (p.parent == null)\n                root = r;\n            else if (p.parent.left == p)\n                p.parent.left = r;\n            else\n                p.parent.right = r;\n            r.left = p;\n            p.parent = r;\n        }\n    }\n\n    /** From CLR */\n    private void rotateRight(Entry<K,V> p) {\n        if (p != null) {\n            Entry<K,V> l = p.left;\n            p.left = l.right;\n            if (l.right != null) l.right.parent = p;\n            l.parent = p.parent;\n            if (p.parent == null)\n                root = l;\n            else if (p.parent.right == p)\n                p.parent.right = l;\n            else p.parent.left = l;\n            l.right = p;\n            p.parent = l;\n        }\n    }\n\n    /** From CLR */\n    protected void fixAfterInsertion(Entry<K,V> x) {\n        x.color = RED;\n\n        while (x != null && x != root && x.parent.color == RED) {\n            if (parentOf(x) == leftOf(parentOf(parentOf(x)))) {\n                Entry<K,V> y = rightOf(parentOf(parentOf(x)));\n                if (colorOf(y) == RED) {\n                    setColor(parentOf(x), BLACK);\n                    setColor(y, BLACK);\n                    setColor(parentOf(parentOf(x)), RED);\n                    x = parentOf(parentOf(x));\n                } else {\n                    if (x == rightOf(parentOf(x))) {\n                        x = parentOf(x);\n                        rotateLeft(x);\n                    }\n                    setColor(parentOf(x), BLACK);\n                    setColor(parentOf(parentOf(x)), RED);\n                    rotateRight(parentOf(parentOf(x)));\n                }\n            } else {\n              
  Entry<K,V> y = leftOf(parentOf(parentOf(x)));\n                if (colorOf(y) == RED) {\n                    setColor(parentOf(x), BLACK);\n                    setColor(y, BLACK);\n                    setColor(parentOf(parentOf(x)), RED);\n                    x = parentOf(parentOf(x));\n                } else {\n                    if (x == leftOf(parentOf(x))) {\n                        x = parentOf(x);\n                        rotateRight(x);\n                    }\n                    setColor(parentOf(x), BLACK);\n                    setColor(parentOf(parentOf(x)), RED);\n                    rotateLeft(parentOf(parentOf(x)));\n                }\n            }\n        }\n        root.color = BLACK;\n    }\n\n    protected Entry<K,V> onlyDeleteEntry( Entry<K,V> p ) {\n        this.modCount++;\n        this.size--;\n\n        // If strictly internal, copy successor's element to p and then make p\n        // point to successor.\n        if ( p.left != null && p.right != null ) {\n            Entry<K,V> s = successor(p);\n            p.key = s.key;\n            p.value = s.value;\n            p = s; // Fuck this... The `p` could be replaced... bad for Linked-Opt...\n        } // p has 2 children\n\n        // Start fixup at replacement node, if it exists.\n        Entry<K,V> replacement = ( p.left != null ? 
p.left : p.right );\n\n        if ( replacement != null ) {\n            // Link replacement to parent\n            replacement.parent = p.parent;\n            if ( p.parent == null ) {\n                root = replacement;\n            }\n            else if ( p == p.parent.left ) {\n                p.parent.left  = replacement;\n            }\n            else {\n                p.parent.right = replacement;\n            }\n\n            // Null out links so they are OK to use by fixAfterDeletion.\n            p.left = p.right = p.parent = null;\n\n            // Fix replacement\n            if ( p.color == BLACK ) {\n                this.fixAfterDeletion(replacement);\n            }\n        }\n        else if ( p.parent == null ) { // return if we are the only node.\n            this.root = null;\n        }\n        else { //  No children. Use self as phantom replacement and unlink.\n            if ( p.color == BLACK ) {\n                this.fixAfterDeletion( p );\n            }\n\n            if ( p.parent != null ) {\n                if ( p == p.parent.left ) {\n                    p.parent.left = null;\n                }\n                else if ( p == p.parent.right ) {\n                    p.parent.right = null;\n                }\n                p.parent = null;\n            }\n        }\n\n        return p;\n    }\n\n    protected void deleteEntry( Entry<K,V> p ) {\n        p = this.onlyDeleteEntry( p );\n        this.afterNodeRemoval( p );\n    }\n\n    /** From CLR */\n    private void fixAfterDeletion(Entry<K,V> x) {\n        while (x != root && colorOf(x) == BLACK) {\n            if (x == leftOf(parentOf(x))) {\n                Entry<K,V> sib = rightOf(parentOf(x));\n\n                if (colorOf(sib) == RED) {\n                    setColor(sib, BLACK);\n                    setColor(parentOf(x), RED);\n                    rotateLeft(parentOf(x));\n                    sib = rightOf(parentOf(x));\n                }\n\n                if 
(colorOf(leftOf(sib))  == BLACK &&\n                        colorOf(rightOf(sib)) == BLACK) {\n                    setColor(sib, RED);\n                    x = parentOf(x);\n                } else {\n                    if (colorOf(rightOf(sib)) == BLACK) {\n                        setColor(leftOf(sib), BLACK);\n                        setColor(sib, RED);\n                        rotateRight(sib);\n                        sib = rightOf(parentOf(x));\n                    }\n                    setColor(sib, colorOf(parentOf(x)));\n                    setColor(parentOf(x), BLACK);\n                    setColor(rightOf(sib), BLACK);\n                    rotateLeft(parentOf(x));\n                    x = root;\n                }\n            } else { // symmetric\n                Entry<K,V> sib = leftOf(parentOf(x));\n\n                if (colorOf(sib) == RED) {\n                    setColor(sib, BLACK);\n                    setColor(parentOf(x), RED);\n                    rotateRight(parentOf(x));\n                    sib = leftOf(parentOf(x));\n                }\n\n                if ( colorOf(rightOf(sib)) == BLACK && colorOf(leftOf(sib) ) == BLACK) {\n                    setColor(sib, RED);\n                    x = parentOf(x);\n                }\n                else {\n                    if (colorOf(leftOf(sib)) == BLACK) {\n                        setColor(rightOf(sib), BLACK);\n                        setColor(sib, RED);\n                        rotateLeft(sib);\n                        sib = leftOf(parentOf(x));\n                    }\n                    setColor(sib, colorOf(parentOf(x)));\n                    setColor(parentOf(x), BLACK);\n                    setColor(leftOf(sib), BLACK);\n                    rotateRight(parentOf(x));\n                    x = root;\n                }\n            }\n        }\n\n        setColor(x, BLACK);\n    }\n\n    private static final long serialVersionUID = 919286545866124006L;\n\n    @Override\n    public String 
toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    protected void internalWriteEntries( ObjectOutputStream s ) throws IOException {\n        for ( Map.Entry<K, V> e : entrySet() ) {\n            s.writeObject(e.getKey());\n            s.writeObject(e.getValue());\n        }\n    }\n\n    protected void internalReadEntries( int size, final ObjectInputStream s ) throws IOException, ClassNotFoundException {\n        this.buildFromSorted( size, null, s, null );\n    }\n\n    protected void writeObject( ObjectOutputStream s ) throws IOException {\n        // Write out the Comparator and any hidden stuff\n        s.defaultWriteObject();\n\n        // Write out size (number of Mappings)\n        s.writeInt( this.size );\n\n        // Write out keys and values (alternating)\n        this.internalWriteEntries( s );\n    }\n\n    protected void readObject( final ObjectInputStream s ) throws IOException, ClassNotFoundException {\n        // Read in the Comparator and any hidden stuff\n        s.defaultReadObject();\n\n        // Read in size\n        int size = s.readInt();\n\n        this.internalReadEntries( size,  s );\n    }\n\n    /** Intended to be called only from TreeSet.readObject */\n    void readTreeSet(int size, ObjectInputStream s, V defaultVal) throws IOException, ClassNotFoundException {\n        buildFromSorted(size, null, s, defaultVal);\n    }\n\n    /** Intended to be called only from TreeSet.addAll */\n    void addAllForTreeSet(SortedSet<? extends K> set, V defaultVal) {\n        try {\n            buildFromSorted(set.size(), set.iterator(), null, defaultVal);\n        } catch (IOException | ClassNotFoundException cannotHappen) {\n        }\n    }\n\n\n    /**\n     * Linear time tree building algorithm from sorted data.  Can accept keys\n     * and/or values from iterator or stream. 
This leads to too many\n     * parameters, but seems better than alternatives.  The four formats\n     * that this method accepts are:\n     *\n     *    1) An iterator of Map.Entries.  (it != null, defaultVal == null).\n     *    2) An iterator of keys.         (it != null, defaultVal != null).\n     *    3) A stream of alternating serialized keys and values.\n     *                                   (it == null, defaultVal == null).\n     *    4) A stream of serialized keys. (it == null, defaultVal != null).\n     *\n     * It is assumed that the comparator of the TreeMap is already set prior\n     * to calling this method.\n     *\n     * @param size the number of keys (or key-value pairs) to be read from\n     *        the iterator or stream\n     * @param it If non-null, new entries are created from entries\n     *        or keys read from this iterator.\n     * @param str If non-null, new entries are created from keys and\n     *        possibly values read from this stream in serialized form.\n     *        Exactly one of it and str should be non-null.\n     * @param defaultVal if non-null, this default value is used for\n     *        each value in the map.  If null, each value is read from\n     *        iterator or stream, as described above.\n     * @throws java.io.IOException propagated from stream reads. This cannot\n     *         occur if str is null.\n     * @throws ClassNotFoundException propagated from readObject.\n     *         This cannot occur if str is null.\n     */\n    private void buildFromSorted( int size, Iterator<?> it, ObjectInputStream str, V defaultVal ) throws  IOException, ClassNotFoundException {\n        this.size = size;\n        this.root = this.buildFromSorted( 0, 0, size-1, computeRedLevel(size), it, str, defaultVal );\n    }\n\n    /**\n     * Recursive \"helper method\" that does the real work of the\n     * previous method.  Identically named parameters have\n     * identical definitions.  
Additional parameters are documented below.\n     * It is assumed that the comparator and size fields of the TreeMap are\n     * already set prior to calling this method.  (It ignores both fields.)\n     *\n     * @param level the current level of tree. Initial call should be 0.\n     * @param lo the first element index of this subtree. Initial should be 0.\n     * @param hi the last element index of this subtree.  Initial should be\n     *        size-1.\n     * @param redLevel the level at which nodes should be red.\n     *        Must be equal to computeRedLevel for tree of this size.\n     */\n    @SuppressWarnings(\"unchecked\")\n    private final Entry<K,V> buildFromSorted( int level, int lo, int hi, int redLevel, Iterator<?> it, ObjectInputStream str, V defaultVal ) throws  IOException, ClassNotFoundException {\n        /*\n         * Strategy: The root is the middlemost element. To get to it, we\n         * have to first recursively construct the entire left subtree,\n         * so as to grab all of its elements. 
We can then proceed with right\n         * subtree.\n         *\n         * The lo and hi arguments are the minimum and maximum\n         * indices to pull out of the iterator or stream for current subtree.\n         * They are not actually indexed, we just proceed sequentially,\n         * ensuring that items are extracted in corresponding order.\n         */\n\n        if (hi < lo) {\n            return null;\n        }\n\n        int mid = (lo + hi) >>> 1;\n\n        Entry<K,V> left  = null;\n        if ( lo < mid ) {\n            left = this.buildFromSorted(level+1, lo, mid - 1, redLevel, it, str, defaultVal);\n        }\n\n        // extract key and/or value from iterator or stream\n        K key;\n        V value;\n        if ( it != null ) {\n            if ( defaultVal == null ) {\n                Map.Entry<?,?> entry = (Map.Entry<?,?>)it.next();\n                key = (K)entry.getKey();\n                value = (V)entry.getValue();\n            }\n            else {\n                key = (K)it.next();\n                value = defaultVal;\n            }\n        }\n        else { // use stream\n            key = (K) str.readObject();\n            value = (defaultVal != null ? defaultVal : (V) str.readObject());\n        }\n\n        Entry<K,V> middle = this.spawnNode( key, value, null );\n\n        // color nodes in non-full bottommost level red\n        if ( level == redLevel ) {\n            middle.color = RED;\n        }\n\n        if ( left != null ) {\n            middle.left = left;\n            left.parent = middle;\n        }\n\n        if ( mid < hi ) {\n            Entry<K,V> right = this.buildFromSorted(level+1, mid+1, hi, redLevel, it, str, defaultVal);\n            middle.right = right;\n            right.parent = middle;\n        }\n\n        return middle;\n    }\n\n    /**\n     * Finds the level down to which to assign all nodes BLACK.  
This is the\n     * last `full' level of the complete binary tree produced by buildTree.\n     * The remaining nodes are colored RED. (This makes a `nice' set of\n     * color assignments wrt future insertions.) This level number is\n     * computed by finding the number of splits needed to reach the zeroeth\n     * node.\n     *\n     * @param size the (non-negative) number of keys in the tree to be built\n     */\n    private static int computeRedLevel(int size) {\n        return 31 - Integer.numberOfLeadingZeros(size + 1);\n    }\n\n    /**\n     * Currently, we support Spliterator-based versions only for the\n     * full map, in either plain of descending form, otherwise relying\n     * on defaults because size estimation for submaps would dominate\n     * costs. The type tests needed to check these for key views are\n     * not very nice but avoid disrupting existing class\n     * structures. Callers must use plain default spliterators if this\n     * returns null.\n     */\n    static <K> Spliterator<K> keySpliteratorFor(NavigableMap<K,?> m) {\n        if (m instanceof TreeMap) {\n            @SuppressWarnings(\"unchecked\") TreeMap<K,Object> t =\n                    (TreeMap<K,Object>) m;\n            return t.keySpliterator();\n        }\n        if (m instanceof DescendingSubMap) {\n            @SuppressWarnings(\"unchecked\") DescendingSubMap<K,?> dm =\n                    (DescendingSubMap<K,?>) m;\n            TreeMap<K,?> tm = dm.m;\n            if (dm == tm.descendingMap) {\n                @SuppressWarnings(\"unchecked\") TreeMap<K,Object> t =\n                        (TreeMap<K,Object>) tm;\n                return t.descendingKeySpliterator();\n            }\n        }\n        @SuppressWarnings(\"unchecked\") NavigableSubMap<K,?> sm =\n                (NavigableSubMap<K,?>) m;\n        return sm.keySpliterator();\n    }\n\n    final Spliterator<K> keySpliterator() {\n        return new KeySpliterator<>(this, null, null, 0, -1, 0);\n    }\n\n    
final Spliterator<K> descendingKeySpliterator() {\n        return new DescendingKeySpliterator<>(this, null, null, 0, -2, 0);\n    }\n\n    /**\n     * Base class for spliterators.  Iteration starts at a given\n     * origin and continues up to but not including a given fence (or\n     * null for end).  At top-level, for ascending cases, the first\n     * split uses the root as left-fence/right-origin. From there,\n     * right-hand splits replace the current fence with its left\n     * child, also serving as origin for the split-off spliterator.\n     * Left-hands are symmetric. Descending versions place the origin\n     * at the end and invert ascending split rules.  This base class\n     * is non-committal about directionality, or whether the top-level\n     * spliterator covers the whole tree. This means that the actual\n     * split mechanics are located in subclasses. Some of the subclass\n     * trySplit methods are identical (except for return types), but\n     * not nicely factorable.\n     *\n     * Currently, subclass versions exist only for the full map\n     * (including descending keys via its descendingMap).  
Others are\n     * possible but currently not worthwhile because submaps require\n     * O(n) computations to determine size, which substantially limits\n     * potential speed-ups of using custom Spliterators versus default\n     * mechanics.\n     *\n     * To boostrap initialization, external constructors use\n     * negative size estimates: -1 for ascend, -2 for descend.\n     */\n    static class TreeMapSpliterator<K,V> {\n        final TreeMap<K,V> tree;\n        TreeMap.Entry<K,V> current; // traverser; initially first node in range\n        TreeMap.Entry<K,V> fence;   // one past last, or null\n        int side;                   // 0: top, -1: is a left split, +1: right\n        int est;                    // size estimate (exact only for top-level)\n        int expectedModCount;       // for CME checks\n\n        TreeMapSpliterator(TreeMap<K,V> tree,\n                           TreeMap.Entry<K,V> origin, TreeMap.Entry<K,V> fence,\n                           int side, int est, int expectedModCount) {\n            this.tree = tree;\n            this.current = origin;\n            this.fence = fence;\n            this.side = side;\n            this.est = est;\n            this.expectedModCount = expectedModCount;\n        }\n\n        final int getEstimate() { // force initialization\n            int s; TreeMap<K,V> t;\n            if ((s = est) < 0) {\n                if ((t = tree) != null) {\n                    current = (s == -1) ? 
t.getFirstEntry() : t.getLastEntry();\n                    s = est = t.size;\n                    expectedModCount = t.modCount;\n                }\n                else\n                    s = est = 0;\n            }\n            return s;\n        }\n\n        public final long estimateSize() {\n            return (long)getEstimate();\n        }\n    }\n\n    static final class KeySpliterator<K,V> extends TreeMapSpliterator<K,V> implements Spliterator<K> {\n        KeySpliterator(TreeMap<K,V> tree,\n                       TreeMap.Entry<K,V> origin, TreeMap.Entry<K,V> fence,\n                       int side, int est, int expectedModCount) {\n            super(tree, origin, fence, side, est, expectedModCount);\n        }\n\n        public KeySpliterator<K,V> trySplit() {\n            if (est < 0)\n                getEstimate(); // force initialization\n            int d = side;\n            TreeMap.Entry<K,V> e = current, f = fence,\n                    s = ((e == null || e == f) ? null :      // empty\n                            (d == 0)              ? tree.root : // was top\n                                    (d >  0)              ? e.right :   // was right\n                                            (d <  0 && f != null) ? f.left :    // was left\n                                                    null);\n            if (s != null && s != e && s != f &&\n                    tree.compare(e.key, s.key) < 0) {        // e not already past s\n                side = 1;\n                return new KeySpliterator<>\n                        (tree, e, current = s, -1, est >>>= 1, expectedModCount);\n            }\n            return null;\n        }\n\n        public void forEachRemaining(Consumer<? 
super K> action) {\n            if (action == null)\n                throw new NullPointerException();\n            if (est < 0)\n                getEstimate(); // force initialization\n            TreeMap.Entry<K,V> f = fence, e, p, pl;\n            if ((e = current) != null && e != f) {\n                current = f; // exhaust\n                do {\n                    action.accept(e.key);\n                    if ((p = e.right) != null) {\n                        while ((pl = p.left) != null)\n                            p = pl;\n                    }\n                    else {\n                        while ((p = e.parent) != null && e == p.right)\n                            e = p;\n                    }\n                } while ((e = p) != null && e != f);\n                if (tree.modCount != expectedModCount)\n                    throw new ConcurrentModificationException();\n            }\n        }\n\n        public boolean tryAdvance(Consumer<? super K> action) {\n            TreeMap.Entry<K,V> e;\n            if (action == null)\n                throw new NullPointerException();\n            if (est < 0)\n                getEstimate(); // force initialization\n            if ((e = current) == null || e == fence)\n                return false;\n            current = successor(e);\n            action.accept(e.key);\n            if (tree.modCount != expectedModCount)\n                throw new ConcurrentModificationException();\n            return true;\n        }\n\n        public int characteristics() {\n            return (side == 0 ? Spliterator.SIZED : 0) |\n                    Spliterator.DISTINCT | Spliterator.SORTED | Spliterator.ORDERED;\n        }\n\n        public final Comparator<? 
super K>  getComparator() {\n            return tree.comparator;\n        }\n\n    }\n\n    static final class DescendingKeySpliterator<K,V> extends TreeMapSpliterator<K,V> implements Spliterator<K> {\n        DescendingKeySpliterator(TreeMap<K,V> tree, TreeMap.Entry<K,V> origin, TreeMap.Entry<K,V> fence, int side, int est, int expectedModCount) {\n            super(tree, origin, fence, side, est, expectedModCount);\n        }\n\n        public DescendingKeySpliterator<K,V> trySplit() {\n            if (est < 0)\n                getEstimate(); // force initialization\n            int d = side;\n            TreeMap.Entry<K,V> e = current, f = fence,\n                    s = ((e == null || e == f) ? null :      // empty\n                            (d == 0)              ? tree.root : // was top\n                                    (d <  0)              ? e.left :    // was left\n                                            (d >  0 && f != null) ? f.right :   // was right\n                                                    null);\n            if (s != null && s != e && s != f &&\n                    tree.compare(e.key, s.key) > 0) {       // e not already past s\n                side = 1;\n                return new DescendingKeySpliterator<>\n                        (tree, e, current = s, -1, est >>>= 1, expectedModCount);\n            }\n            return null;\n        }\n\n        public void forEachRemaining(Consumer<? 
super K> action) {\n            if (action == null)\n                throw new NullPointerException();\n            if (est < 0)\n                getEstimate(); // force initialization\n            TreeMap.Entry<K,V> f = fence, e, p, pr;\n            if ((e = current) != null && e != f) {\n                current = f; // exhaust\n                do {\n                    action.accept(e.key);\n                    if ((p = e.left) != null) {\n                        while ((pr = p.right) != null)\n                            p = pr;\n                    }\n                    else {\n                        while ((p = e.parent) != null && e == p.left)\n                            e = p;\n                    }\n                } while ((e = p) != null && e != f);\n                if (tree.modCount != expectedModCount)\n                    throw new ConcurrentModificationException();\n            }\n        }\n\n        public boolean tryAdvance(Consumer<? super K> action) {\n            TreeMap.Entry<K,V> e;\n            if (action == null)\n                throw new NullPointerException();\n            if (est < 0)\n                getEstimate(); // force initialization\n            if ((e = current) == null || e == fence)\n                return false;\n            current = predecessor(e);\n            action.accept(e.key);\n            if (tree.modCount != expectedModCount)\n                throw new ConcurrentModificationException();\n            return true;\n        }\n\n        public int characteristics() {\n            return (side == 0 ? 
Spliterator.SIZED : 0) |\n                    Spliterator.DISTINCT | Spliterator.ORDERED;\n        }\n    }\n\n    static final class ValueSpliterator<K,V> extends TreeMapSpliterator<K,V> implements Spliterator<V> {\n        ValueSpliterator(TreeMap<K,V> tree, TreeMap.Entry<K,V> origin, TreeMap.Entry<K,V> fence, int side, int est, int expectedModCount) {\n            super(tree, origin, fence, side, est, expectedModCount);\n        }\n\n        public ValueSpliterator<K,V> trySplit() {\n            if (est < 0)\n                getEstimate(); // force initialization\n            int d = side;\n            TreeMap.Entry<K,V> e = current, f = fence,\n                    s = ((e == null || e == f) ? null :      // empty\n                            (d == 0)              ? tree.root : // was top\n                                    (d >  0)              ? e.right :   // was right\n                                            (d <  0 && f != null) ? f.left :    // was left\n                                                    null);\n            if (s != null && s != e && s != f &&\n                    tree.compare(e.key, s.key) < 0) {        // e not already past s\n                side = 1;\n                return new ValueSpliterator<>\n                        (tree, e, current = s, -1, est >>>= 1, expectedModCount);\n            }\n            return null;\n        }\n\n        public void forEachRemaining(Consumer<? 
super V> action) {\n            if (action == null)\n                throw new NullPointerException();\n            if (est < 0)\n                getEstimate(); // force initialization\n            TreeMap.Entry<K,V> f = fence, e, p, pl;\n            if ((e = current) != null && e != f) {\n                current = f; // exhaust\n                do {\n                    action.accept(e.value);\n                    if ((p = e.right) != null) {\n                        while ((pl = p.left) != null)\n                            p = pl;\n                    }\n                    else {\n                        while ((p = e.parent) != null && e == p.right)\n                            e = p;\n                    }\n                } while ((e = p) != null && e != f);\n                if (tree.modCount != expectedModCount)\n                    throw new ConcurrentModificationException();\n            }\n        }\n\n        public boolean tryAdvance(Consumer<? super V> action) {\n            TreeMap.Entry<K,V> e;\n            if (action == null)\n                throw new NullPointerException();\n            if (est < 0)\n                getEstimate(); // force initialization\n            if ((e = current) == null || e == fence)\n                return false;\n            current = successor(e);\n            action.accept(e.value);\n            if (tree.modCount != expectedModCount)\n                throw new ConcurrentModificationException();\n            return true;\n        }\n\n        public int characteristics() {\n            return (side == 0 ? 
Spliterator.SIZED : 0) | Spliterator.ORDERED;\n        }\n    }\n\n    static final class EntrySpliterator<K,V> extends TreeMapSpliterator<K,V> implements Spliterator<Map.Entry<K,V>> {\n        EntrySpliterator(TreeMap<K,V> tree, TreeMap.Entry<K,V> origin, TreeMap.Entry<K,V> fence, int side, int est, int expectedModCount) {\n            super(tree, origin, fence, side, est, expectedModCount);\n        }\n\n        public EntrySpliterator<K,V> trySplit() {\n            if (est < 0)\n                getEstimate(); // force initialization\n            int d = side;\n            TreeMap.Entry<K,V> e = current, f = fence,\n                    s = ((e == null || e == f) ? null :      // empty\n                            (d == 0)              ? tree.root : // was top\n                                    (d >  0)              ? e.right :   // was right\n                                            (d <  0 && f != null) ? f.left :    // was left\n                                                    null);\n            if (s != null && s != e && s != f &&\n                    tree.compare(e.key, s.key) < 0) {        // e not already past s\n                side = 1;\n                return new EntrySpliterator<>\n                        (tree, e, current = s, -1, est >>>= 1, expectedModCount);\n            }\n            return null;\n        }\n\n        public void forEachRemaining(Consumer<? 
super Map.Entry<K, V>> action) {\n            if (action == null)\n                throw new NullPointerException();\n            if (est < 0)\n                getEstimate(); // force initialization\n            TreeMap.Entry<K,V> f = fence, e, p, pl;\n            if ((e = current) != null && e != f) {\n                current = f; // exhaust\n                do {\n                    action.accept(e);\n                    if ((p = e.right) != null) {\n                        while ((pl = p.left) != null)\n                            p = pl;\n                    }\n                    else {\n                        while ((p = e.parent) != null && e == p.right)\n                            e = p;\n                    }\n                } while ((e = p) != null && e != f);\n                if (tree.modCount != expectedModCount)\n                    throw new ConcurrentModificationException();\n            }\n        }\n\n        public boolean tryAdvance(Consumer<? super Map.Entry<K,V>> action) {\n            TreeMap.Entry<K,V> e;\n            if (action == null)\n                throw new NullPointerException();\n            if (est < 0)\n                getEstimate(); // force initialization\n            if ((e = current) == null || e == fence)\n                return false;\n            current = successor(e);\n            action.accept(e);\n            if (tree.modCount != expectedModCount)\n                throw new ConcurrentModificationException();\n            return true;\n        }\n\n        public int characteristics() {\n            return (side == 0 ? 
Spliterator.SIZED : 0) |\n                    Spliterator.DISTINCT | Spliterator.SORTED | Spliterator.ORDERED;\n        }\n\n        @Override\n        public Comparator<Map.Entry<K, V>> getComparator() {\n            // Adapt or create a key-based comparator\n            if (tree.comparator != null) {\n                return Map.Entry.comparingByKey(tree.comparator);\n            }\n            else {\n                return (Comparator<Map.Entry<K, V>> & Serializable) (e1, e2) -> {\n                    @SuppressWarnings(\"unchecked\")\n                    Comparable<? super K> k1 = (Comparable<? super K>) e1.getKey();\n                    return k1.compareTo(e2.getKey());\n                };\n            }\n        }\n    }\n}\n\n\n\n\n\n\n\n\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/UniScopeMap.java",
    "content": "package com.pinecone.framework.unit;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\n\npublic interface UniScopeMap<K, V > extends ScopeMap<K, V > {\n    UniScopeMap<K, V >             parent();\n\n    Map<K, V >                     thisScope();\n\n    UniScopeMap<K, V >             setParent    ( UniScopeMap<K, V > that );\n\n    UniScopeMap<K, V >             setThisScope ( Map<K, V > that );\n\n    @Override\n    default boolean                isProgenitor() {\n        return this.parent() == null;\n    }\n\n    @Override\n    default void                   purge() {\n        this.setParent( null );\n        this.clear();\n    }\n\n    @Override\n    default void                   depurate() {\n        UniScopeMap<K, V > p = this.parent();\n\n        while ( p != null ) {\n            p.clear();\n            p = p.parent();\n        }\n\n        this.clear();\n    }\n\n    @Override\n    default void                   overrideTo ( Map<K, V > neo ) {\n        neo.putAll( this.thisScope() );\n        UniScopeMap<K, V > p = this.parent();\n        while ( p != null ) {\n            Map<K, V > pm = p.thisScope();\n            for( Map.Entry<K, V > o : pm.entrySet() ) {\n                neo.putIfAbsent( o.getKey(), o.getValue() );\n            }\n\n            p = p.parent();\n        }\n    }\n\n    @Override\n    default boolean                isScopeEmpty  () {\n        boolean b = this.isEmpty();\n\n        if( b ) {\n            UniScopeMap<K, V > p = this.parent();\n\n            while ( p != null ) {\n                b = p.isEmpty();\n                if( !b ) {\n                    return b;\n                }\n\n                p = p.parent();\n            }\n        }\n\n        return b;\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default ScopeMap<K, V >[]      ancestors     (){\n        ArrayList<ScopeMap<K, V > > l = new ArrayList<>();\n        ScopeTrees.groupByNodes( this, l );\n      
  return l.toArray( (ScopeMap<K, V >[]) new UniScopeMap[0] );\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default ScopeMap<K, V >[]      scopes        (){\n        ArrayList<ScopeMap<K, V > > l = new ArrayList<>();\n        l.add( this );\n        ScopeTrees.groupByNodes( this, l );\n        return l.toArray( (ScopeMap<K, V >[]) new UniScopeMap[0] );\n    }\n\n    default UniScopeMap<K, V >     progenitor () {\n        if( this.parent() == null ) {\n            return this;\n        }\n        else {\n            // Climb until the node whose parent is null: that node is the progenitor.\n            UniScopeMap<K, V > p = this.parent();\n            while ( p.parent() != null ) {\n                p = p.parent();\n            }\n\n            return p;\n        }\n    }\n\n    @Override\n    default ScopeMap<K, V >        getAll        ( Object key, List<V > ret ) {\n        V v = this.thisScope().get( key );\n        if( v != null ) {\n            ret.add( v );\n        }\n\n        UniScopeMap<K, V > p = this.parent();\n        while ( p != null ) {\n            Map<K, V > pm = p.thisScope();\n            v = pm.get( key );\n            if( v != null ) {\n                ret.add( v );\n            }\n\n            p = p.parent();\n        }\n\n        return this;\n    }\n\n    @Override\n    default ScopeMap<K, V >        removeAll  ( Object key ) {\n        this.thisScope().remove( key );\n\n        UniScopeMap<K, V > p = this.parent();\n        while ( p != null ) {\n            Map<K, V > pm = p.thisScope();\n            pm.remove( key );\n\n            p = p.parent();\n        }\n\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/UniScopeMaptron.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.io.Serializable;\nimport java.util.*;\n\npublic class UniScopeMaptron<K, V > implements PineUnit, Map<K, V >, UniScopeMap<K, V >, Cloneable, Serializable, Iterable<Map.Entry<K, V > > {\n    protected UniScopeMap<K, V >    mParent;   // This is the [[prototype]] link, same as Javascript.\n\n    protected Map<K, V>             mThisMap;\n\n    protected transient Set<Map.Entry<K,V> > entrySet;\n    protected transient Set<K>               scKeySet;\n    protected transient Collection<V>        scValues;\n\n    public UniScopeMaptron() {\n        this( true, null );\n    }\n\n    public UniScopeMaptron( Map<K, V > thisMap, UniScopeMap prototype ){\n        this.mThisMap = thisMap;\n        this.mParent  = prototype;\n    }\n\n    public UniScopeMaptron( boolean bLinked, UniScopeMap prototype ){\n        this( bLinked ? 
new LinkedHashMap<>() : new HashMap<>(), prototype );\n    }\n\n    public UniScopeMaptron( Map<K, V > thisMap ){\n        this( thisMap, null );\n    }\n\n    /** Scope Map **/\n    @Override\n    public UniScopeMap<K, V > parent() {\n        return this.mParent;\n    }\n\n    @Override\n    public Map<K, V >         thisScope(){\n        return this.mThisMap;\n    }\n\n    @Override\n    public UniScopeMap<K, V > setParent    ( UniScopeMap<K, V > that ) {\n        this.mParent = that;\n        return this;\n    }\n\n    @Override\n    public UniScopeMap<K, V > setThisScope ( Map<K, V > that ) {\n        this.mThisMap = that;\n        return this;\n    }\n\n    @Override\n    public ScopeMap<K, V >    elevate      ( Map<K, V > child ) {\n        UniScopeMaptron<K, V > sup = new UniScopeMaptron<>( this.mThisMap, this.mParent );\n        this.setThisScope( child );\n        this.setParent   ( sup   );\n        return this;\n    }\n\n    /** Basic Map **/\n    @Override\n    public int size() {\n        return this.mThisMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mThisMap.isEmpty();\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        boolean result = this.mThisMap.containsKey(key);\n        if ( !result && this.mParent != null ) {\n            result = this.mParent.containsKey( key );\n        }\n\n        return result;\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        boolean result = this.mThisMap.containsValue(value);\n        if ( !result && this.mParent != null ) {\n            result = this.mParent.containsValue( value );\n        }\n\n        return result;\n    }\n\n    @Override\n    public V get( Object key ) {\n        V val = this.mThisMap.get( key );\n        if ( val == null && this.mParent != null ) {\n            val = this.mParent.get( key );\n        }\n\n        return val;\n    }\n\n    @Override\n    public void putAll( Map<? extends K, ? 
extends V> m ) {\n        this.mThisMap.putAll( m );\n    }\n\n    public UniScopeMaptron xPutAll(Map<? extends K, ? extends V> m ) {\n        this.putAll(m);\n        return this;\n    }\n\n    @Override\n    public void clear() {\n        this.mThisMap.clear();\n    }\n\n    public UniScopeMaptron xClear() {\n        this.clear();\n        return this;\n    }\n\n    @Override\n    public V remove( Object key ) {\n        V v = this.mThisMap.remove(key);\n        if( v == null && this.mParent != null ) {\n            v = this.mParent.remove( key );\n        }\n\n        return v;\n    }\n\n    public UniScopeMaptron xRemove(Object key) {\n        this.remove(key);\n        return this;\n    }\n\n    @Override\n    public Set<K > keySet() {\n        return this.mThisMap.keySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return this.mThisMap.values();\n    }\n\n    @Override\n    public Set<Entry<K, V > > entrySet() {\n        return this.mThisMap.entrySet();\n    }\n\n    @Override\n    public Iterator<Map.Entry<K, V> > iterator() {\n        return this.mThisMap.entrySet().iterator();\n    }\n\n    @Override\n    public V put( K key, V value ) {\n        return this.mThisMap.put( key, value );\n    }\n\n    @Override\n    public V putIfAbsent( K key, V value ) {\n        return this.mThisMap.putIfAbsent( key, value );\n    }\n\n    @Override\n    public boolean hasOwnProperty ( Object key ) {\n        return this.mThisMap.containsKey( key );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.mThisMap );\n    }\n\n    @Override\n    public Iterator<Map.Entry<K, V> > scopeIterator() {\n        return new ScopeEntryIterator();\n    }\n\n    @Override\n    public Set<Map.Entry<K,V> > scopeEntrySet() {\n        Set<Map.Entry<K,V>> es;\n        return (es = this.entrySet) == null ? 
(this.entrySet = new ScopeEntrySet()) : es;\n    }\n\n    @Override\n    public Set<K > scopeKeySet() {\n        Set<K> ks = this.scKeySet;\n        if ( ks == null ) {\n            ks = new ScopeKeySet();\n            this.scKeySet = ks;\n        }\n        return ks;\n    }\n\n    @Override\n    public Collection<V > scopeValues() {\n        Collection<V> vs = this.scValues;\n        if ( vs == null ) {\n            vs = new ScopeValues();\n            this.scValues = vs;\n        }\n        return vs;\n    }\n\n    protected final class ScopeEntrySet extends AbstractSet<Map.Entry<K,V> > {\n        public final int size()                 { throw new UnsupportedOperationException(\"Iterator only.\"); }\n\n        public final void clear()               { UniScopeMaptron.this.clear(); }\n\n        public final Iterator<Map.Entry<K,V> > iterator() {\n            return new ScopeEntryIterator();\n        }\n\n        public final boolean contains( Object o ) {\n            if ( !(o instanceof Map.Entry) ) {\n                return false;\n            }\n            Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n            Object key = e.getKey();\n\n            V v = UniScopeMaptron.this.get(key);\n            return v != null && v.equals(e.getValue());\n        }\n\n        public final boolean remove( Object o ) {\n            if ( this.contains(o) ) {\n                Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n                Object key = e.getKey();\n\n                return UniScopeMaptron.this.remove(key) != null;\n            }\n            return false;\n        }\n\n        public final Spliterator<Map.Entry<K,V>> spliterator() {\n            return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT );\n        }\n    }\n\n    protected abstract class ScopeIterator {\n        protected Iterator<Map.Entry<K, V>> thisMapIterator;\n        protected UniScopeMap<K, V>         currentScope;\n\n        ScopeIterator() {\n         
   this.thisMapIterator = mThisMap.entrySet().iterator();\n            this.currentScope = UniScopeMaptron.this;\n        }\n\n        public boolean hasNext() {\n            while ( !this.thisMapIterator.hasNext() && this.currentScope.parent() != null ) {\n                this.currentScope    = this.currentScope.parent();\n                this.thisMapIterator = this.currentScope.thisScope().entrySet().iterator();\n            }\n            return this.thisMapIterator.hasNext();\n        }\n\n        protected Map.Entry<K, V> nextNode() {\n            if ( !this.hasNext() ) {\n                throw new NoSuchElementException();\n            }\n            return this.thisMapIterator.next();\n        }\n\n        public void remove() {\n            this.thisMapIterator.remove();\n        }\n    }\n\n    final class ScopeKeySet extends AbstractSet<K> {\n        public final int size()                 { throw new UnsupportedOperationException(\"Iterator only.\"); }\n\n        public final void clear()               { UniScopeMaptron.this.clear(); }\n\n        public final Iterator<K> iterator() {\n            return new ScopeKeyIterator();\n        }\n\n        public final boolean contains( Object o ) { return containsKey(o); }\n\n        public final boolean remove( Object key ) {\n            return UniScopeMaptron.this.remove(key) != null;\n        }\n\n        public final Spliterator<K> spliterator()  {\n            return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT );\n        }\n    }\n\n    protected final class ScopeKeyIterator extends ScopeIterator implements Iterator<K> {\n        public final K next() { return nextNode().getKey(); }\n    }\n\n    final class ScopeValues extends AbstractCollection<V> {\n        public final int size()                 { throw new UnsupportedOperationException(\"Iterator only.\"); }\n\n        public final void clear()               { UniScopeMaptron.this.clear(); }\n\n     
   public final Iterator<V> iterator() {\n            return new ScopeValueIterator();\n        }\n\n        public final boolean contains( Object o ) { return containsValue(o); }\n\n        public final Spliterator<V> spliterator() {\n            // size() is unsupported in this view, so a SIZED spliterator would throw on use; report unknown size instead.\n            return Spliterators.spliteratorUnknownSize( this.iterator(), Spliterator.ORDERED );\n        }\n    }\n\n    protected final class ScopeValueIterator extends ScopeIterator implements Iterator<V> {\n        public final V next() { return (V)nextNode().getValue(); }\n    }\n\n    protected final class ScopeEntryIterator extends ScopeIterator implements Iterator<Map.Entry<K,V>> {\n        public final Map.Entry<K,V> next() { return nextNode(); }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/Units.java",
    "content": "package com.pinecone.framework.unit;\n\nimport com.pinecone.framework.system.prototype.ObjectiveEvaluator;\nimport com.pinecone.framework.system.prototype.Objectom;\nimport com.pinecone.framework.util.lang.DynamicFactory;\n\nimport java.lang.reflect.Array;\nimport java.lang.reflect.InvocationTargetException;\n\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.TreeMap;\n\n@SuppressWarnings(\"unchecked\")\npublic final class Units {\n    public final static List EmptyList             = List.of();\n\n    public final static Collection EmptyCollection = Units.EmptyList;\n\n    public final static Set EmptySet               = Set.of();\n\n    public final static Map EmptyMap               = Map.of();\n\n    public static <E> Collection<E> emptyCollection() {\n        return Units.EmptyCollection;\n    }\n\n    public static <E> List<E> emptyList() {\n        return Units.EmptyList;\n    }\n\n    public static <E> Set<E> emptySet() {\n        return Units.EmptySet;\n    }\n\n    public static <K, V> Map<K, V> emptyMap() {\n        return Units.EmptyMap;\n    }\n\n\n    public static <T> Collection<T> spawnExtendParent( Collection<T > parent ) {\n        return Units.spawnExtendParent( parent, ArrayList.class );\n    }\n\n    public static <K, V > Map<K, V > spawnExtendParent( Map<K, V > parent ) {\n        return Units.spawnExtendParent( parent, TreeMap.class );\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public static <C > C spawnExtendParent( Object parent, Class<? 
> basic ) {\n        Object subList = null;\n        try{\n            subList = parent.getClass().getDeclaredConstructor().newInstance();\n        }\n        catch ( IllegalAccessException | InstantiationException | NoSuchMethodException | InvocationTargetException e ) {\n            try{\n                subList = basic.getDeclaredConstructor().newInstance();\n            }\n            catch ( IllegalAccessException | InstantiationException | NoSuchMethodException | InvocationTargetException e1 ) {\n                throw new IllegalArgumentException( \"Illegal 'basic' class given.\", e1 );\n            }\n        }\n        return (C)subList;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public static <C > C newInstance( Class<? > clazz, Object...args ) {\n        Object subList = null;\n        try{\n            if( args.length == 0 ) {\n                subList = clazz.getDeclaredConstructor().newInstance();\n            }\n            else {\n                subList = DynamicFactory.DefaultFactory.newInstance( clazz, null, args );\n            }\n        }\n        catch ( IllegalAccessException | InstantiationException | NoSuchMethodException | InvocationTargetException e1 ) {\n            throw new IllegalArgumentException( \"Illegal 'class' class given.\", e1 );\n        }\n        return (C)subList;\n    }\n\n\n    /**\n     * getFromMapStructure\n     * Similar to other dynamic languages(e.g. Javascript/PHP/Python/etc.), which is using to retrieve the value from the potential gettable object.\n     * @param mapLiked Any object that resembles the map operation (get/set/index/query/etc.) in form.\n     * @param key The string key( number-fmt/string-key/etc. 
) that uses to retrieve the value from the map-liked object.\n     * @param bIncludeIterable if true, for iterable object will uses the enum-index as the key.\n     * @param bIncludeAnyPotentialMapLiked if true, for other any potential map-liked objects will try get from bean-liked-object.\n     * @return null for not found, object for the value which is affiliated to the key.\n     */\n    public static Object getFromMapStructure ( Object mapLiked, String key, boolean bIncludeIterable, boolean bIncludeAnyPotentialMapLiked ) {\n        if( mapLiked instanceof Map ) {\n            return ((Map) mapLiked).get( key );\n        }\n        else if( mapLiked instanceof Objectom ) {\n            return ((Objectom) mapLiked).get( key );\n        }\n        else if( mapLiked instanceof List ) {\n            try{\n                return ((List) mapLiked).get( Integer.parseInt( key ) );\n            }\n            catch ( NumberFormatException e ) {\n                return null;\n            }\n        }\n        else if( mapLiked.getClass().isArray() ) {\n            try{\n                return Array.get( mapLiked, Integer.parseInt( key ) );\n            }\n            catch ( NumberFormatException e ) {\n                return null;\n            }\n        }\n        else if( mapLiked instanceof Iterable && bIncludeIterable ) {\n            try{\n                int k = Integer.parseInt( key );\n                int i = 0;\n                for( Object v : (Iterable) mapLiked ) {\n                    if( i == k ) {\n                        return v;\n                    }\n                    ++i;\n                }\n                return null;\n            }\n            catch ( NumberFormatException e ) {\n                return null;\n            }\n        }\n        else if( mapLiked.getClass().isPrimitive() ) {\n            return null;\n        }\n        else if( mapLiked.getClass().isEnum() ) {\n            return null;\n        }\n        else if( mapLiked 
instanceof Number ) {\n            return null;\n        }\n        else if( mapLiked instanceof String ) {\n            return null;\n        }\n\n        if( bIncludeAnyPotentialMapLiked ) {\n            return ObjectiveEvaluator.MapStructures.classGet( mapLiked, key );\n        }\n        return null;\n    }\n\n    public static Object getValueFromMapStructureRecursively( Object mapLiked, String key, String szSplitRegex, boolean bIncludeIterable, boolean bIncludeAnyPotentialMapLiked ) {\n        String[] keys = key.split( szSplitRegex );\n        Object value = mapLiked;\n        for ( String k : keys ) {\n            value = Units.getFromMapStructure(\n                    value, k, bIncludeIterable, bIncludeAnyPotentialMapLiked\n            );\n        }\n        return value;\n    }\n\n    public static Object getValueFromMapStructureRecursively( Object mapLiked, String key ) {\n        return Units.getValueFromMapStructureRecursively(\n                mapLiked, key, \"\\\\.|\\\\/\", true, true\n        );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/affinity/DataSharer.java",
    "content": "package com.pinecone.framework.unit.affinity;\n\nimport com.pinecone.framework.system.prototype.ObjectiveBean;\nimport com.pinecone.framework.system.prototype.Objectom;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface DataSharer extends Pinenut {\n    Object share ( Objectom that, boolean ignoreIfNoSetter ) ;\n\n    default Object share( Objectom that ) {\n        return this.share( that, true );\n    }\n\n    default Object shareFromBean( Object that ) {\n        return this.share( new ObjectiveBean( that ) );\n    }\n\n    default Object share( Object that ) {\n        return this.share( Objectom.wrap( that ) );\n    }\n\n\n    static Objectom warp( Object target, boolean isBean ) {\n        if( target instanceof Objectom ) {\n            return (Objectom)target;\n        }\n        else {\n            if( isBean ) {\n                return new ObjectiveBean( target );\n            }\n            else {\n                return Objectom.wrap( target );\n            }\n        }\n    }\n\n    static Object share( Objectom target, Objectom shared, boolean ignoreIfNoSetter ) {\n        for ( Object key : shared.keys() ) {\n            try{\n                target.set( key, shared.get( key ) );\n            }\n            catch ( IllegalArgumentException e ) {\n                if( !ignoreIfNoSetter ) {\n                    throw e;\n                }\n            }\n        }\n        return target.prototype().proto();\n    }\n\n    static Object share( Objectom target, Objectom shared ) {\n        return DataSharer.share( target, shared, true );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/affinity/GenericObjectomSharer.java",
    "content": "package com.pinecone.framework.unit.affinity;\n\nimport com.pinecone.framework.system.prototype.ObjectiveBean;\nimport com.pinecone.framework.system.prototype.Objectom;\n\npublic class GenericObjectomSharer implements DataSharer {\n    protected Objectom  mWarped;\n\n    public GenericObjectomSharer( Object target, boolean isBean ) {\n        this.mWarped = DataSharer.warp( target, isBean );\n    }\n\n    @Override\n    public Object share( Objectom that, boolean ignoreIfNoSetter ) {\n        return DataSharer.share( this.mWarped, that, ignoreIfNoSetter );\n    }\n\n    @Override\n    public Object share( Objectom that ) {\n        return this.share( that, true );\n    }\n\n    @Override\n    public Object shareFromBean( Object that ) {\n        return this.share( new ObjectiveBean( that ) );\n    }\n\n    @Override\n    public Object share( Object that ) {\n        return this.share( Objectom.wrap( that ) );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/affinity/ObjectOverrider.java",
    "content": "package com.pinecone.framework.unit.affinity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.List;\nimport java.util.Map;\n\npublic interface ObjectOverrider<K, V > extends Pinenut {\n\n    void override       ( Object instance, Object prototype, boolean bRecursive ) ;\n\n    void overrideObject ( Map<K, V> instance, Map<K, V> parentScope, boolean bRecursive ) ;\n\n    void overrideList   ( List<V> instanceList, List<V> templateList, boolean bRecursive ) ;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/affinity/RecursiveUnitOverrider.java",
    "content": "package com.pinecone.framework.unit.affinity;\n\nimport java.util.List;\nimport java.util.Map;\n\npublic class RecursiveUnitOverrider<K, V> implements ObjectOverrider<K, V> {\n    public RecursiveUnitOverrider() { }\n\n    @SuppressWarnings( \"unchecked\" )\n    public void override       ( Object instance, Object prototype, boolean bRecursive ) {\n        if ( instance != null && prototype != null ) {\n            if ( instance instanceof Map && prototype instanceof Map ) {\n                this.overrideObject( (Map<K, V>) instance, (Map<K, V>) prototype, bRecursive );\n            }\n            else if ( instance instanceof List && prototype instanceof List ) {\n                this.overrideList( (List<V>) instance, (List<V>) prototype, bRecursive );\n            }\n        }\n    }\n\n    public void overrideObject ( Map<K, V> instance, Map<K, V> parentScope, boolean bRecursive ) {\n        for ( Map.Entry<K, V > kv : parentScope.entrySet() ) {\n            K key = kv.getKey();\n            V templateValue = kv.getValue();\n\n            if ( !instance.containsKey( key ) ) {\n                instance.put( key, templateValue );\n            }\n            else {\n                Object instanceValue = instance.get( key );\n                this.override( instanceValue, templateValue, bRecursive );\n            }\n        }\n    }\n\n    public void overrideList   ( List<V> instanceList, List<V> templateList, boolean bRecursive ) {\n        for ( int i = 0; i < templateList.size(); ++i ) {\n            V templateElement = templateList.get( i );\n\n            if ( i < instanceList.size() ) {\n                V instanceElement = instanceList.get( i );\n\n                this.override( instanceElement, templateElement, bRecursive );\n            }\n            else {\n                instanceList.add( templateElement );\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/ArchBloomDistinctAudit.java",
    "content": "package com.pinecone.framework.unit.distinct;\n\nimport com.pinecone.framework.unit.Units;\n\nimport java.util.BitSet;\nimport java.util.Iterator;\nimport java.util.Set;\nimport java.util.Map;\nimport java.util.HashSet;\nimport java.util.Collection;\n\npublic abstract class ArchBloomDistinctAudit<E > implements DistinctAudit<E > {\n    protected Collection<Iterator<E > >      mIterators                        ;\n    protected Collection<Iterator<E > >      mIteratorsCopy                    ;\n    protected int                            mBitSize                          ;\n    protected Class<? extends Map >          mConflictMapType                  ;\n    protected Collection<E >                 mDistinctions                     ;\n    protected DistinctType                   mDistinctType                     ;\n\n\n    protected ArchBloomDistinctAudit( int bitSize, Collection<Iterator<E > > iterators, Collection<Iterator<E > > iteratorsCopy, Collection<E > distinctions, Class<? 
extends Map > conflictMapType, DistinctType distinctType ) {\n        this.mBitSize            = bitSize;\n        this.mIterators          = iterators;\n        this.mIteratorsCopy      = iteratorsCopy;\n        this.mDistinctions       = distinctions;\n        this.mConflictMapType    = conflictMapType;\n        this.mDistinctType       = distinctType;\n    }\n\n    protected Map<Integer, Set<E > > newConflictMap() {\n        return Units.newInstance( this.mConflictMapType );\n    }\n\n    @Override\n    public boolean hasOwnElement( E element ) {\n        return this.hasOwnElement( -1, element );\n    }\n\n    protected abstract boolean hasOwnElement( int id, E element );\n\n    protected void filterFromIterator( int id, Iterator<E > iterator ){\n        while ( iterator.hasNext() ) {\n            E element = iterator.next();\n\n            boolean owned = this.hasOwnElement( id, element );\n            if ( this.mDistinctType == DistinctType.SymmetricDistinct && !owned ) {\n                this.mDistinctions.add(element);\n            }\n            else if ( this.mDistinctType == DistinctType.SymmetricHomogeneity && owned ) {\n                this.mDistinctions.add(element);\n            }\n        }\n    }\n\n    protected void addBitSet( Iterator<E > iterator, BitSet bitset, Map<Integer, Set<E > > conflictMap ) {\n        while ( iterator.hasNext() ) {\n            E element = iterator.next();\n            int hash  = element.hashCode();\n            int index = Math.abs( hash % this.mBitSize );\n\n            if ( bitset.get( index ) ) {\n                conflictMap.computeIfAbsent(index, k -> new HashSet<>()).add( element );\n            }\n            else {\n                bitset.set( index );\n                conflictMap.computeIfAbsent(index, k -> new HashSet<>()).add( element );\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/DistinctAudit.java",
    "content": "package com.pinecone.framework.unit.distinct;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\n\npublic interface DistinctAudit<E > extends Pinenut {\n    boolean hasOwnElement( E element );\n\n    Collection<E > audit();\n\n    Collection<E> audit( Iterator<E> neoIter, Iterator<E> neoIterCopy );\n\n    default Collection<E> audit( Collection<E> neo ) {\n        return this.audit( neo.iterator(), neo.iterator() );\n    }\n\n    static <E> Collection<Iterator<E>> toIterators( Collection<Collection<E > > collections ) {\n        List<Iterator<E>> iterators = new ArrayList<>();\n        for ( Collection<E> collection : collections ) {\n            iterators.add(collection.iterator());\n        }\n        return iterators;\n    }\n\n    static <E> int getMaxSize( Collection<Collection<E>> collections ) {\n        int maxSize = 0;\n        for ( Collection<E> collection : collections ) {\n            maxSize = Math.max( maxSize, collection.size() );\n        }\n        return maxSize;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/DistinctType.java",
    "content": "package com.pinecone.framework.unit.distinct;\n\npublic enum DistinctType {\n    SymmetricDistinct       ( \"SymmetricDistinct\"    ),\n    SymmetricHomogeneity    ( \"SymmetricHomogeneity\" );\n\n    private final String value;\n    DistinctType( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/GenericDistinctAudit.java",
    "content": "package com.pinecone.framework.unit.distinct;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.unit.Units;\n\nimport java.util.Iterator;\nimport java.util.Set;\nimport java.util.Collection;\nimport java.util.ArrayList;\nimport java.util.HashSet;\n\npublic class GenericDistinctAudit<E> implements DistinctAudit<E> {\n    protected Collection<Iterator<E > > mIterators;\n    protected Collection<E>             mDistinctions;\n    protected DistinctType              mDistinctType;\n    protected Set<E>                    mCommonElements;\n    protected Set<E>                    mDistinctElements;\n    protected Collection<E >            mDuplicateElements;\n    protected Class<? >                 mSetType;\n\n    protected Set<E > newSet() {\n        return Units.newInstance( this.mSetType );\n    }\n\n    public GenericDistinctAudit( Collection<Iterator<E>> iterators, Collection<E> distinctions, DistinctType distinctType, Class<? > setType ) {\n        this.mIterators         = iterators;\n        this.mDistinctions      = distinctions;\n        this.mDistinctType      = distinctType;\n        this.mSetType           = setType;\n        this.mCommonElements    = this.newSet();\n        this.mDistinctElements  = this.newSet();\n\n\n        if( !( this.mDistinctions instanceof Set ) ) {\n            this.mDuplicateElements = new ArrayList<>();\n        }\n    }\n\n    public GenericDistinctAudit( Collection<Iterator<E>> iterators, Collection<E> distinctions, DistinctType distinctType ) {\n        this( iterators, distinctions, distinctType, HashSet.class );\n    }\n\n    public GenericDistinctAudit( Collection<Iterator<E>> iterators, DistinctType distinctType ) {\n        this( iterators, new ArrayList<>(), distinctType );\n    }\n\n    public GenericDistinctAudit( Collection<Collection<E > > collections, DistinctType distinctType, Collection<E> distinctions, Class<? 
> setType ) {\n        this( DistinctAudit.toIterators(collections), distinctions, distinctType, setType );\n    }\n\n    public GenericDistinctAudit( Collection<Collection<E > > collections, DistinctType distinctType, Collection<E> distinctions ) {\n        this( DistinctAudit.toIterators(collections), distinctions, distinctType );\n    }\n\n    public GenericDistinctAudit( DistinctType distinctType, Collection<Collection<E > > collections ) {\n        this( collections, distinctType, new ArrayList<>() );\n    }\n\n    @Override\n    public boolean hasOwnElement( E element ) {\n        return this.mCommonElements.contains( element );\n    }\n\n\n    protected void addInnerSet( Iterator<E> iterator ) {\n        Set<E > currentSet = this.newSet();\n        while ( iterator.hasNext() ) {\n            E elem = iterator.next();\n            if( currentSet.contains( elem ) ) {\n                if( this.mDuplicateElements != null ) {\n                    this.mDuplicateElements.add( elem );\n                }\n                continue;\n            }\n            else {\n                currentSet.add( elem );\n            }\n\n            if( this.mDistinctElements.contains( elem ) ) {\n                this.mCommonElements.add( elem );\n                this.mDistinctElements.remove( elem );\n            }\n            else if( !this.mCommonElements.contains( elem ) ){\n                this.mDistinctElements.add( elem );\n            }\n//            else if( !this.mDistinctElements.contains( elem ) /*&& !this.mCommonElements.contains( elem )*/ ) {\n//                this.mCommonElements.add( elem );\n//            }\n//            else {\n//                this.mDistinctElements.add( elem );\n//            }\n        }\n    }\n\n    protected Collection<E> applyInnerSetToDistinctions() {\n        if( this.mDuplicateElements == null ) {\n            if ( this.mDistinctType == DistinctType.SymmetricDistinct ) {\n                return this.mDistinctElements;\n            
}\n            else if ( this.mDistinctType == DistinctType.SymmetricHomogeneity ) {\n                return this.mCommonElements;\n            }\n        }\n        else {\n            if ( this.mDistinctType == DistinctType.SymmetricDistinct ) {\n                this.mDistinctions.addAll( this.mDistinctElements );\n            }\n            else if ( this.mDistinctType == DistinctType.SymmetricHomogeneity ) {\n                this.mDistinctions.addAll( this.mCommonElements );\n            }\n\n            for( E e : this.mDuplicateElements ) {\n                if ( this.mDistinctElements.contains( e ) ) {\n                    this.mDistinctions.add( e );\n                }\n                else if ( this.mCommonElements.contains( e ) ) {\n                    this.mDistinctions.add( e );\n                }\n            }\n        }\n\n        return this.mDistinctions;\n    }\n\n    @Override\n    public Collection<E> audit() {\n        for ( Iterator<E> iterator : this.mIterators ) {\n            this.addInnerSet( iterator );\n        }\n\n        return this.applyInnerSetToDistinctions();\n    }\n\n    @Override\n    public Collection<E> audit( Iterator<E> neoIter, @Nullable Iterator<E> dummy ) {\n        this.addInnerSet( neoIter );\n        this.mDistinctions.clear();\n        return this.applyInnerSetToDistinctions();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/GenericPrototypeDistinctAudit.java",
    "content": "package com.pinecone.framework.unit.distinct;\n\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.Collection;\nimport java.util.ArrayList;\n\npublic class GenericPrototypeDistinctAudit<E> extends GenericDistinctAudit<E > {\n    protected Iterator<E> mMasterProtoIterator;\n\n    public GenericPrototypeDistinctAudit( Iterator<E> masterProtoIterator, Collection<Iterator<E>> iterators, Collection<E> distinctions, DistinctType distinctType, Class<? > setType ) {\n        super( iterators, distinctions, distinctType, setType );\n\n        this.mMasterProtoIterator = masterProtoIterator;\n        while ( this.mMasterProtoIterator.hasNext() ) {\n            E elem = this.mMasterProtoIterator.next();\n            this.mCommonElements.add( elem );\n        }\n    }\n\n    public GenericPrototypeDistinctAudit( Iterator<E> masterProtoIterator, Collection<Iterator<E>> iterators, Collection<E> distinctions, DistinctType distinctType ) {\n        this( masterProtoIterator, iterators, distinctions, distinctType, HashSet.class );\n    }\n\n    public GenericPrototypeDistinctAudit( Iterator<E> masterProtoIterator, Collection<Iterator<E>> iterators, DistinctType distinctType ) {\n        this( masterProtoIterator, iterators, new ArrayList<>(), distinctType );\n    }\n\n    public GenericPrototypeDistinctAudit( Iterator<E> masterProtoIterator, Collection<Collection<E > > collections, DistinctType distinctType, Collection<E> distinctions, Class<? 
> setType ) {\n        this( masterProtoIterator, DistinctAudit.toIterators(collections), distinctions, distinctType, setType );\n    }\n\n    public GenericPrototypeDistinctAudit( Iterator<E> masterProtoIterator, Collection<Collection<E > > collections, DistinctType distinctType, Collection<E> distinctions ) {\n        this( masterProtoIterator, DistinctAudit.toIterators(collections), distinctions, distinctType );\n    }\n\n    public GenericPrototypeDistinctAudit( Iterator<E> masterProtoIterator, DistinctType distinctType, Collection<Collection<E > > collections ) {\n        this( masterProtoIterator, collections, distinctType, new ArrayList<>() );\n    }\n\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/MegaBloomDistinctAudit.java",
    "content": "package com.pinecone.framework.unit.distinct;\n\nimport java.util.BitSet;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Set;\nimport java.util.Map;\nimport java.util.HashMap;\nimport java.util.Collection;\nimport java.util.ArrayList;\n\npublic class MegaBloomDistinctAudit<E > extends ArchBloomDistinctAudit<E > implements DistinctAudit<E > {\n    protected List<BitSet >                  mBitSets      = new ArrayList<>() ;\n    protected List<Map<Integer, Set<E > > >  mConflictMaps = new ArrayList<>() ;\n\n    /**\n     * Constructs\n     *\n     * @param bitSize              The size of the bit array used for the bloom filter.\n     * @param iterators            A collection of iterators whose elements will be compared against the master prototype.\n     * @param iteratorsCopy        A collection of iterators that serve as copies for auditing purposes.\n     * @param distinctions         A collection to store the resulting distinctions found during the audit.\n     * @param distinctType         The type of distinction to perform, either finding symmetric distinct elements or symmetric homogeneous elements.\n     */\n    public MegaBloomDistinctAudit( int bitSize, Collection<Iterator<E > > iterators, Collection<Iterator<E > > iteratorsCopy, Collection<E > distinctions, Class<? 
extends Map > conflictMapType, DistinctType distinctType ) {\n        super( bitSize, iterators, iteratorsCopy, distinctions, conflictMapType, distinctType );\n    }\n\n    public MegaBloomDistinctAudit( int bitSize, Collection<Iterator<E > > iterators, Collection<Iterator<E > > iteratorsCopy, Collection<E > distinctions, DistinctType distinctType ) {\n        this( bitSize, iterators, iteratorsCopy, distinctions, HashMap.class, distinctType );\n    }\n\n    public MegaBloomDistinctAudit( int bitSize, Collection<Iterator<E > > iterators, Collection<Iterator<E > > iteratorsCopy, DistinctType distinctType ) {\n        this( bitSize, iterators, iteratorsCopy, new ArrayList<>(), distinctType );\n    }\n\n    public MegaBloomDistinctAudit( Collection<Iterator<E > > iterators, Collection<Iterator<E > > iteratorsCopy, DistinctType distinctType ) {\n        this( (int)1e6, iterators, iteratorsCopy, distinctType );\n    }\n\n    public MegaBloomDistinctAudit( Collection<Collection<E > > collections, DistinctType distinctType, Collection<E > distinctions ) {\n        this( (int)( DistinctAudit.getMaxSize( collections ) * (float)1.5 ), DistinctAudit.toIterators( collections ), DistinctAudit.toIterators( collections ), distinctions, HashMap.class, distinctType );\n    }\n\n    public MegaBloomDistinctAudit( int bitSize, Collection<Collection<E > > collections, DistinctType distinctType ) {\n        this( bitSize, DistinctAudit.toIterators( collections ), DistinctAudit.toIterators( collections ), new ArrayList<>(), distinctType );\n    }\n\n    public MegaBloomDistinctAudit( Collection<Collection<E > > collections, DistinctType distinctType  ) {\n        this( collections, distinctType, new ArrayList<>() );\n    }\n\n\n    @Override\n    protected boolean hasOwnElement( int id, E element ) {\n        int hash = element.hashCode();\n        int index = Math.abs(hash % this.mBitSize);\n        boolean owned = false;\n\n        for ( int j = 0; j < this.mIteratorsCopy.size(); ++j 
) {\n            if ( id < 0 || id != j ) {\n                BitSet                  bitmap = this.mBitSets.get(j);\n                Map<Integer, Set<E > > hashMap = this.mConflictMaps.get(j);\n\n                if ( bitmap.get(index) && hashMap.containsKey(index) && hashMap.get(index).contains(element) ) {\n                    owned = true;\n                    break;\n                }\n            }\n        }\n\n        return owned;\n    }\n\n    @Override\n    public Collection<E > audit() {\n        for ( int i = 0; i < this.mIterators.size(); ++i ) {\n            this.mBitSets.add( new BitSet( this.mBitSize ) );\n            this.mConflictMaps.add( this.newConflictMap() );\n        }\n\n        Iterator<Iterator<E > > iters = this.mIterators.iterator();\n        int i = 0;\n        while ( iters.hasNext() ) {\n            Iterator<E >          iterator     = iters.next();\n            BitSet                  bitset     = this.mBitSets.get(i);\n            Map<Integer, Set<E > > conflictMap = this.mConflictMaps.get(i);\n\n            this.addBitSet( iterator, bitset, conflictMap );\n\n            ++i;\n        }\n\n        iters = this.mIteratorsCopy.iterator();\n        i = 0;\n        while ( iters.hasNext() ) {\n            Iterator<E> iterator = iters.next();\n\n            this.filterFromIterator( i, iterator );\n\n            ++i;\n        }\n\n\n        return this.mDistinctions;\n    }\n\n    @Override\n    public Collection<E> audit( Iterator<E> neoIter, Iterator<E> neoIterCopy ) {\n        BitSet                  newBitset     = new BitSet( this.mBitSize );\n        Map<Integer, Set<E > > newConflictMap = this.newConflictMap();\n\n        this.addBitSet( neoIter, newBitset, newConflictMap );\n\n        this.mBitSets.add( newBitset );\n        this.mConflictMaps.add( newConflictMap );\n\n        int id = this.mBitSets.size() - 1;\n        while ( neoIterCopy.hasNext() ) {\n            this.filterFromIterator( id, neoIterCopy );\n        }\n\n        
return this.mDistinctions;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/MegaMergeDistinctAudit.java",
    "content": "package com.pinecone.framework.unit.distinct;\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.unit.Units;\n\nimport java.util.Iterator;\nimport java.util.Set;\nimport java.util.Collection;\nimport java.util.HashSet;\n\n/**\n * MegaMergeDistinctAudit\n * These two iterators should each include unique elements.\n * @param <E>\n */\npublic class MegaMergeDistinctAudit<E > implements DistinctAudit<E > {\n    protected Iterator<E >   mIterator1;\n    protected Iterator<E >   mIterator2;\n    protected int            mSegmentSize;\n    protected Set<E >        mDistinctSet;\n    protected Class<? >      mSetType;\n    protected Set<E>         mResultSet;\n\n    protected Set<E > newSet() {\n        return Units.newInstance( this.mSetType );\n    }\n\n    protected Set<E > newSet( Object...args ) {\n        return Units.newInstance( this.mSetType, args );\n    }\n\n\n    public MegaMergeDistinctAudit( Iterator<E> iterator1, Iterator<E> iterator2, int segmentSize, Class<? 
> setType ) {\n        this.mIterator1     = iterator1;\n        this.mIterator2     = iterator2;\n        this.mSegmentSize   = segmentSize;\n        this.mSetType       = setType;\n        this.mDistinctSet   = this.newSet();\n        this.mResultSet     = this.newSet();\n    }\n\n    public MegaMergeDistinctAudit( Iterator<E> iterator1, Iterator<E> iterator2, int segmentSize ) {\n        this( iterator1, iterator2, segmentSize, HashSet.class );\n    }\n\n    @Override\n    public Collection<E > audit() {\n        while ( this.mIterator1.hasNext() || this.mIterator2.hasNext() ) {\n            Set<E > segment1 = this.getNextSegment( this.mIterator1, this.mSegmentSize );\n            Set<E > segment2 = this.getNextSegment( this.mIterator2, this.mSegmentSize );\n\n            Set<E > processedSegment = this.xorSets( segment1, segment2 );\n            this.mResultSet = this.mergeResults( this.mResultSet, processedSegment );\n        }\n        return this.mResultSet;\n    }\n\n    protected Set<E > getNextSegment( Iterator<E> iterator, int segmentSize ) {\n        Set<E > segment = this.newSet();\n        int count = 0;\n        while ( iterator.hasNext() && count < segmentSize ) {\n            segment.add( iterator.next() );\n            count++;\n        }\n        return segment;\n    }\n\n    protected Set<E> xorSets( Set<E> set1, Set<E> set2 ) {\n        Set<E > result = this.newSet(set1);\n        for ( E element : set2 ) {\n            if ( !result.add(element) ) {\n                result.remove(element);\n            }\n        }\n        return result;\n    }\n\n    protected Set<E> mergeResults( Set<E> resultSet, Set<E> processedSegment ) {\n        return this.xorSets( resultSet, processedSegment );\n    }\n\n    @Override\n    public boolean hasOwnElement( E element ) {\n        throw new NotImplementedException();\n    }\n\n    @Override\n    public Collection<E> audit( Iterator<E> neoIter, @Nullable Iterator<E> dummy ) {\n        throw new 
NotImplementedException();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/distinct/MegaPrototypeBloomDistinctAudit.java",
    "content": "package com.pinecone.framework.unit.distinct;\n\nimport java.util.BitSet;\nimport java.util.Iterator;\nimport java.util.Set;\nimport java.util.Map;\nimport java.util.HashMap;\nimport java.util.Collection;\nimport java.util.ArrayList;\n\npublic class MegaPrototypeBloomDistinctAudit<E > extends ArchBloomDistinctAudit<E > implements DistinctAudit<E > {\n    protected Iterator<E >           mMasterProtoIterator;\n    protected BitSet                 mMasterBitSet;\n    protected Map<Integer, Set<E > > mMasterConflictMap;\n\n    public MegaPrototypeBloomDistinctAudit(\n            int bitSize, Iterator<E> masterProtoIterator, Collection<Iterator<E>> iterators, Collection<Iterator<E>> iteratorsCopy,\n            Collection<E> distinctions, Class<? extends Map> conflictMapType, DistinctType distinctType\n    ) {\n        super( bitSize, iterators, iteratorsCopy, distinctions, conflictMapType, distinctType );\n        this.mMasterProtoIterator = masterProtoIterator;\n        this.mMasterBitSet        = new BitSet(bitSize);\n        this.mMasterConflictMap   = this.newConflictMap();\n\n        this.addBitSet( this.mMasterProtoIterator, this.mMasterBitSet, this.mMasterConflictMap );\n    }\n\n    /**\n     * Constructs\n     *\n     * @param bitSize              The size of the bit array used for the bloom filter.\n     * @param masterProtoIterator  The master prototype iterator that will be used as the reference for comparison.\n     * @param iterators            A collection of iterators whose elements will be compared against the master prototype.\n     * @param iteratorsCopy        A collection of iterators that serve as copies for auditing purposes.\n     * @param distinctions         A collection to store the resulting distinctions found during the audit.\n     * @param distinctType         The type of distinction to perform, either finding symmetric distinct elements or symmetric homogeneous elements.\n     */\n    public 
MegaPrototypeBloomDistinctAudit( int bitSize, Iterator<E> masterProtoIterator, Collection<Iterator<E>> iterators, Collection<Iterator<E>> iteratorsCopy, Collection<E> distinctions, DistinctType distinctType ) {\n        this( bitSize, masterProtoIterator, iterators, iteratorsCopy, distinctions, HashMap.class, distinctType);\n    }\n\n    public MegaPrototypeBloomDistinctAudit( int bitSize, Iterator<E> masterProtoIterator, Collection<Iterator<E>> iterators, Collection<Iterator<E>> iteratorsCopy, DistinctType distinctType ) {\n        this( bitSize, masterProtoIterator, iterators, iteratorsCopy, new ArrayList<>(), distinctType);\n    }\n\n    public MegaPrototypeBloomDistinctAudit( Iterator<E> masterProtoIterator, Collection<Iterator<E>> iterators, Collection<Iterator<E>> iteratorsCopy, DistinctType distinctType ) {\n        this( (int) 1e6, masterProtoIterator, iterators, iteratorsCopy, distinctType );\n    }\n\n    public MegaPrototypeBloomDistinctAudit( int bitSize, Iterator<E> masterProtoIterator, Collection<Collection<E>> collections, DistinctType distinctType ) {\n        this( bitSize, masterProtoIterator, DistinctAudit.toIterators(collections), DistinctAudit.toIterators(collections), new ArrayList<>(), HashMap.class, distinctType);\n    }\n\n    public MegaPrototypeBloomDistinctAudit( int bitSize, Iterator<E> masterProtoIterator, Collection<Collection<E>> collections, DistinctType distinctType, Collection<E> distinctions ) {\n        this( bitSize, masterProtoIterator, DistinctAudit.toIterators(collections), DistinctAudit.toIterators(collections), distinctions, HashMap.class, distinctType);\n    }\n\n    public MegaPrototypeBloomDistinctAudit( Iterator<E> masterProtoIterator, Collection<Collection<E>> collections, DistinctType distinctType, Collection<E> distinctions ) {\n        this( (int) (DistinctAudit.getMaxSize(collections) * 1.5), masterProtoIterator, collections, distinctType, distinctions );\n    }\n\n    public MegaPrototypeBloomDistinctAudit( 
Iterator<E> masterProtoIterator, Collection<Collection<E>> collections, DistinctType distinctType ) {\n        this( (int) (DistinctAudit.getMaxSize(collections) * 1.5), masterProtoIterator, collections, distinctType );\n    }\n\n\n    @Override\n    protected boolean hasOwnElement( int id, E element ) {\n        int  hash = element.hashCode();\n        int index = Math.abs( hash % this.mBitSize );\n        return this.mMasterBitSet.get(index) && this.mMasterConflictMap.containsKey(index) && this.mMasterConflictMap.get(index).contains(element);\n    }\n\n\n    @Override\n    public Collection<E > audit() {\n        Iterator<Iterator<E > > iters = this.mIteratorsCopy.iterator();\n        int i = 0;\n        while ( iters.hasNext() ) {\n            this.filterFromIterator( i, iters.next() );\n            ++i;\n        }\n\n        return this.mDistinctions;\n    }\n\n    @Override\n    public Collection<E> audit( Iterator<E> neoIter, Iterator<E> neoIterCopy ) {\n        BitSet                  newBitset     = new BitSet( this.mBitSize );\n        Map<Integer, Set<E > > newConflictMap = this.newConflictMap();\n\n        this.addBitSet( neoIter, newBitset, newConflictMap );\n\n        this.filterFromIterator( -1, neoIterCopy );\n\n        return this.mDistinctions;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiCollectionMap.java",
    "content": "package com.pinecone.framework.unit.multi;\n\nimport java.util.Collection;\n\npublic interface MultiCollectionMap<K, V > extends MultiCollectionProxyMap<K, V, Collection<V > > {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiCollectionMaptron.java",
    "content": "package com.pinecone.framework.unit.multi;\n\nimport com.pinecone.framework.unit.MultiValueMaptron;\n\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\nimport java.util.ArrayList;\nimport java.util.Map;\nimport java.util.Set;\n\npublic class MultiCollectionMaptron<K, V > extends MultiValueMaptron<K, V, Collection<V > > implements MultiCollectionMap<K, V > {\n    private static final long serialVersionUID = 1897280134591921341L;\n\n    public MultiCollectionMaptron( int initialCapacity ) {\n        this( new LinkedHashMap<>( initialCapacity ) );\n    }\n\n    public MultiCollectionMaptron( Map<K, Collection<V > > otherMap, boolean bAssimilate ) {\n        super( otherMap, bAssimilate );\n    }\n\n    public MultiCollectionMaptron( Map<K, Collection<V > > otherMap ) {\n        this( otherMap, false );\n    }\n\n    public MultiCollectionMaptron() {\n        this( new LinkedHashMap<>(), true );\n    }\n\n    @Override\n    protected Collection<V> newCollection() {\n        return new ArrayList<>();\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiCollectionProxyMap.java",
    "content": "package com.pinecone.framework.unit.multi;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.unit.MultiValueMapper;\n\nimport java.util.Map;\nimport java.util.Collection;\n\npublic interface MultiCollectionProxyMap<K, V, U extends Collection<V > > extends Map<K, U >, MultiValueMapper<K, V > {\n    @Override\n    default V erase( Object key, V value ) {\n        Collection<V > more = this.get( key );\n\n        if( more.size() == 1 ) {\n            return this.remove( key ).iterator().next();\n        }\n\n        if( more.remove( value ) ){\n            return value;\n        }\n\n        return null;\n    }\n\n    @Override\n    default V get( Object k, V v ) {\n        Collection<V > more = this.get( k );\n        if( more.contains( v ) ){\n            return v;\n        }\n        return null;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default Collection<V > puts( K key, Collection<V > value ){\n        return this.put( key, (U)value );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    default void putsAll( Map<? extends K, ? extends Collection<V > > m ) {\n        this.putAll( (Map<? extends K, ? extends U >) m );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiHashSetMaptron.java",
    "content": "package com.pinecone.framework.unit.multi;\n\nimport com.pinecone.framework.unit.MultiValueMaptron;\n\nimport java.util.LinkedHashMap;\nimport java.util.LinkedHashSet;\nimport java.util.Set;\nimport java.util.Map;\n\npublic class MultiHashSetMaptron<K, V > extends MultiValueMaptron<K, V, Set<V > > implements MultiSetMap<K, V > {\n    public MultiHashSetMaptron() {\n        this( new LinkedHashMap<>() );\n    }\n\n    public MultiHashSetMaptron( int initialCapacity ) {\n        this( new LinkedHashMap<>( initialCapacity ) );\n    }\n\n    public MultiHashSetMaptron( Map<K, Set<V > > otherMap, boolean bAssimilate ) {\n        super( otherMap, bAssimilate );\n    }\n\n    public MultiHashSetMaptron( Map<K, Set<V > > otherMap ) {\n        this( otherMap, false );\n    }\n\n    @Override\n    protected Set<V> newCollection() {\n        return new LinkedHashSet<>();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiListMaptron.java",
    "content": "package com.pinecone.framework.unit.multi;\n\nimport com.pinecone.framework.unit.MultiValueMaptron;\nimport com.pinecone.framework.unit.MultiValueMap;\n\nimport java.util.LinkedHashMap;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\n\npublic class MultiListMaptron<K, V > extends MultiValueMaptron<K, V, List<V > > implements MultiValueMap<K, V > {\n    private static final long serialVersionUID = 3801124242820219131L;\n\n    public MultiListMaptron( Map<K, List<V > > otherMap, boolean bAssimilate ) {\n        super( otherMap, bAssimilate );\n    }\n\n    public MultiListMaptron( Map<K, List<V > > otherMap ) {\n        this( otherMap, false );\n    }\n\n    public MultiListMaptron( int initialCapacity ) {\n        this( new LinkedHashMap<>( initialCapacity ) );\n    }\n\n    public MultiListMaptron() {\n        this( new LinkedHashMap<>() );\n    }\n\n    @Override\n    protected List<V > newCollection() {\n        return new ArrayList<>();\n    }\n\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiSetMap.java",
    "content": "package com.pinecone.framework.unit.multi;\n\nimport java.util.Set;\n\npublic interface MultiSetMap<K, V > extends MultiCollectionProxyMap<K, V, Set<V > > {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/multi/MultiSetMaptron.java",
    "content": "package com.pinecone.framework.unit.multi;\n\nimport com.pinecone.framework.unit.LinkedTreeSet;\nimport com.pinecone.framework.unit.MultiValueMaptron;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.Set;\n\npublic class MultiSetMaptron<K, V > extends MultiValueMaptron<K, V, Set<V > > implements MultiSetMap<K, V > {\n    private static final long serialVersionUID = 1367280134591921341L;\n\n    public MultiSetMaptron( Map<K, Set<V > > otherMap, boolean bAssimilate ) {\n        super( otherMap, bAssimilate );\n    }\n\n    public MultiSetMaptron( Map<K, Set<V > > otherMap ) {\n        this( otherMap, false );\n    }\n\n    public MultiSetMaptron( int initialCapacity ) {\n        this( new LinkedHashMap<>( initialCapacity ) );\n    }\n\n    public MultiSetMaptron() {\n        this( new LinkedHashMap<>() );\n    }\n\n\n    @Override\n    protected Set<V> newCollection() {\n        return new LinkedTreeSet<>();\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/CollectedEntryDecoder.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Collection;\nimport java.util.Map;\n\npublic interface CollectedEntryDecoder<V > extends Pinenut {\n    Map<?, V > decode( Collection<Map.Entry<?, V > > collection ) ;\n\n    Map<?, V > evolve( Map<?, V > regressed ) ;\n\n    Class<? > getListClass();\n    CollectedEntryDecoder setListClass( Class<?> listClass );\n\n    Class<? > getMapClass();\n    CollectedEntryDecoder setMapClass( Class<?> mapClass );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/CollectedEntryEncoder.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\npublic interface CollectedEntryEncoder<V > extends Pinenut {\n    Collection<Map.Entry<?, V > > encode();\n\n    // To single layer map.\n    Map<?, V > regress( Class<? extends Map > stereotypedClass );\n\n    default Map<?, V > regress() {\n        return this.regress( LinkedHashMap.class );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/FamilyEntryNameEncoder.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.name.Namespace;\n\npublic interface FamilyEntryNameEncoder extends Pinenut {\n    FamilyEntryNameEncoder DefaultEncoder = new GenericNamespaceFamilyEntryNameEncoder();\n\n    String getSeparator();\n\n    boolean isNameForValue();\n\n    default String encode( UnitFamilyNode node ) {\n        return this.encode( node, this.getSeparator(), this.isNameForValue() );\n    }\n\n    default String encode( UnitFamilyNode node, boolean bNameForValue ) {\n        return this.encode( node, this.getSeparator(), bNameForValue );\n    }\n\n    String encode( UnitFamilyNode node, String szSeparator, boolean bNameForValue ) ;\n\n\n    default Namespace encodeNS( UnitFamilyNode node ) {\n        return this.encodeNS( node, this.getSeparator(), this.isNameForValue() );\n    }\n\n    default Namespace encodeNS( UnitFamilyNode node, boolean bNameForValue ) {\n        return this.encodeNS( node, this.getSeparator(), bNameForValue );\n    }\n\n    Namespace encodeNS( UnitFamilyNode node, String szSeparator, boolean bNameForValue ) ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/FamilyIterator.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Iterator;\n\npublic interface FamilyIterator<V > extends Iterator<UnitFamilyNode<?, V > >, Pinenut {\n    @Override\n    UnitFamilyNode<Object, V > next();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/GenericCollectedEntryDecoder.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.unit.Units;\n\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.LinkedHashSet;\nimport java.util.List;\nimport java.util.ArrayList;\n\npublic class GenericCollectedEntryDecoder<V > implements CollectedEntryDecoder<V > {\n    protected String          mszSeparator;\n    protected String          mszListType;\n    protected String          mszSetType;\n    protected String          mszTypeIndicator;\n    protected Class<?>        mListClass;\n    protected Class<?>        mMapClass;\n\n    public GenericCollectedEntryDecoder( String separator, String typeIndicator, String listType, String setType, Class<?> listClass, Class<?> mapClass ) {\n        this.mszSeparator     = separator;\n        this.mszTypeIndicator = typeIndicator;\n        this.mszListType      = typeIndicator + listType;\n        this.mszSetType       = typeIndicator + setType;\n        this.mListClass       = listClass;\n        this.mMapClass        = mapClass;\n    }\n\n    public GenericCollectedEntryDecoder( String separator, String typeIndicator, String listType, String setType ) {\n        this( separator, typeIndicator, listType, setType, ArrayList.class, LinkedHashMap.class );\n    }\n\n    public GenericCollectedEntryDecoder( String separator, String typeIndicator ) {\n        this( separator, typeIndicator, \"list\", \"set\" );\n    }\n\n    public GenericCollectedEntryDecoder() {\n        this( \"::\", \"$\" );\n    }\n\n    @Override\n    public Class<? > getListClass() {\n        return this.mListClass;\n    }\n\n    @Override\n    public CollectedEntryDecoder setListClass( Class<?> listClass ) {\n        this.mListClass = listClass;\n        return this;\n    }\n\n    @Override\n    public Class<? 
> getMapClass() {\n        return this.mMapClass;\n    }\n\n    @Override\n    public CollectedEntryDecoder setMapClass(Class<?> mapClass) {\n        this.mMapClass = mapClass;\n        return this;\n    }\n\n    protected Map<?, V > newMap() {\n        return Units.newInstance( this.mMapClass );\n    }\n\n    protected List<V > newList() {\n        return Units.newInstance( this.mListClass );\n    }\n\n\n    @Override\n    public Map<?, V > decode( Collection<Map.Entry<?, V > > collection ) {\n        Map<?, V > result = this.newMap();\n        for ( Map.Entry<?, V> entry : collection ) {\n            Object  key = entry.getKey();\n            V     value = entry.getValue(); // Ignored parent-type case scenarios.\n            this.addAndBootstrap( result, key, value );\n        }\n        return result;\n    }\n\n    @Override\n    public Map<?, V > evolve( Map<?, V > regressed ) {\n        Map<?, V > result = this.newMap();\n        for ( Map.Entry<?, V> entry : regressed.entrySet() ) {\n            Object  key = entry.getKey();\n            V     value = entry.getValue(); // Ignored parent-type case scenarios.\n            this.addAndBootstrap( result, key, value );\n        }\n        return result;\n    }\n\n    // Ignored parent-type case scenarios.\n    // `V` is for unit`s elements type.\n    @SuppressWarnings( \"unchecked\" )\n    protected void addAndBootstrap( Map<?, V > result, Object key, V value ) {\n        String szKey = key.toString();\n        String[] debris = szKey.split( this.mszSeparator );\n        Object current = result;\n        for ( int i = 1; i < debris.length - 1; ++i ) {\n            String part = debris[i];\n            if ( part.endsWith( this.mszListType ) ) {\n                part = part.substring( 0, part.length() - this.mszListType.length() );\n                current = this.affirmListExists( current, part );\n            }\n            else if ( part.endsWith( this.mszSetType ) ) {\n                part = part.substring( 0, 
part.length() - this.mszSetType.length() );\n                current = this.affirmSetExists( current, part );\n            }\n            else {\n                current = this.affirmMapExists( current, part );\n            }\n        }\n        \n        String lastPart = debris[ debris.length - 1 ];\n        if ( lastPart.endsWith( this.mszListType ) ) {\n            lastPart = lastPart.substring( 0, lastPart.length() - this.mszListType.length() );\n            current = this.affirmListExists( current, lastPart );\n            ((List) current).add( value );\n        }\n        else if ( lastPart.endsWith( this.mszSetType ) ) {\n            lastPart = lastPart.substring( 0, lastPart.length() - this.mszSetType.length() );\n            current = this.affirmSetExists( current, lastPart );\n            ((Set) current).add( value );\n        }\n        else {\n            if ( current instanceof Map ) {\n                ((Map) current).put( lastPart, value );\n            }\n            else if ( current instanceof List ) {\n                ((List) current).add( value );\n            }\n            else if ( current instanceof Set ) {\n                ((Set) current).add( value );\n            }\n        }\n    }\n\n    protected Object affirmLastList( Collection collection, Object last ) {\n        if ( collection.isEmpty() || !( last instanceof List ) ) {\n            List neo = this.newList();\n            collection.add( neo );\n            return neo;\n        }\n        return last;\n    }\n\n    protected Object affirmListExists( Object current, String part ) {\n        if ( current instanceof Map ) {\n            Map map = (Map) current;\n            if ( !map.containsKey(part) ) {\n                List neo = this.newList();\n                map.put( part, neo );\n                return neo;\n            }\n            return map.get( part );\n        }\n        else if ( current instanceof List ) {\n            List list = (List) current;\n            return 
this.affirmLastList( list, list.get( list.size() - 1 ) );\n        }\n        else if ( current instanceof Set ) {\n            Set set = (Set) current;\n            return this.affirmLastList( set, this.setLastElement( set ) );\n        }\n        return null;\n    }\n\n    protected Object setLastElement( Set that ) {\n        Object lastElement = null;\n        Iterator iterator = that.iterator();\n        while ( iterator.hasNext() ) {\n            lastElement = iterator.next();\n        }\n        return lastElement;\n    }\n\n    // Set must be Linked.\n    protected Object affirmLastSet( Collection collection, Object last ) {\n        if ( collection.isEmpty() || !( last instanceof Set ) ) {\n            Set newSet = new LinkedHashSet<>();\n            collection.add( newSet );\n            return newSet;\n        }\n        return last;\n    }\n\n    protected Object affirmSetExists( Object current, String part ) {\n        if ( current instanceof Map ) {\n            Map map = (Map) current;\n            if ( !map.containsKey( part ) ) {\n                Set neo = new LinkedHashSet<>();\n                map.put( part, neo );\n                return neo;\n            }\n            return map.get( part );\n        }\n        else if ( current instanceof List ) {\n            List list = (List) current;\n            return this.affirmLastSet( list, list.get( list.size() - 1 ) );\n        }\n        else if ( current instanceof Set ) {\n            Set set = (Set) current;\n            return this.affirmLastSet( set, this.setLastElement( set ) );\n        }\n        return null;\n    }\n\n    protected Object affirmLastMap( Collection collection, Object last ) {\n        if ( collection.isEmpty() || !( last instanceof Map) ) {\n            Map neo = this.newMap();\n            collection.add( neo );\n            return neo;\n        }\n        return last;\n    }\n\n    protected Object affirmMapExists( Object current, String part ) {\n        if ( current 
instanceof Map ) {\n            Map map = (Map) current;\n            if ( !map.containsKey( part ) ) {\n                Map neo = this.newMap();\n                map.put( part, neo );\n                return neo;\n            }\n            return map.get( part );\n        }\n        else if ( current instanceof List ) {\n            List list = (List) current;\n            return this.affirmLastMap( list, list.get( list.size() - 1 ) );\n        }\n        else if ( current instanceof Set ) {\n            Set set = (Set) current;\n            return this.affirmLastMap( set, this.setLastElement( set ) );\n        }\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/GenericCollectedEntryEncoder.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.unit.Units;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\npublic class GenericCollectedEntryEncoder<V > implements CollectedEntryEncoder<V > {\n    protected FamilyIterator<V >                        mFamilyIterator;\n    protected FamilyEntryNameEncoder                    mFamilyEntryNameEncoder;\n    protected Class<Collection<Map.Entry<?, V > > >     mStereotypedClass;\n\n    public GenericCollectedEntryEncoder( FamilyIterator<V >  iterator, FamilyEntryNameEncoder encoder, Class<Collection<Map.Entry<?, V > > > stereotypedClass ) {\n        this.mFamilyIterator         = iterator;\n        this.mFamilyEntryNameEncoder = encoder;\n        this.mStereotypedClass       = stereotypedClass;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public GenericCollectedEntryEncoder( FamilyIterator<V >  iterator, FamilyEntryNameEncoder encoder ) {\n        this( iterator, encoder, (Class) ArrayList.class );\n    }\n\n    public GenericCollectedEntryEncoder( FamilyIterator<V >  iterator ) {\n        this( iterator, new TypedNamespaceFamilyEntryNameEncoder( true ) );\n    }\n\n    @Override\n    public Collection<Map.Entry<?, V > > encode() {\n        Collection<Map.Entry<?, V > > collection;\n        try{\n            collection = Units.newInstance( this.mStereotypedClass );\n        }\n        catch ( IllegalArgumentException e ) {\n            collection = new ArrayList<>();\n        }\n\n        while( this.mFamilyIterator.hasNext() ) {\n            UnitFamilyNode<Object, V > node = this.mFamilyIterator.next();\n\n            String k = this.mFamilyEntryNameEncoder.encode( node );\n            collection.add( new KeyValue<>( k, node.getEntry().getValue() ));\n        }\n\n        return collection;\n    }\n\n\n    @Override\n    public Map<?, V > regress( 
Class<? extends Map > stereotypedClass ) {\n        Map<Object, V >  map;\n        try{\n            map = Units.newInstance( stereotypedClass );\n        }\n        catch ( IllegalArgumentException e ) {\n            map = new LinkedHashMap<>();\n        }\n\n        while( this.mFamilyIterator.hasNext() ) {\n            UnitFamilyNode<Object, V > node = this.mFamilyIterator.next();\n\n            String k = this.mFamilyEntryNameEncoder.encode( node );\n            map.put( k, node.getEntry().getValue() );\n        }\n\n        return map;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/GenericNamespaceFamilyEntryNameEncoder.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.framework.util.name.UniNamespace;\n\npublic class GenericNamespaceFamilyEntryNameEncoder implements FamilyEntryNameEncoder {\n    protected String    mszSeparator;\n    protected boolean   mbNameForValue;\n\n    public GenericNamespaceFamilyEntryNameEncoder( String szSeparator, boolean bNameForValue ) {\n        this.mszSeparator   = szSeparator;\n        this.mbNameForValue = bNameForValue;\n    }\n\n    public GenericNamespaceFamilyEntryNameEncoder() {\n        this( \"::\", false );\n    }\n\n    @Override\n    public String getSeparator() {\n        return this.mszSeparator;\n    }\n\n    @Override\n    public boolean isNameForValue() {\n        return this.mbNameForValue;\n    }\n\n    @Override\n    public String encode( UnitFamilyNode node ) {\n        return this.encode( node, this.mszSeparator, this.mbNameForValue );\n    }\n\n    @Override\n    public String encode( UnitFamilyNode node, String szSeparator, boolean bNameForValue ) {\n        if( node.getSelfKey() != null ) {\n            StringBuilder sb = new StringBuilder( this.wrapGetCurrentNodeName( node ) );\n            UnitFamilyNode p = node.parent();\n            while ( p != null ) {\n                Object k = this.wrapGetCurrentNodeName( p );\n\n                sb.insert(0, k + szSeparator );\n                p = p.parent();\n            }\n\n            String sz = sb.toString();\n            if( bNameForValue ) {\n                sz = sz + szSeparator + this.wrapGetCurrentEntryKey( node );\n            }\n            return sz;\n        }\n\n        if( bNameForValue ) {\n            return szSeparator + this.wrapGetCurrentEntryKey( node );\n        }\n        return null;\n    }\n\n    @Override\n    public Namespace encodeNS( UnitFamilyNode node ) {\n        return this.encodeNS( node, this.mszSeparator, this.mbNameForValue );\n    }\n\n    @Override\n    
public Namespace encodeNS( UnitFamilyNode node, String szSeparator, boolean bNameForValue ) {\n        if( node.getSelfKey() != null ) {\n            Namespace ns = new UniNamespace( this.wrapGetCurrentNodeName( node ), szSeparator );\n            UnitFamilyNode p = node.parent();\n            while ( p != null ) {\n                Object k = this.wrapGetCurrentNodeName( p );\n\n                Namespace root_p = ns;\n                while ( root_p.parent() != null ) {\n                    root_p = root_p.parent();\n                }\n                root_p.setParent( new UniNamespace( k.toString(), szSeparator ) );\n                p = p.parent();\n            }\n\n            if( bNameForValue ) {\n                ns = new UniNamespace( this.wrapGetCurrentEntryKey( node ), ns, szSeparator );\n            }\n            return ns;\n        }\n\n        if( bNameForValue ) {\n            return new UniNamespace( this.wrapGetCurrentEntryKey( node ), new UniNamespace( \"\", szSeparator ), szSeparator );\n        }\n        return null;\n    }\n\n    protected String transferName( String szBad ) {\n        return szBad;\n    }\n\n    protected String wrapGetCurrentNodeName( UnitFamilyNode node ) {\n        Object k = node.getSelfKey();\n        if( k == null ) {\n            k = \"\";\n        }\n        return this.transferName( k.toString() );\n    }\n\n    protected String wrapGetCurrentEntryKey( UnitFamilyNode node ) {\n        Object k = node.getEntry().getKey();\n        if( k == null ) {\n            k = \"\";\n        }\n        return this.transferName( k.toString() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/RecursiveEntryIterator.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.unit.KeyValue;\n\nimport java.util.Map;\nimport java.util.Iterator;\nimport java.util.Deque;\nimport java.util.ArrayDeque;\nimport java.util.Collection;\nimport java.util.NoSuchElementException;\n\npublic class RecursiveEntryIterator<V > implements Iterator<Map.Entry<?, V > > {\n    private Deque<Iterator<? > > mIterStack;\n    private Deque<Integer >      mIndexStack;\n    private DummyEntry           mNextEntry;\n    private boolean              mbIncludeCollection;\n\n    protected RecursiveEntryIterator ( boolean bIncludeCollection ) {\n        this.mbIncludeCollection = bIncludeCollection;\n        this.mIterStack          = new ArrayDeque<>();\n        this.mIndexStack         = new ArrayDeque<>();\n        this.mIndexStack.push( 0 );\n    }\n\n    public RecursiveEntryIterator( Map<?, V > map, boolean bIncludeCollection ) {\n        this( bIncludeCollection );\n        this.mIterStack.push( map.entrySet().iterator() );\n        this.advance();\n    }\n\n    public RecursiveEntryIterator( Map<?, V > map ) {\n        this( map, true );\n    }\n\n    public RecursiveEntryIterator( Collection<V> collection ) {\n        this( true );\n        this.mIterStack.push( collection.iterator() );\n        this.advance();\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    private void advance() {\n        this.mNextEntry = null;\n\n        while ( !this.mIterStack.isEmpty() ) {\n            Iterator<?> iterator = this.mIterStack.peek();\n\n            if ( iterator.hasNext() ) {\n                Object next = iterator.next();\n                if ( next instanceof Map.Entry ) {\n                    Map.Entry<?, V > entry = (Map.Entry<?, V >) next;\n                    Object value = entry.getValue();\n                    if ( value instanceof Map ) {\n                        this.mIterStack.push( ((Map<?, V >) value).entrySet().iterator() );\n                        
this.mIndexStack.push(0);\n                    }\n                    else if ( value instanceof Collection && this.mbIncludeCollection ) {\n                        this.mIterStack.push( ((Collection<?>) value).iterator() );\n                        this.mIndexStack.push(0);\n                    }\n                    else {\n                        //this.mNextEntry = new KeyValue<>( entry.getKey(), value );\n                        this.updateNextEntryCursor( entry.getKey(), (V)value );\n                        this.updateIndex();\n                        break;\n                    }\n                }\n                else if ( next instanceof Map ) {\n                    this.mIterStack.push( ((Map<?, V>) next).entrySet().iterator() );\n                    this.mIndexStack.push(0);\n                }\n                else if ( next instanceof Collection && this.mbIncludeCollection ) {\n                    this.mIterStack.push(((Collection<V>) next).iterator());\n                    this.mIndexStack.push(0);\n                }\n                else {\n                    //this.mNextEntry = new KeyValue<>( this.mIndexStack.peek(), next );\n                    this.updateNextEntryCursor( this.mIndexStack.peek(), (V)next );\n                    this.updateIndex();\n                    break;\n                }\n            }\n            else {\n                this.mIterStack.pop();\n                this.mIndexStack.pop();\n                this.updateIndex();\n            }\n        }\n    }\n\n    protected void updateNextEntryCursor( Object key, V value ) {\n        if( this.mNextEntry == null ) {\n            this.mNextEntry = new DummyEntry( key, value );\n        }\n\n        this.mNextEntry.apply( key, value );\n    }\n\n    protected void updateIndex() {\n        if ( !this.mIndexStack.isEmpty() ) {\n            int currentIndex = this.mIndexStack.pop();\n            this.mIndexStack.push( currentIndex + 1 );\n        }\n    }\n\n    @Override\n    public 
boolean hasNext() {\n        return this.mNextEntry != null;\n    }\n\n    @Override\n    public Map.Entry<?, V > next() {\n        if ( this.mNextEntry == null ) {\n            throw new NoSuchElementException();\n        }\n        Map.Entry<?, V > result = this.mNextEntry;\n        this.advance();\n        return result;\n    }\n\n    class DummyEntry extends KeyValue<Object, V > {\n        public DummyEntry( Object key, V value ) {\n            super( key, value );\n        }\n\n        public void setKey( Object key ) {\n            this.key = key;\n        }\n\n        public void apply( Object key, V value ) {\n            this.key   = key;\n            this.value = value;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/RecursiveFamilyIterator.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.util.Map;\nimport java.util.Iterator;\nimport java.util.Deque;\nimport java.util.ArrayDeque;\nimport java.util.Collection;\nimport java.util.NoSuchElementException;\n\npublic class RecursiveFamilyIterator<V > implements FamilyIterator<V > {\n    private Deque<Iterator<? > >         mIterStack;\n    private Deque<Integer >              mIndexStack;\n    private Deque<Object >               mParentKeyStack;\n    private Deque<DummyFamilyNode >      mParentStack;\n    private DummyFamilyNode              mNextNode;\n    private boolean                      mbIncludeCollection;\n\n    protected RecursiveFamilyIterator( boolean bIncludeCollection ) {\n        this.mbIncludeCollection = bIncludeCollection;\n        this.mIterStack          = new ArrayDeque<>();\n        this.mIndexStack         = new ArrayDeque<>();\n        this.mParentStack        = new ArrayDeque<>();\n        this.mParentKeyStack     = new ArrayDeque<>();\n        this.mIndexStack.push(0);\n        this.mParentStack.push( new DummyFamilyNode( null, null ) ); // Deque don't accept null, using dummy.\n        this.mParentKeyStack.push( \"\" );\n    }\n\n    public RecursiveFamilyIterator( Map<?, V > map, boolean bIncludeCollection ) {\n        this( bIncludeCollection );\n        this.mIterStack.push(map.entrySet().iterator());\n        this.advance();\n    }\n\n    public RecursiveFamilyIterator( Map<?, V > map ) {\n        this( map, true );\n    }\n\n    public RecursiveFamilyIterator( Collection<?> collection ) {\n        this( true );\n        this.mIterStack.push( collection.iterator() );\n        this.advance();\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    private void advance() {\n        Object selfKey   = null;\n        Object parentKey = null;\n        this.mNextNode   = null;\n\n        while ( 
!this.mIterStack.isEmpty() ) {\n            Iterator<?>   iterator = this.mIterStack.peek();\n            DummyFamilyNode parent = this.mParentStack.peek();\n            parentKey = this.mParentKeyStack.peek();\n            if( StringUtils.isEmpty( parentKey ) ) {\n                parentKey = null;\n            }\n            if( parent != null ) {\n                if( parent.getEntry() == null ) {\n                    parent  = null;\n                    selfKey = null;\n                }\n                else {\n                    selfKey = parent.getEntry().getKey();\n                }\n            }\n\n            if ( iterator.hasNext() ) {\n                Object next = iterator.next();\n                if ( next instanceof Map.Entry ) {\n                    Map.Entry<?, ?> entry = (Map.Entry<?, ?>) next;\n                    Object value = entry.getValue();\n                    if ( value instanceof Map ) {\n                        this.mIterStack.push(((Map<?, ?>) value).entrySet().iterator());\n                        this.mIndexStack.push(0);\n\n                        Object thisKey = entry.getKey();\n                        this.mParentKeyStack.push( thisKey );\n                        this.mParentStack.push( new DummyFamilyNode( parent, parentKey, thisKey, (V)entry.getValue() ) );\n                    }\n                    else if ( value instanceof Collection && this.mbIncludeCollection ) {\n                        this.mIterStack.push(((Collection<?>) value).iterator());\n                        this.mIndexStack.push(0);\n\n                        Object thisKey = entry.getKey();\n                        this.mParentKeyStack.push( thisKey );\n                        this.mParentStack.push( new DummyFamilyNode( parent, parentKey, thisKey, (V)entry.getValue() ) );\n                    }\n                    else {\n                        this.updateNextNodeCursor( parent, selfKey, entry.getKey(), (V)value );\n                        
this.updateIndex();\n                        break;\n                    }\n                }\n                else if ( next instanceof Map ) {\n                    Object thisKey = this.mIndexStack.getFirst();\n                    this.mParentKeyStack.push( thisKey );\n                    this.mParentStack.push(  new DummyFamilyNode( parent, parentKey, thisKey, (V)next ) );\n                    this.mIterStack.push(((Map<?, ?>) next).entrySet().iterator());\n                    this.mIndexStack.push(0);\n                }\n                else if ( next instanceof Collection && this.mbIncludeCollection ) {\n                    Object thisKey = this.mIndexStack.getFirst();\n                    this.mParentKeyStack.push( thisKey );\n                    this.mParentStack.push(  new DummyFamilyNode( parent, parentKey, thisKey, (V)next ) );\n                    this.mIterStack.push(((Collection<?>) next).iterator());\n                    this.mIndexStack.push(0);\n                }\n                else {\n                    this.updateNextNodeCursor( parent, selfKey, this.mIndexStack.peek(), (V)next );\n                    this.updateIndex();\n                    break;\n                }\n            }\n            else {\n                this.mIterStack.pop();\n                this.mIndexStack.pop();\n                this.mParentStack.pop();\n                this.mParentKeyStack.pop();\n                this.updateIndex();\n            }\n        }\n    }\n\n    protected void updateNextNodeCursor( UnitFamilyNode<Object, V > parent, Object selfKey, Object key, V value ) {\n        if ( this.mNextNode == null ) {\n            this.mNextNode = new DummyFamilyNode( parent, selfKey, key, value );\n        }\n        else {\n            this.mNextNode.apply( parent, selfKey, key, value );\n        }\n    }\n\n    protected void updateIndex() {\n        if ( !this.mIndexStack.isEmpty() ) {\n            int currentIndex = this.mIndexStack.pop();\n            
this.mIndexStack.push(currentIndex + 1);\n        }\n    }\n\n    @Override\n    public boolean hasNext() {\n        return this.mNextNode != null;\n    }\n\n    @Override\n    public UnitFamilyNode<Object, V > next() {\n        if ( this.mNextNode == null ) {\n            throw new NoSuchElementException();\n        }\n        DummyFamilyNode result = this.mNextNode;\n        this.advance();\n        return result;\n    }\n\n    class DummyEntry extends KeyValue<Object, V > {\n        public DummyEntry( Object key, V value ) {\n            super( key, value );\n        }\n\n        public void setKey( Object key ) {\n            this.key = key;\n        }\n\n        public void apply( Object key, V value ) {\n            this.key   = key;\n            this.value = value;\n        }\n    }\n\n    class DummyFamilyNode implements UnitFamilyNode<Object, V > {\n        UnitFamilyNode<Object, V > parent;\n        Object                     selfKey;\n        DummyEntry                 entry;\n\n        public DummyFamilyNode( UnitFamilyNode<Object, V > parent, Object selfKey ) {\n            this.entry   = null;\n            this.parent  = parent;\n            this.selfKey = selfKey;\n        }\n\n        public DummyFamilyNode( UnitFamilyNode<Object, V > parent, Object selfKey, Object entryKey, V entryValue ) {\n            this.entry   = new DummyEntry( entryKey, entryValue );\n            this.parent  = parent;\n            this.selfKey = selfKey;\n        }\n\n        public void setKey( Object key ) {\n            this.entry.setKey( key );\n        }\n\n        public void apply( UnitFamilyNode<Object, V > parent, Object selfKey, Object key, V value ) {\n            this.parent  = parent;\n            this.selfKey = selfKey;\n            this.entry.setKey( key );\n            this.entry.setValue( value );\n        }\n\n        @Override\n        public UnitFamilyNode<Object, V > parent() {\n            return this.parent;\n        }\n\n        @Override\n        
public Object getSelfKey() {\n            return this.selfKey;\n        }\n\n        @Override\n        public Map.Entry<Object, V > getEntry() {\n            return this.entry;\n        }\n\n        @Override\n        public String toString() {\n            return this.toJSONString();\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/TypedNamespaceFamilyEntryNameEncoder.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport java.util.List;\nimport java.util.Set;\n\npublic class TypedNamespaceFamilyEntryNameEncoder extends GenericNamespaceFamilyEntryNameEncoder {\n    protected String mszFmtTypeLabel;\n\n    public TypedNamespaceFamilyEntryNameEncoder( String szSeparator, boolean bNameForValue, String szFmtTypeLabel ) {\n        super( szSeparator, bNameForValue );\n        this.mszFmtTypeLabel = szFmtTypeLabel;\n    }\n\n    public TypedNamespaceFamilyEntryNameEncoder( String szSeparator, boolean bNameForValue ) {\n        this( szSeparator, bNameForValue, \"$\" );\n    }\n\n    public TypedNamespaceFamilyEntryNameEncoder( boolean bNameForValue ) {\n        this( \"::\", bNameForValue, \"$\" );\n    }\n\n    public TypedNamespaceFamilyEntryNameEncoder( String szFmtTypeLabel ) {\n        this( \"::\", false, szFmtTypeLabel );\n    }\n\n    public TypedNamespaceFamilyEntryNameEncoder() {\n        this( \"::\", false );\n    }\n\n    @Override\n    protected String transferName( String szBad ) {\n        return szBad; // TODO\n    }\n\n    protected String queryType( Object val ) {\n        if( val instanceof List ) {\n            return \"list\";\n        }\n        else if( val instanceof Set) {\n            return \"set\";\n        }\n\n        return \"\";\n    }\n\n    @Override\n    protected String wrapGetCurrentNodeName( UnitFamilyNode node ) {\n        Object k = node.getSelfKey();\n        if( k == null ) {\n            k = \"\";\n        }\n\n        String szType = \"\";\n        if( node.parent() != null ) {\n            szType = this.queryType( node.parent().getEntry().getValue() );\n            if( !szType.isEmpty() ) {\n                szType = this.mszFmtTypeLabel + szType;\n            }\n        }\n        return this.transferName( k.toString() + szType );\n    }\n\n    @Override\n    protected String wrapGetCurrentEntryKey( UnitFamilyNode node ) {\n        Object k = node.getEntry().getKey();\n    
    if( k == null ) {\n            k = \"\";\n        }\n        String szType = this.queryType( node.getEntry().getValue() );\n        if( !szType.isEmpty() ) {\n            szType = this.mszFmtTypeLabel + szType;\n        }\n        return this.transferName( k.toString() + szType );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/tabulate/UnitFamilyNode.java",
    "content": "package com.pinecone.framework.unit.tabulate;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\nimport java.util.Map;\n\npublic interface UnitFamilyNode<K, V > extends Pinenut {\n    UnitFamilyNode<K, V > parent();\n\n    K getSelfKey();\n\n    Map.Entry<K, V > getEntry();\n\n    default String namespacify( String szSeparator, boolean bNameForValue ) {\n        return FamilyEntryNameEncoder.DefaultEncoder.encode( this, szSeparator, bNameForValue );\n    }\n\n    default String namespacify( String szSeparator ) {\n        return this.namespacify( szSeparator, false );\n    }\n\n    default String namespacify( boolean bNameForValue ) {\n        return this.namespacify( \"::\", bNameForValue );\n    }\n\n    default String namespacify() {\n        return this.namespacify( false );\n    }\n\n    @Override\n    default String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"class\", this.className() ),\n                new KeyValue<>( \"key\", this.getSelfKey() ),\n                new KeyValue<>( \"entry\", this.getEntry() )\n        } );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/HeapTopper.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.util.AbstractCollection;\nimport java.util.PriorityQueue;\nimport java.util.Comparator;\nimport java.util.Collection;\nimport java.util.Iterator;\n\npublic class HeapTopper<E > extends AbstractCollection<E > implements Topper<E > {\n    private int                          mnTopmost;\n    private final PriorityQueue<E>       mHeap;\n    private final Comparator<? super E>  mComparator;\n\n    public HeapTopper( int nTopmost, Comparator<? super E> comparator ) {\n        this.mnTopmost    = nTopmost;\n        this.mComparator  = comparator;\n        this.mHeap        = new PriorityQueue<>( nTopmost, comparator );\n    }\n\n    public HeapTopper( int nTopmost ) {\n        this( nTopmost, new Comparator<E>() {\n            @Override\n            @SuppressWarnings( \"unchecked\" )\n            public int compare( E o1, E o2 ) {\n                return ( (Comparable<E >)o1 ).compareTo( o2 );\n            }\n        } );\n    }\n\n    @Override\n    public int size() {\n        return this.mHeap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mHeap.isEmpty();\n    }\n\n    @Override\n    public void clear() {\n        this.mHeap.clear();\n    }\n\n    @Override\n    public boolean add( E e ) {\n        if ( this.mHeap.size() < this.mnTopmost ) {\n            this.mHeap.offer(e);\n        }\n        else if ( this.mComparator.compare( e, this.mHeap.peek() ) > 0 ) {\n            this.mHeap.poll();\n            this.mHeap.offer(e);\n        }\n        return true;\n    }\n\n    @Override\n    public boolean addAll( Collection<? 
extends E> c ) {\n        for( E e : c ) {\n            this.add( e );\n        }\n        return true;\n    }\n\n    @Override\n    public boolean removeAll( Collection<?> c ) {\n        return this.mHeap.removeAll( c );\n    }\n\n    @Override\n    public boolean retainAll( Collection<?> c ) {\n        return this.mHeap.retainAll( c );\n    }\n\n    @Override\n    public boolean remove( Object o ) {\n        return this.mHeap.remove( o );\n    }\n\n    @Override\n    public Collection<E > topmost() {\n        return this.mHeap;\n    }\n\n    @Override\n    public Topper<E > setTopmostSize( int nTopmost ) {\n        this.mnTopmost = nTopmost;\n        while ( this.mHeap.size() > nTopmost ) {\n            this.mHeap.poll();\n        }\n        return this;\n    }\n\n    @Override\n    public int getTopmostSize() {\n        return this.mnTopmost;\n    }\n\n    @Override\n    public E nextEviction() {\n        return this.mHeap.peek();\n    }\n\n    @Override\n    public boolean willAccept( E e ) {\n        return this.mHeap.size() < this.mnTopmost || this.mComparator.compare( e, this.mHeap.peek() ) > 0;\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        if( key instanceof Number ) {\n            int i = ((Number) key).intValue();\n            return this.getTopmostSize() > i;\n        }\n        return false;\n    }\n\n    @Override\n    public boolean containsAll( Collection<?> c ) {\n        return this.mHeap.containsAll( c );\n    }\n\n    @Override\n    public boolean contains( Object o ) {\n        return this.topmost().contains( o );\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object elm ) {\n        return this.contains( elm );\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public Iterator<E > iterator() {\n        return 
this.topmost().iterator();\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/LinkedMultiTreeToptron.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\n\nimport java.util.Collection;\n\nimport java.util.NavigableMap;\nimport java.util.Set;\n\npublic class LinkedMultiTreeToptron<K, V > extends MultiTreeToptron<K, V > {\n    protected LinkedMultiTreeToptron( int nTopmost, NavigableMap<K, Collection<V > > coreMap, MultiToptronValueAdapter<V > valueAdapter, TopmostSelector<K, Collection<V > > selector ) {\n        super( nTopmost, coreMap, valueAdapter, selector );\n    }\n\n    public LinkedMultiTreeToptron( int nTopmost, MultiToptronValueAdapter<V > valueAdapter, TopmostSelector<K, Collection<V > > selector, boolean accessOrder ) {\n        this( nTopmost, new LinkedTreeMap<>( accessOrder ), valueAdapter, selector );\n    }\n\n    public LinkedMultiTreeToptron( int nTopmost, TopmostSelector<K, Collection<V > > selector, boolean accessOrder ) {\n        this( nTopmost, MultiTreeToptron.newLinkdedHashValueAdapter(), selector, accessOrder );\n    }\n\n    public LinkedMultiTreeToptron( int nTopmost, boolean accessOrder ) {\n        this( nTopmost, MultiTreeToptron.newLinkdedHashValueAdapter(), TopmostSelector.newGenericGreatestSelector( false ), accessOrder );\n    }\n\n    public LinkedMultiTreeToptron( int nTopmost ) {\n        this( nTopmost, true );\n    }\n\n    @Override\n    public LinkedTreeMap<K, Collection<V > > getMap() {\n        return ( LinkedTreeMap<K, Collection<V > > ) this.mTopNCoreMap;\n    }\n\n    @Override\n    public Set<Entry<K, Collection<V > > > bottomEntrySet(){\n        return this.getMap().treeEntrySet();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/LinkedTreeToptron.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\n\nimport java.util.Map;\nimport java.util.NavigableMap;\nimport java.util.Set;\n\npublic class LinkedTreeToptron<K, V > extends TreeToptron<K, V > {\n    protected LinkedTreeToptron( int nTopmost, NavigableMap<K, V > map, TopmostSelector<K, V > selector ) {\n        super( nTopmost, map, selector );\n    }\n\n    public LinkedTreeToptron( int nTopmost, TopmostSelector<K, V > selector ) {\n        this( nTopmost, new LinkedTreeMap<>( selector ), selector );\n    }\n\n    public LinkedTreeToptron( int nTopmost, TopmostSelector<K, V > selector, boolean accessOrder ) {\n        this( nTopmost, new LinkedTreeMap<>( selector, accessOrder ), selector );\n    }\n\n    public LinkedTreeToptron( int nTopmost, boolean accessOrder ) {\n        this( nTopmost, new LinkedTreeMap<>( accessOrder ), TopmostSelector.newGenericGreatestSelector( false ) );\n    }\n\n    public LinkedTreeToptron( int nTopmost ) {\n        this( nTopmost, true );\n    }\n\n    @Override\n    public LinkedTreeMap<K, V > getMap() {\n        return ( LinkedTreeMap<K, V > ) this.mTopNCoreMap;\n    }\n\n    @Override\n    public Set<Entry<K, V > > bottomEntrySet(){\n        return this.getMap().treeEntrySet();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/MultiToptronValueAdapter.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Collection;\n\npublic interface MultiToptronValueAdapter<E > extends Pinenut {\n    Collection<E > newCollection();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/MultiTreeToptron.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.unit.MultiValueMapper;\nimport com.pinecone.framework.unit.TreeMap;\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.unit.multi.MultiCollectionMaptron;\n\nimport java.util.Collection;\nimport java.util.LinkedHashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.NavigableMap;\n\n/**\n *  Pinecone Ursus For Java, MultiTreeToptron: For dynamic top-N scenarios.\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  This tron has not wrapped `Collection` into Readonly mode.\n *  WARNING, All methods which returns `Collection` should be operated-Readonly outside, otherwise the elementSize will got malfunction.\n *  *****************************************************************************************\n *  @param <K> The key, which should implements the comparable in principle.\n *  @param <V> The value\n */\npublic class MultiTreeToptron<K, V > implements ToptronMultiMap<K, V > {\n    protected static <MV >MultiToptronValueAdapter<MV > newLinkdedHashValueAdapter() {\n        return new MultiToptronValueAdapter<MV >() {\n            @Override\n            public Collection<MV > newCollection() {\n                return new LinkedHashSet<>();\n            }\n        };\n    }\n\n    protected final NavigableMap<K, Collection<V > >      mTopNCoreMap;\n    protected final MultiValueMapper<K, V >               mTopNMap;\n    protected int                                         mnTopmost;\n    protected int                                         mnElementSize;\n    protected TopmostSelector<K, Collection<V > >         mSelector;\n\n    protected MultiTreeToptron( int nTopmost, NavigableMap<K, Collection<V > > coreMap, 
MultiToptronValueAdapter<V > valueAdapter, TopmostSelector<K, Collection<V > > selector ) {\n        this.mTopNCoreMap  = coreMap;\n        this.mTopNMap      = new MultiCollectionMaptron<>( this.mTopNCoreMap, true ){\n            @Override\n            protected Collection<V> newCollection() {\n                return valueAdapter.newCollection();\n            }\n        };\n        this.mnTopmost     = nTopmost;\n        this.mnElementSize = 0;\n        this.mSelector     = selector;\n    }\n\n    public MultiTreeToptron( int nTopmost, MultiToptronValueAdapter<V > valueAdapter, TopmostSelector<K, Collection<V > > selector ) {\n        this( nTopmost, new TreeMap<>(), valueAdapter, selector );\n    }\n\n    public MultiTreeToptron( int nTopmost, TopmostSelector<K, Collection<V > > selector ) {\n        this( nTopmost, MultiTreeToptron.newLinkdedHashValueAdapter(), selector );\n    }\n\n    public MultiTreeToptron( int nTopmost ) {\n        this( nTopmost, TopmostSelector.newGenericGreatestSelector( false ) );\n    }\n\n    protected Map.Entry<K, Collection<V > > getMostEntry() {\n        return this.mSelector.getMostEntry( this.mTopNCoreMap );\n    }\n\n    @Override\n    public int getTopmostSize() {\n        return this.mnTopmost;\n    }\n\n    protected void trim( int nNewTopmost ) {\n        int det = this.mnTopmost - nNewTopmost;\n        if( det > 0 ) {\n            for ( int i = 0; i < det; ++i ) {\n                Map.Entry<K, Collection<V > > kv = this.getMostEntry();\n                this.erase( kv.getKey(), kv.getValue().iterator().next() );\n            }\n        }\n    }\n\n    @Override\n    public MultiTreeToptron<K, V > setTopmostSize( int nTopmost ) {\n        this.trim( nTopmost );\n        this.mnTopmost = nTopmost;\n        return this;\n    }\n\n    @Override\n    public K nextEvictionKey() {\n        Map.Entry<K, Collection<V > > preElimination = this.getMostEntry();\n        if( preElimination != null ) {\n            return 
preElimination.getKey();\n        }\n        return null;\n    }\n\n    // In this context, which means there are ONE-Single value will be inserted.\n    @Override\n    public boolean willAccept( K key ) {\n        if ( this.size() >= this.mnTopmost ) {\n            Collection<V > c = this.mTopNCoreMap.get( key );\n            if ( c == null ) {\n                Map.Entry<K, Collection<V > > estEntry = this.getMostEntry();\n                return this.mSelector.selects( estEntry, key );\n            }\n        }\n        return true;\n    }\n\n    @Override\n    public Map.Entry<K, V > nextEviction() {\n        Map.Entry<K, Collection<V > > preElimination = this.getMostEntry();\n        if( preElimination != null ) {\n            return new KeyValue<>( preElimination.getKey(), preElimination.getValue().iterator().next() );\n        }\n        return null;\n    }\n\n    @Override\n    public int size() {\n        return this.mTopNMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mTopNMap.isEmpty();\n    }\n\n    @Override\n    public void clear() {\n        this.mTopNMap.clear();\n        this.mnElementSize = 0;\n    }\n\n    @Override\n    public V get( Object k, V v ) {\n        return this.mTopNMap.get( k, v );\n    }\n\n    @Override\n    public Collection<V > get( Object key ) {\n        return this.mTopNMap.get( key );\n    }\n\n    @Override\n    public V getFirst( K key ) {\n        return this.mTopNMap.getFirst( key );\n    }\n\n    @Override\n    public V add( K key, V value ) {\n        if ( this.mnElementSize < this.mnTopmost ) {\n            V v = this.mTopNMap.add( key, value );\n            if( v != null ) {\n                ++this.mnElementSize;\n            }\n            return v;\n        }\n        else {\n            Collection<V > more = this.mTopNMap.get( key );\n            if ( more == null || !more.contains( value ) ) {\n                Map.Entry<K, Collection<V > > estEntry = this.getMostEntry();\n    
            if( this.mSelector.selects( estEntry, key ) ) {\n                    V oldestValue = estEntry.getValue().iterator().next();\n\n                    K           legacyKey = estEntry.getKey();\n                    Collection<V > legacy = this.mTopNMap.get( legacyKey );\n                    legacy.remove( oldestValue );\n                    if( legacy.isEmpty() ) {\n                        this.mTopNMap.remove( legacyKey );\n                    }\n                    --this.mnElementSize;\n                    return this.add( key, value );\n                }\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public V set( K k, V v ) {\n        Collection<V > legacy = this.mTopNMap.get( k );\n        if( legacy != null && !legacy.isEmpty() ) {\n            if( this.mTopNMap.set( k, v ) != null ){\n                this.mnElementSize -= legacy.size();\n                ++this.mnElementSize;\n                return v;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public void setAll( Map<K, V > m ) {\n        for( Map.Entry<K, V > kv : m.entrySet() ) {\n            this.add( kv.getKey(), kv.getValue() );\n        }\n    }\n\n    @Override\n    public Collection<V > put( K key, Collection<V > values ) {\n        Collection<V > ret = Units.spawnExtendParent( values );\n        for( V v : values ) {\n            if( this.add( key, v ) != null ) {\n                ret.add( v );\n            }\n        }\n\n        if( ret.isEmpty() ) {\n            return null;\n        }\n        return ret;\n    }\n\n    @Override\n    public Collection<V > putIfAbsent( K key, Collection<V > value ) {\n        if( !this.containsKey( key ) ) {\n            return this.put( key, value );\n        }\n        return null;\n    }\n\n    @Override\n    public void putAll( Map<? extends K, ? extends Collection<V > > m ) {\n        for( Map.Entry<? extends K, ? 
extends Collection<V > > kv : m.entrySet() ) {\n            Collection<V > c = kv.getValue();\n            for( V v : c ) {\n                this.add( kv.getKey(), v );\n            }\n        }\n    }\n\n    @Override\n    public V erase( Object key, V value ) {\n        Collection<V > legacy = this.mTopNMap.get( key );\n        if ( legacy != null && legacy.contains( value ) ) {\n            legacy.remove( value );\n            if ( legacy.isEmpty() ) {\n                this.mTopNMap.remove( key );\n            }\n            --this.mnElementSize;\n            return value;\n        }\n        return null;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public boolean remove( Object key, Object values ) {\n        if( values instanceof Collection ) {\n            Collection c = (Collection)values;\n            boolean b = true;\n            for( Object v : c ) {\n                b = b & this.erase( key, (V)v ) != null;\n            }\n            return b;\n        }\n        return false;\n    }\n\n    @Override\n    public Collection<V > remove( Object key ) {\n        Collection<V > legacy = this.mTopNMap.remove( key );\n        this.mnElementSize -= legacy.size();\n        return legacy;\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.mTopNMap.containsKey( key );\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return false;\n    }\n\n    public V update( K oldKey, K newKey, V value ) {\n        this.remove( oldKey, value );\n        return this.add( newKey, value );\n    }\n\n    public Collection<V > update( K oldKey, K newKey ) {\n        Collection<V > legacy = this.mTopNMap.get( oldKey );\n        this.mTopNMap.remove( oldKey );\n        return this.mTopNMap.puts( newKey, legacy );\n    }\n\n    public int elementSize() {\n        return this.mnElementSize;\n    }\n\n    @Override\n    public Set<K > keySet() {\n        return this.mTopNMap.keySet();\n   
 }\n\n    @Override\n    public Map<K, V > toSingleValueMap() {\n        return this.mTopNMap.toSingleValueMap();\n    }\n\n    @Override\n    public Collection<Entry<K, V > > collection() {\n        return this.mTopNMap.collection();\n    }\n\n    @Override\n    public Collection<V > collectionValues() {\n        return this.mTopNMap.collectionValues();\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Set<Map.Entry<K, Collection<V > > > entrySet() {\n        return (Set<Map.Entry<K, Collection<V > > >)this.mTopNMap.entrySet();\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Collection<Collection<V > > values() {\n        return (Collection<Collection<V > >)this.mTopNMap.values();\n    }\n\n    public NavigableMap<K, Collection<V > > getMap() {\n        return this.mTopNCoreMap;\n    }\n\n    public Set<Map.Entry<K, Collection<V > > > topEntrySet(){\n        return this.getMap().descendingMap().entrySet();\n    }\n\n    public Set<Map.Entry<K, Collection<V > > > bottomEntrySet(){\n        return this.getMap().entrySet();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/TopmostSelector.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Comparator;\nimport java.util.Map;\nimport java.util.NavigableMap;\n\npublic interface TopmostSelector<K, V > extends Pinenut, Comparator<K > {\n    Map.Entry<K, V > getMostEntry( NavigableMap<K,V > map );\n\n    // Selecting candidate if it is meets qualification.\n    default boolean selects ( Map.Entry<K, V > most, Map.Entry<K, V > candidate ) {\n        return this.selects( most, candidate.getKey() );\n    }\n\n    boolean selects ( Map.Entry<K, V > most, K candidateKey );\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    default int compare( Object o1, Object o2 ) {\n        return ( (Comparable)o1 ).compareTo( o2 );\n    }\n\n\n    // Selecting greatest top-N elements\n    static <K, V > TopmostSelector<K, V > newGenericGreatestSelector( boolean bInsertDirectly ) {\n        return new TopmostSelector<>() {\n            @Override\n            public Map.Entry<K, V > getMostEntry( NavigableMap<K, V > map ) {\n                return map.firstEntry();\n            }\n\n            @Override\n            @SuppressWarnings(\"unchecked\")\n            public boolean selects(  Map.Entry<K, V > most, K candidateKey ) {\n                if( bInsertDirectly ) {\n                    return true;\n                }\n                return ( (Comparable<K >)most.getKey() ).compareTo( candidateKey ) < 0; // most < candidate\n            }\n        };\n    }\n\n    // Selecting smallest top-N elements\n    static <K, V > TopmostSelector<K, V > newGenericSmallestSelector( boolean bInsertDirectly ) {\n        return new TopmostSelector<>() {\n            @Override\n            public Map.Entry<K, V > getMostEntry( NavigableMap<K, V > map ) {\n                return map.lastEntry();\n            }\n\n            @Override\n            @SuppressWarnings(\"unchecked\")\n            public boolean selects(  Map.Entry<K, V > most, K 
candidateKey ) {\n                if( bInsertDirectly ) {\n                    return true;\n                }\n                return ( (Comparable<K >)most.getKey() ).compareTo( candidateKey ) > 0; // most > candidate\n            }\n        };\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/Topper.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\nimport java.util.Collection;\nimport java.util.Iterator;\n\npublic interface Topper<E > extends PineUnit, Collection<E > {\n    @Override\n    int size();\n\n    @Override\n    boolean isEmpty();\n\n    @Override\n    void clear();\n\n    @Override\n    boolean add( E e );\n\n    Collection<E > topmost();\n\n    Topper<E > setTopmostSize( int nTopmost );\n\n    int getTopmostSize();\n\n    E nextEviction();\n\n    boolean willAccept( E e );\n\n    @Override\n    default Iterator<E > iterator() {\n        return this.topmost().iterator();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/Toptron.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\nimport java.util.NavigableMap;\n\npublic interface Toptron<K, V > extends PineUnit {\n    int size();\n\n    boolean isEmpty();\n\n    void clear();\n\n    boolean containsKey( Object key );\n\n    boolean containsValue( Object val );\n\n    Object get( Object key );\n\n    V add( K key, V value );\n\n    Toptron<K, V > setTopmostSize(int nTopmost );\n\n    int getTopmostSize();\n\n    NavigableMap<K, ? > getMap();\n\n    V update( K oldKey, K newKey, V value ) ;\n\n    Object update( K oldKey, K newKey ) ;\n\n    K nextEvictionKey();\n\n    boolean willAccept( K key );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/ToptronMap.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport java.util.Map;\nimport java.util.NavigableMap;\n\npublic interface ToptronMap<K, V > extends Map<K, V >, Toptron<K, V > {\n    @Override\n    V get( Object key );\n\n    @Override\n    default V put( K key, V value ) {\n        return this.add( key, value );\n    }\n\n    @Override\n    V update( K oldKey, K newKey ) ;\n\n    @Override\n    ToptronMap<K, V > setTopmostSize( int nTopmost );\n\n    @Override\n    NavigableMap<K, V > getMap();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/ToptronMultiMap.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.unit.multi.MultiCollectionMap;\n\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.NavigableMap;\n\npublic interface ToptronMultiMap<K, V > extends MultiCollectionMap<K, V >, Toptron<K, V > {\n    Collection<V > get( Object key );\n\n    ToptronMultiMap<K, V > setTopmostSize( int nTopmost );\n\n    NavigableMap<K, Collection<V > > getMap();\n\n    @Override\n    Collection<V > update( K oldKey, K newKey ) ;\n\n    Map.Entry<K, V > nextEviction();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/top/TreeToptron.java",
    "content": "package com.pinecone.framework.unit.top;\n\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.TreeMap;\nimport java.util.Set;\nimport java.util.NavigableMap;\n\npublic class TreeToptron<K, V > implements ToptronMap<K, V > {\n    protected final NavigableMap<K, V >                 mTopNCoreMap;\n    protected int                                       mnTopmost;\n    protected TopmostSelector<K, V >                    mSelector;\n\n    public TreeToptron( int nTopmost, NavigableMap<K, V > map, TopmostSelector<K, V > selector ) {\n        this.mnTopmost    = nTopmost;\n        this.mTopNCoreMap = map;\n        this.mSelector    = selector;\n    }\n\n    public TreeToptron( int nTopmost, TopmostSelector<K, V > selector ) {\n        this( nTopmost, new TreeMap<>( selector ), selector );\n    }\n\n    public TreeToptron( int nTopmost ) {\n        this( nTopmost, TopmostSelector.newGenericSmallestSelector( false ) );\n    }\n\n    protected Map.Entry<K, V > getMostEntry() {\n        return this.mSelector.getMostEntry( this.mTopNCoreMap );\n    }\n\n    protected void trim( int nNewTopmost ) {\n        int det = this.mnTopmost - nNewTopmost;\n        if( det > 0 ) {\n            for ( int i = 0; i < det; ++i ) {\n                this.remove( this.getMostEntry().getKey() );\n            }\n        }\n    }\n\n    @Override\n    public TreeToptron<K, V > setTopmostSize( int nTopmost ) {\n        this.trim( nTopmost );\n        this.mnTopmost = nTopmost;\n        return this;\n    }\n\n    @Override\n    public int getTopmostSize() {\n        return this.mnTopmost;\n    }\n\n    @Override\n    public int size() {\n        return this.mTopNCoreMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mTopNCoreMap.isEmpty();\n    }\n\n    @Override\n    public void clear() {\n        this.mTopNCoreMap.clear();\n    }\n\n    @Override\n    public boolean 
containsKey( Object key ) {\n        return this.mTopNCoreMap.containsKey(key);\n    }\n\n    @Override\n    public boolean containsValue( Object val ) {\n        return this.mTopNCoreMap.containsValue(val);\n    }\n\n    @Override\n    public V get( Object key ) {\n        return this.mTopNCoreMap.get( key );\n    }\n\n    @Override\n    public void putAll( Map<? extends K, ? extends V> m ) {\n        for( Map.Entry<? extends K, ? extends V > kv : m.entrySet() ) {\n            this.put( kv.getKey(), kv.getValue() );\n        }\n    }\n\n    @Override\n    public K nextEvictionKey() {\n        Map.Entry<K, V > preElimination = this.getMostEntry();\n        if( preElimination != null ) {\n            return preElimination.getKey();\n        }\n        return null;\n    }\n\n    @Override\n    public boolean willAccept( K key ) {\n        if ( this.size() >= this.mnTopmost ) {\n            V v = this.mTopNCoreMap.get( key );\n            if ( v == null ) {\n                Map.Entry<K, V > estEntry = this.getMostEntry();\n                return this.mSelector.selects( estEntry, key );\n            }\n        }\n        return true;\n    }\n\n    @Override\n    public V add( K key, V value ) {\n        if ( this.size() < this.mnTopmost ) {\n            return this.mTopNCoreMap.put( key, value );\n        }\n        else {\n            V v = this.mTopNCoreMap.get( key );\n            if ( v == null ) {\n                Map.Entry<K, V > estEntry = this.getMostEntry();\n                if( this.mSelector.selects( estEntry, key ) ) {\n                    this.mTopNCoreMap.remove( estEntry.getKey() );\n                    return this.put( key, value );\n                }\n            }\n            return null;\n        }\n    }\n\n    @Override\n    public V putIfAbsent( K key, V value ) {\n        if ( this.size() < this.mnTopmost ) {\n            return this.mTopNCoreMap.putIfAbsent( key, value );\n        }\n        else {\n            V v = this.mTopNCoreMap.get( key 
);\n            if ( v == null ) {\n                return this.put( key, value );\n            }\n            return null;\n        }\n    }\n\n    @Override\n    public V remove( Object key ) {\n        return this.mTopNCoreMap.remove( key );\n    }\n\n    @Override\n    public V update( K oldKey, K newKey, V value ) {\n        this.remove( oldKey );\n        return this.put( newKey, value );\n    }\n\n    @Override\n    public V update( K oldKey, K newKey ) {\n        V legacy = this.mTopNCoreMap.get( oldKey );\n        this.mTopNCoreMap.remove( oldKey );\n        return this.mTopNCoreMap.put( newKey, legacy );\n    }\n\n\n    @Override\n    public Set<Entry<K, V > > entrySet() {\n        return this.mTopNCoreMap.entrySet();\n    }\n\n    @Override\n    public Set<K > keySet() {\n        return this.mTopNCoreMap.keySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return this.mTopNCoreMap.values();\n    }\n\n    @Override\n    public NavigableMap<K, V > getMap() {\n        return this.mTopNCoreMap;\n    }\n\n    public Set<Map.Entry<K, V > > topEntrySet(){\n        return this.getMap().descendingMap().entrySet();\n    }\n\n    public Set<Map.Entry<K, V > > bottomEntrySet(){\n        return this.getMap().entrySet();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object key ) {\n        return this.containsKey( key );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/AbstractTrieMap.java",
    "content": "package com.pinecone.framework.unit.trie;\n\npublic abstract class AbstractTrieMap<K, V> implements TrieMap<K, V> {\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/ArchTrieNode.java",
    "content": "package com.pinecone.framework.unit.trie;\n\npublic abstract class ArchTrieNode<V > implements TrieNode<V > {\n    protected String                mszKey;\n    protected TrieNode<V>           mParent;\n    protected TrieMap<String, V >   mTrieMap;\n\n    @SuppressWarnings( \"unchecked\" )\n    public <K extends String > ArchTrieNode( String szKey, TrieNode<V> parent, TrieMap<K, V > map ) {\n        this.mParent  = parent;\n        this.mTrieMap = (TrieMap<String, V>) map;\n        this.mszKey   = szKey;\n    }\n\n    @Override\n    public boolean isLeaf() {\n        return false;\n    }\n\n    @Override\n    public String getNodeName() {\n        return this.mszKey;\n    }\n\n    @Override\n    public String getNamespace() {\n        TrieNode<V> p = this.mParent;\n        StringBuilder sb = new StringBuilder();\n        String separator = this.getTrieMap().getSeparator();\n\n        if( p == null || p.parent() == null ) {\n            return null;\n        }\n\n        while ( true ) {\n            if ( sb.length() > 0 ) {\n                sb.insert( 0, separator );\n            }\n            sb.insert(0, p.getNodeName());\n            p = p.parent();\n            if( p == null || p.parent() == null ) {\n                break;\n            }\n        }\n\n        return sb.toString();\n    }\n\n    @Override\n    public String getFullName() {\n        String ns = this.getNamespace();\n        if( ns != null ) {\n            return ns + this.getTrieMap().getSeparator() + this.getNodeName();\n        }\n        return this.getNodeName();\n    }\n\n    @Override\n    public TrieNode<V> parent() {\n        return this.mParent;\n    }\n\n    @Override\n    public TrieMap<String, V> getTrieMap() {\n        return this.mTrieMap;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/DirectoryNode.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic interface DirectoryNode<V > extends TrieNode<V > {\n    Map<String, TrieNode<V > > children();\n\n    Map<String, TrieNode<V > > segmentMap();\n\n    default DirectoryNode<V > getDirectory( String szSegName ) {\n        TrieNode<V > n = this.get( szSegName );\n        if( n != null ) {\n            return n.evinceDirectory();\n        }\n        return null;\n    }\n\n    default ValueNode<V > getValue( String szSegName ) {\n        TrieNode<V > n = this.get( szSegName );\n        if( n != null ) {\n            return n.evinceValue();\n        }\n        return null;\n    }\n\n    default ReparseNode getReparse( String szSegName ) {\n        TrieNode n = this.get( szSegName );\n        if( n != null ) {\n            return n.evinceReparse();\n        }\n        return null;\n    }\n\n    TrieNode<V > get( String szSegName );\n\n    void put( String szSegName, TrieNode<V > node );\n\n    void putIfAbsent( String szSegName, TrieNode<V > node );\n\n    TrieNode<V > remove( String szSegName );\n\n    boolean isEmpty();\n\n    int size();\n\n    int childrenLeafSize();\n\n    void purge();\n\n    Set<Map.Entry<String, TrieNode<V >> > entrySet();\n\n    default Collection<TrieNode<V > > values() {\n        return this.children().values();\n    }\n\n    default Set<String > keySet(){\n        return this.children().keySet();\n    }\n\n    default Collection<TrieNode<V > > listItems() {\n        return this.values();\n    }\n\n    List<ValueNode<V > > listValueNodes();\n\n    List<V > listValues();\n\n    List<DirectoryNode<V > > listDirectories();\n\n    @Override\n    default DirectoryNode<V > evinceDirectory() {\n        return this;\n    }\n\n    @Override\n    default String getTypeName() {\n        return DirectoryNode.class.getSimpleName();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/GenericDirectoryNode.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic class GenericDirectoryNode<V > extends ArchTrieNode<V > implements DirectoryNode<V > {\n    protected Map<String, TrieNode<V > > mChildren;\n\n    /**\n     * Root constructor.\n     */\n    public <K extends String > GenericDirectoryNode( Map<String, TrieNode<V > > children, TrieMap<K, V > map ) {\n        this( null, children, null, map );\n    }\n\n    public <K extends String > GenericDirectoryNode( String szKey, Map<String, TrieNode<V > > children, TrieNode<V> parent, TrieMap<K, V > map ) {\n        super( szKey, parent, map );\n        this.mChildren = children;\n    }\n\n    @Override\n    public int childrenLeafSize() {\n        int leafCount = 0;\n\n        for ( TrieNode<V> child : this.mChildren.values() ) {\n            DirectoryNode<V> directoryNode = child.evinceDirectory();\n\n            if ( directoryNode != null ) {\n                leafCount += directoryNode.childrenLeafSize();\n            }\n            else {\n                if ( child.isLeaf() ) {\n                    ++leafCount;\n                }\n            }\n        }\n\n        return leafCount;\n    }\n\n    @Override\n    public Map<String, TrieNode<V > > children() {\n        return this.mChildren;\n    }\n\n    @Override\n    public Map<String, TrieNode<V > > segmentMap() {\n        return this.mChildren;\n    }\n\n    @Override\n    public TrieNode<V > get( String szSegName ) {\n        return this.mChildren.get( szSegName );\n    }\n\n    @Override\n    public void put( String szSegName, TrieNode<V > node ) {\n        this.mChildren.put( szSegName, node );\n    }\n\n    @Override\n    public void putIfAbsent( String szSegName, TrieNode<V > node ) {\n        
this.mChildren.putIfAbsent( szSegName, node );\n    }\n\n    protected void notifyMapChildrenEliminated( int nFatalities ) {\n        //( (UniTrieMaptron) this.mTrieMap ).notifyChildrenEliminated( nFatalities );\n    }\n\n    @Override\n    public TrieNode<V > remove( String szSegName ) {\n//        int nFatalities = 1;\n//        DirectoryNode<V > childDir = this.getDirectory( szSegName );\n//        if( childDir != null ) {\n//            nFatalities = childDir.childrenLeafSize();\n//        }\n//        TrieNode<V > legacy = this.mChildren.remove( szSegName );\n//        this.notifyMapChildrenEliminated( nFatalities ); // Cascading leafs.\n//        return legacy;\n\n        return this.mChildren.remove( szSegName );\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mChildren.isEmpty();\n    }\n\n    @Override\n    public int size() {\n        return this.mChildren.size();\n    }\n\n    @Override\n    public void purge() {\n        //int nFatalities = this.childrenLeafSize();\n        this.mChildren.clear();\n        //this.notifyMapChildrenEliminated( nFatalities );\n    }\n\n    @Override\n    public Set<Map.Entry<String, TrieNode<V >> > entrySet() {\n        return this.mChildren.entrySet();\n    }\n\n    @Override\n    public List<ValueNode<V>> listValueNodes() {\n        List<ValueNode<V>>             list = new ArrayList<>();\n        Collection<TrieNode<V > > trieNodes = this.values();\n        for( TrieNode<V > node : trieNodes ) {\n            ValueNode<V> vn = node.evinceValue();\n            if( vn != null ) {\n                list.add( vn );\n            }\n        }\n        return list;\n    }\n\n    @Override\n    public List<V> listValues() {\n        List<V>                        list = new ArrayList<>();\n        Collection<TrieNode<V > > trieNodes = this.values();\n        for( TrieNode<V > node : trieNodes ) {\n            ValueNode<V> vn = node.evinceValue();\n            if( vn != null ) {\n                
list.add( vn.getValue() );\n            }\n        }\n        return list;\n    }\n\n    @Override\n    public List<DirectoryNode<V >> listDirectories() {\n        List<DirectoryNode<V >>            list = new ArrayList<>();\n        Collection<TrieNode<V > >     trieNodes = this.values();\n        for( TrieNode<V > node : trieNodes ) {\n            DirectoryNode<V> dir = node.evinceDirectory();\n            if( dir != null ) {\n                list.add( dir );\n            }\n        }\n        return list;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"FullName\"      , this.getFullName()                ),\n                new KeyValue<>( \"Type\"          , ReparseNode.class.getSimpleName() ),\n                new KeyValue<>( \"ChildrenSize\"  , this.size()                       )\n        } );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/GenericReparseNode.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic class GenericReparseNode<V> extends ArchTrieNode<V > implements ReparseNode<V > {\n    protected String                mszReparsePointer    ;\n\n    public <K extends String > GenericReparseNode( String szKey, TrieNode<V> parent, String szReparsePointer, TrieMap<K, V> trieMap ) {\n        super( szKey, parent, trieMap );\n        this.mszReparsePointer = szReparsePointer;\n    }\n\n    @Override\n    public String getReparsePointer() {\n        return mszReparsePointer;\n    }\n\n    @Override\n    public void setReparsePointer( String path ) {\n        this.mszReparsePointer = path;\n    }\n\n    @Override\n    public TrieNode<V > reparse() {\n        String szReparsePointer = this.mszReparsePointer;\n        while ( true ) {\n            TrieNode<V >    revealed = this.getTrieMap().queryNode( szReparsePointer );\n            if( revealed != null ) {\n                ReparseNode<V > reparsed = revealed.evinceReparse();\n                if( reparsed != null ) {\n                    szReparsePointer = reparsed.getReparsePointer();\n                    continue;\n                }\n\n                return revealed;\n            }\n            else {\n                return null;\n            }\n        }\n    }\n\n    @Override\n    public boolean isLeaf() {\n        return true;\n    }\n\n    @Override\n    public String toString() {\n        TrieNode<V > revealed = this.reparse();\n        if( revealed != null ) {\n            return revealed.toString();\n        }\n        return null;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"FullName\"      , this.getFullName()                ),\n                new KeyValue<>( \"Type\"          , ReparseNode.class.getSimpleName() ),\n            
    new KeyValue<>( \"ReparsePoint\"  , this.getReparsePointer()          )\n        } );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/GenericValueNode.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport com.pinecone.framework.util.json.JSON;\n\npublic class GenericValueNode<V > extends ArchTrieNode<V > implements ValueNode<V > {\n    protected V value;\n\n    public <K extends String > GenericValueNode( String szKey, V value, TrieNode<V> parent, TrieMap<K, V > map ) {\n        super( szKey, parent, map );\n        this.value = value;\n    }\n\n    @Override\n    public V getValue() {\n        return this.value;\n    }\n\n    @Override\n    public void setValue( V value ) {\n        this.value = value;\n    }\n\n    @Override\n    public boolean isLeaf() {\n        return true;\n    }\n\n    @Override\n    public String toString() {\n        return this.value.toString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.value );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/IllegalOperationException.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class IllegalOperationException extends PineRuntimeException {\n    public IllegalOperationException    () {\n        super();\n    }\n\n    public IllegalOperationException    ( String message ) {\n        super(message);\n    }\n\n    public IllegalOperationException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public IllegalOperationException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected IllegalOperationException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/ReparseNode.java",
    "content": "package com.pinecone.framework.unit.trie;\n\npublic interface ReparseNode<V > extends TrieNode<V > {\n    String getReparsePointer();\n\n    void setReparsePointer( String path );\n\n    TrieNode<V > reparse();\n\n    @Override\n    default ReparseNode<V > evinceReparse() {\n        return this;\n    }\n\n    @Override\n    default String getTypeName() {\n        return ReparseNode.class.getSimpleName();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/SeparatedSegmentor.java",
    "content": "package com.pinecone.framework.unit.trie;\n\npublic class SeparatedSegmentor implements TrieSegmentor {\n    protected String separator;\n\n    public SeparatedSegmentor( String szSeparator ) {\n        this.separator = szSeparator;\n    }\n\n    public SeparatedSegmentor() {\n        this( \"/\" );\n    }\n\n    @Override\n    public String[] segments( String szPathKey ) {\n        return szPathKey.split( this.separator );\n    }\n\n    @Override\n    public String getSeparator() {\n        return this.separator;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/TrieMap.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\nimport java.util.Map;\n\npublic interface TrieMap<K, V > extends Map<K, V >, PineUnit {\n\n    @Override\n    V put( K key, V value );\n\n    default Object putEntity( K key, Object value ) {\n        return this.putEntity( key, value, false );\n    }\n\n    Object putEntity( K key, Object value, boolean isAbsent ) ;\n\n    @Override\n    V get( Object key );\n\n    @Override\n    boolean containsKey( Object key );\n\n    @Override\n    V remove( Object key );\n\n    @Override\n    int size();\n\n    @Override\n    boolean isEmpty();\n\n    TrieNode<V> queryNode( String path );\n\n    String getSeparator();\n\n    DirectoryNode<V > root();\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/TrieNode.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TrieNode<V > extends Pinenut {\n    boolean isLeaf();\n\n    TrieNode<V > parent();\n\n    TrieMap<String, V> getTrieMap();\n\n    String getNodeName();\n\n    String getFullName();\n\n    String getNamespace();\n\n    default DirectoryNode<V > evinceDirectory() {\n        return null;\n    }\n\n    default ValueNode<V > evinceValue() {\n        return null;\n    }\n\n    default ReparseNode<V > evinceReparse() {\n        return null;\n    }\n\n    String getTypeName();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/TrieSegmentor.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TrieSegmentor extends Pinenut {\n    TrieSegmentor PathSegmentor    = new SeparatedSegmentor();\n\n    TrieSegmentor ObjectSegmentor  = new SeparatedSegmentor( \".\" );\n\n    TrieSegmentor DefaultSegmentor = TrieSegmentor.PathSegmentor;\n\n\n    String[] segments( String szPathKey );\n\n    String getSeparator();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/UniTrieMaptron.java",
    "content": "package com.pinecone.framework.unit.trie;\n\nimport java.util.AbstractCollection;\nimport java.util.AbstractMap;\nimport java.util.AbstractSet;\nimport java.util.ArrayDeque;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Deque;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.NoSuchElementException;\nimport java.util.Objects;\nimport java.util.Set;\nimport java.util.TreeMap;\nimport java.util.function.Supplier;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSON;\n\n/**\n *  Pinecone Ursus For Java UniTrieMaptron\n *  SharedList Author: Ken, DragonKing\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  **********************************************************\n *  Thanks for Ken`s contribution.\n *  **********************************************************\n */\npublic class UniTrieMaptron<K extends String, V > extends AbstractTrieMap<K, V > implements TrieMap<K, V >, Cloneable {\n    protected transient DirectoryNode<V >                           mRoot;\n    protected final transient Supplier<Map<String, TrieNode<V > > > mMapSupplier;\n    //protected transient int                                         mnSize;\n    protected transient TrieSegmentor                               mSegmentor;\n\n    protected transient Set<Entry<K, V> >                           mEntrySet;\n    protected transient Set<K>                                      mKeySet;\n    protected transient Collection<V>                               mValues;\n\n    @SuppressWarnings( \"unchecked\" )\n    public UniTrieMaptron( Supplier mapSupplier, TrieSegmentor segmentor ) {\n        if ( mapSupplier == null ) {\n            throw new IllegalArgumentException( \"Map supplier cannot be null.\" );\n        }\n        this.mMapSupplier  = mapSupplier;\n        this.mRoot         = new GenericDirectoryNode( this.mMapSupplier.get(), this );\n       
 //this.mnSize        = 0;\n        this.mSegmentor    = segmentor;\n    }\n\n    public UniTrieMaptron( Supplier mapSupplier ) {\n        this( mapSupplier, TrieSegmentor.DefaultSegmentor );\n    }\n\n    public UniTrieMaptron( TrieSegmentor segmentor ) {\n        this( TreeMap::new, segmentor );\n    }\n\n    public UniTrieMaptron() {\n        this( (Supplier) TreeMap::new );\n    }\n\n\n\n\n    @SuppressWarnings( \"unchecked\" )\n    protected V convertValue( Object value ) {\n        return ( V ) value;\n    }\n\n    protected String getStringKey( Object key ) {\n        if ( key instanceof String ) {\n            return  (String) key;\n        }\n\n        return key.toString();\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object elm ) {\n        return this.containsKey( elm );\n    }\n\n    @Override\n    public V put( K key, V value ) {\n        return this.putEntity0( key, value, false );\n    }\n\n    @Override\n    public V putIfAbsent( K key, V value ) {\n        return this.putEntity0( key, value, true );\n    }\n\n    public V makeSymbolic (K key, K target ) {\n        ReparseNode<V> p = new GenericReparseNode<>( null, null, target,this );\n        return this.putEntity0( key, p, false );\n    }\n\n    protected V putEntity0( K key, Object value, boolean isAbsent ) {\n        Object ret = this.putEntity( key, value, isAbsent );\n        if ( ret instanceof TrieNode ) {\n            return null;\n        }\n\n        return this.convertValue( ret );\n    }\n\n    @Override\n    public Object putEntity( K key, Object value, boolean isAbsent ) {\n        if ( key == null ) {\n            throw new IllegalArgumentException( \"Key cannot be null.\" );\n        }\n        String[] segments         = this.mSegmentor.segments( key );\n        TrieNode<V> node          = this.mRoot;\n        DirectoryNode<V> dir      = this.mRoot;\n        TrieNode<V> parent        = this.mRoot;\n\n        String szLeafKey          = null;\n        for ( int i = 
0; i < segments.length; ++i ) {\n            String segment = segments[ i ];\n\n            if ( i < segments.length - 1 ) {\n                node = dir.get( segment );\n                if( node == null ) {\n                    DirectoryNode<V> neo = new GenericDirectoryNode<>( segment, this.mMapSupplier.get() ,parent, this );\n                    dir.put( segment, neo );\n                    node = neo;\n                    dir  = neo;\n                }\n                else {\n                    dir = node.evinceDirectory();\n                    if( dir == null ) {\n                        throw new IllegalArgumentException( \"Path `\" + key + \"` given is not a full-directory insertion path.\" );\n                    }\n                }\n            }\n            else { // Leaf Node\n                szLeafKey = segment;\n                node = dir.get( segment );\n                if( node == null ) {\n                    TrieNode<V> neo;\n                    if ( value instanceof ReparseNode ) {\n                        ReparseNode dummy = (ReparseNode) value;\n                        neo = new GenericReparseNode<>( segment, dir, dummy.getReparsePointer(),this );\n                    }\n                    else {\n                        neo = new GenericValueNode<>( segment, this.convertValue( value ), parent, this );\n                    }\n                    dir.put( segment, neo );\n                    //++this.mnSize;\n                    return neo; // Insertion\n                }\n            }\n            parent = node;\n        }\n\n        if ( isAbsent ) {\n            return null;\n        }\n\n        // Modification\n        ValueNode<V > vn = node.evinceValue();\n        if( vn != null ) {\n            V legacyValue = vn.getValue();\n            vn.setValue( this.convertValue( value ) );\n\n            return legacyValue;\n        }\n\n        ReparseNode<V > rn = node.evinceReparse();\n        if( rn != null ) {\n            TrieNode<V > 
revealed = rn.reparse();\n            if( revealed != null ) {\n                vn = revealed.evinceValue();\n                if( vn != null ) {\n                    V legacyValue = vn.getValue();\n                    vn.setValue( this.convertValue( value ) );\n\n                    return legacyValue;\n                }\n            }\n        }\n\n        DirectoryNode<V > dn = node.evinceDirectory();\n        if( dn != null ) {\n            TrieNode<V>       pp = dn.parent();\n            if( pp == null ) {\n                pp = this.mRoot;\n            }\n            DirectoryNode<V > pd = pp.evinceDirectory();\n            pd.remove( szLeafKey );\n            pd.put( szLeafKey, new GenericValueNode<>( dn.getNodeName(), this.convertValue( value ), pp, this ) );\n        }\n\n        return null;\n    }\n\n    @Override\n    public V get( Object key ) {\n        String szKey = this.getStringKey( key );\n\n        TrieNode<V> node = this.queryNode( szKey );\n        if ( node == null ) {\n            return null;\n        }\n\n        ValueNode<V > vn = node.evinceValue();\n        if ( vn != null ){\n            return vn.getValue();\n        }\n\n        ReparseNode<V > rp = node.evinceReparse();\n        if ( rp != null ){\n            TrieNode<V > revealed = rp.reparse();\n            if( revealed != null ) {\n                vn = revealed.evinceValue();\n                if( vn != null ) {\n                    return vn.getValue();\n                }\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        String szKey = this.getStringKey( key );\n\n        return this.queryNode( szKey ) != null;\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.dfsContainsValue( this.mRoot, value );\n    }\n\n    private boolean dfsContainsValue( TrieNode<V > node, Object value ) {\n        if ( node == null ) {\n            return false;\n        }\n\n    
    DirectoryNode<V > directory = node.evinceDirectory();\n        if ( directory != null ) {\n            for ( TrieNode<V > childNode : directory.children().values() ) {\n                if ( this.dfsContainsValue( childNode, value ) ) {\n                    return true;\n                }\n            }\n        }\n        else {\n            ValueNode<V > vn = node.evinceValue();\n            if( vn != null ) {\n                return vn.getValue().equals( value );\n            }\n\n            ReparseNode<V > rp = node.evinceReparse();\n            if ( rp != null ){\n                TrieNode<V > revealed = rp.reparse();\n                if( revealed != null ) {\n                    vn = revealed.evinceValue();\n                    if( vn != null ) {\n                        return vn.getValue().equals( value );\n                    }\n                }\n            }\n        }\n\n        return false;\n    }\n\n    @Override\n    public V remove( Object key ) {\n        String szKey = this.getStringKey( key );\n\n        //return this.remove( this.mRoot, this.mSegmentor.segments( szKey ), 0 );\n        return this.remove( this.mRoot, this.mSegmentor.segments( szKey ) );\n    }\n\n\n    protected V remove( TrieNode<V> startNode, String[] segments ) {\n        if ( startNode == null || segments.length == 0 ) {\n            return null;\n        }\n\n        TrieNode<V> node = startNode;\n        DirectoryNode<V> directory;\n        int depth = 0;\n\n        while ( depth < segments.length ) {\n            directory = node.evinceDirectory();\n            if ( directory == null ) {\n                return null;\n            }\n\n            String segment = segments[ depth ];\n            TrieNode<V> childNode = directory.get( segment );\n\n            if ( depth == segments.length - 1 ) {\n                if ( childNode == null ) {\n                    return null; // Illegal path.\n                }\n\n                directory.remove( segment ); // <= 
Fatalities statistics therein.\n\n                ValueNode<V > valueNode = childNode.evinceValue();\n                if ( valueNode != null ) {\n                    return valueNode.getValue();\n                }\n                return null;\n            }\n\n            node = childNode;\n            ++depth;\n        }\n\n        return null;\n    }\n\n    /*protected V remove( TrieNode<V > node, String[] segments, int depth ) {\n        if ( node == null || depth >= segments.length ) {\n            return null;\n        }\n\n        String segment = segments[ depth ];\n        DirectoryNode<V > directory = node.evinceDirectory();\n        if ( directory == null ) {\n            return null;\n        }\n\n        TrieNode<V > childNode = directory.get(segment);\n\n        if ( depth == segments.length - 1 ) {\n            if ( childNode == null ) {\n                return null; // Illegal path.\n            }\n\n            directory.remove( segment ); // <= Fatalities statistics therein.\n\n            ValueNode<V> valueNode = childNode.evinceValue();\n            if ( valueNode != null ) {\n                return valueNode.getValue();\n            }\n            return null;\n        }\n\n\n        return this.remove( childNode, segments, depth + 1 );\n\n\n//        if ( node == null ) {\n//            return null;\n//        }\n//\n//        if ( depth == segments.length ) {\n//            if ( !node.isEnd ) {\n//                return null;\n//            }\n//            node.isEnd = false;\n//            V oldValue = this.convertValue( node.value );\n//            node.value = null;\n//            --this.mnSize;\n//            return oldValue;\n//        }\n//\n//        String segment = segments[depth];\n//        TrieNode nextNode = node.children.get( segment );\n//        V result = this.remove( nextNode, segments, depth + 1 );\n//\n//        if ( nextNode != null && nextNode.children.isEmpty() && !nextNode.isEnd ) {\n//            
node.children.remove( segment );\n//        }\n//\n//        return result;\n    }*/\n\n    @Override\n    public void putAll( Map<? extends K, ? extends V> m ) {\n        for ( Entry<? extends K, ? extends V> entry : m.entrySet() ) {\n            this.put( entry.getKey(), entry.getValue() );\n        }\n    }\n\n    @Override\n    public void clear() {\n        this.mRoot.segmentMap().clear();\n        //this.mnSize = 0;\n    }\n\n//    protected void notifyChildrenEliminated( int nFatalities ) {\n//        this.mnSize -= nFatalities;\n//    }\n\n\n    @Override\n    public DirectoryNode<V> root() {\n        return this.mRoot;\n    }\n\n    @Override\n    public int size() {\n        return this.mRoot.childrenLeafSize();\n        //return this.mnSize;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mRoot.isEmpty();\n    }\n\n    @Override\n    public TrieNode<V> queryNode( String path ) {\n        String[] segments    = this.mSegmentor.segments( path );\n        DirectoryNode<V> dir = this.mRoot;\n        TrieNode<V>     node = this.mRoot;\n\n        int is = 0;\n        if ( segments.length > 1 && segments[0].isEmpty() ) {  // \"/xxx/xxx\" => Skip first `/` => \"xxx/xxx\"\n            is = 1;\n        }\n\n        for ( int i = is; i < segments.length; ++i ) {\n            String segment = segments[ i ];\n\n            if ( i < segments.length - 1 ) {\n                node = dir.get( segment );\n                if ( node == null ) {\n                    return null;\n                }\n                dir  = node.evinceDirectory();\n                if( dir == null ) {\n                    return null; // Illegal path.\n                }\n            }\n            else {\n                return dir.get( segment );\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public Set<K> keySet() {\n        Set<K >  es = this.mKeySet;\n        return (es != null) ? 
es : (this.mKeySet = new KeySet());\n    }\n\n    @Override\n    public Collection<V> values() {\n        Collection<V> vs = this.mValues;\n        if (vs == null) {\n            vs = new Values();\n            this.mValues = vs;\n        }\n        return vs;\n    }\n\n    @Override\n    public Set<Entry<K,V > > entrySet() {\n        Set<Entry<K,V > >  es = this.mEntrySet;\n        return (es != null) ? es : (this.mEntrySet = new EntrySet());\n    }\n\n\n\n\n    class EntrySet extends AbstractSet<Entry<K, V>> {\n\n        @Override\n        public Iterator<Entry<K, V>> iterator() {\n            return new EntryIterator();\n        }\n\n        @Override\n        public int size() {\n            return UniTrieMaptron.this.size();\n        }\n\n        @Override\n        public boolean contains( Object o ) {\n            if ( !( o instanceof Map.Entry ) ) {\n                return false;\n            }\n            Entry<?, ?> entry = ( Entry<?, ?> ) o;\n            Object value = UniTrieMaptron.this.get( entry.getKey() );\n            return Objects.equals( value, entry.getValue() );\n        }\n\n        @Override\n        public boolean remove( Object o ) {\n            if ( !( o instanceof Map.Entry ) ) {\n                return false;\n            }\n            Entry<?, ?> entry = ( Entry<?, ?> ) o;\n            K key = ( K ) entry.getKey();\n            V currentValue = UniTrieMaptron.this.get( key );\n            if ( Objects.equals( currentValue, entry.getValue() ) ) {\n                UniTrieMaptron.this.remove( key );\n                return true;\n            }\n            return false;\n        }\n\n        @Override\n        public void clear() {\n            UniTrieMaptron.this.clear();\n        }\n    }\n\n    class EntryIterator implements Iterator<Entry<K, V>> {\n        private final Map<String, TrieNode<V > > dummyTerminationMap = Map.of();\n\n        private final Deque<Iterator<Entry<String, TrieNode<V >>>> stack;\n        private final 
Deque<StringBuilder> pathStack;\n        private Entry<K, V> nextEntry;\n        private StringBuilder currentPath;\n\n        public EntryIterator() {\n            this.stack = new ArrayDeque<>();\n            this.pathStack = new ArrayDeque<>();\n            this.stack.push( UniTrieMaptron.this.mRoot.children().entrySet().iterator() );\n            this.currentPath = new StringBuilder();\n\n            this.advance();\n        }\n\n        @SuppressWarnings( \"unchecked\" )\n        private void advance() {\n            this.nextEntry = null;\n\n            while ( !this.stack.isEmpty() ) {\n                Iterator<Entry<String, TrieNode<V >>> iterator = this.stack.peek();\n                if ( !iterator.hasNext() ) {\n                    this.stack.pop();\n                    if ( !this.pathStack.isEmpty() ) {\n                        this.currentPath = this.pathStack.pop();\n                    }\n                    continue;\n                }\n\n                Entry<String, TrieNode<V >> entry = iterator.next();\n                TrieNode<V > node = entry.getValue();\n                String segment = entry.getKey();\n\n                this.pathStack.push( new StringBuilder( this.currentPath ) );\n                if ( this.currentPath.length() > 0 ) {\n                    this.currentPath.append( UniTrieMaptron.this.mSegmentor.getSeparator() );\n                }\n                this.currentPath.append( segment );\n\n//                while ( node.value instanceof TrieReparseNode ) {\n//                    TrieReparseNode<K, V> reparseNode = ( TrieReparseNode<K, V> ) node.value;\n//                    node = UniTrieMaptron.this.getNode( reparseNode.getPath() );\n//                    if ( node == null ) {\n//                        break;\n//                    }\n//                }\n\n                if ( node == null ) {\n                    continue;\n                }\n\n                DirectoryNode<V > dir = node.evinceDirectory();\n                
if( dir != null ) {\n                    this.stack.push( dir.children().entrySet().iterator() );\n                }\n                else {\n                    ValueNode<V> vn = node.evinceValue();\n                    if( vn != null ) {\n                        this.nextEntry = new AbstractMap.SimpleEntry<>( ( K ) this.currentPath.toString(), vn.getValue() );\n                        this.stack.push( this.dummyTerminationMap.entrySet().iterator() );\n                    }\n\n                    ReparseNode<V> rn = node.evinceReparse();\n                    if( rn != null ) {\n                        this.nextEntry = new AbstractMap.SimpleEntry( this.currentPath.toString(), rn );\n                        this.stack.push( this.dummyTerminationMap.entrySet().iterator() );\n                    }\n\n                    break;\n                }\n            }\n        }\n\n        @Override\n        public boolean hasNext() {\n            return this.nextEntry != null;\n        }\n\n        @Override\n        public Entry<K, V> next() {\n            if ( !this.hasNext() ) {\n                throw new NoSuchElementException();\n            }\n\n            Entry<K, V> entry = this.nextEntry;\n            this.advance();\n            return entry;\n        }\n    }\n\n    class KeySet extends AbstractSet<K> {\n        @Override\n        public Iterator<K> iterator() {\n            return new KeyIterator();\n        }\n\n        @Override\n        public int size() {\n            return UniTrieMaptron.this.size();\n        }\n\n        @Override\n        public boolean contains( Object o ) {\n            return UniTrieMaptron.this.containsKey( o );\n        }\n\n        @Override\n        public boolean remove( Object o ) {\n            return UniTrieMaptron.this.remove( o ) != null;\n        }\n\n        @Override\n        public void clear() {\n            UniTrieMaptron.this.clear();\n        }\n    }\n\n    class KeyIterator implements Iterator<K> {\n        private 
final Iterator<Entry<K, V>> entryIterator;\n\n        public KeyIterator() {\n            this.entryIterator = UniTrieMaptron.this.entrySet().iterator();\n        }\n\n        @Override\n        public boolean hasNext() {\n            return this.entryIterator.hasNext();\n        }\n\n        @Override\n        public K next() {\n            return this.entryIterator.next().getKey();\n        }\n    }\n\n    class Values extends AbstractCollection<V> {\n        @Override\n        public Iterator<V> iterator() {\n            return new Iterator<V>() {\n                private final Iterator<Entry<K, V>> entryIterator = UniTrieMaptron.this.entrySet().iterator();\n\n                @Override\n                public boolean hasNext() {\n                    return this.entryIterator.hasNext();\n                }\n\n                @Override\n                public V next() {\n                    return this.entryIterator.next().getValue();\n                }\n            };\n        }\n\n        @Override\n        public int size() {\n            return UniTrieMaptron.this.size();\n        }\n\n        @Override\n        public boolean contains( Object o ) {\n            return UniTrieMaptron.this.containsValue(o);\n        }\n    }\n\n    @Override\n    public TrieMap<K, V> clone() {\n        try {\n            @SuppressWarnings(\"unchecked\")\n            UniTrieMaptron<K, V> clonedMap = (UniTrieMaptron<K, V>) super.clone();\n\n            clonedMap.mRoot     = this.cloneDirectoryNode( this.mRoot, clonedMap, null );\n\n            clonedMap.mEntrySet = null;\n            clonedMap.mKeySet   = null;\n            clonedMap.mValues   = null;\n            //clonedMap.mnSize    = this.mnSize;\n\n            return clonedMap;\n        }\n        catch ( CloneNotSupportedException e ) {\n            throw new AssertionError( \"Clone not supported\", e );\n        }\n    }\n\n    protected DirectoryNode<V> cloneDirectoryNode( DirectoryNode<V> original, TrieMap<K, V> pm, 
TrieNode<V > parent ) {\n        if ( original == null ) {\n            return null;\n        }\n\n        Map<String, TrieNode<V>> clonedChildren = this.mMapSupplier.get();\n        DirectoryNode<V > neo = new GenericDirectoryNode<>( original.getNodeName(), clonedChildren, parent, pm );\n        for ( Map.Entry<String, TrieNode<V>> entry : original.children().entrySet() ) {\n            TrieNode<V> clonedChild = this.cloneTrieNode( entry.getValue(), pm, neo );\n            clonedChildren.put( entry.getKey(), clonedChild );\n        }\n\n        return neo;\n    }\n\n    protected TrieNode<V> cloneTrieNode( TrieNode<V> original, TrieMap<K, V> pm, TrieNode<V > parent ) {\n        if ( original == null ) {\n            return null;\n        }\n\n        DirectoryNode<V> directoryNode = original.evinceDirectory();\n        if ( directoryNode != null ) {\n            return this.cloneDirectoryNode( directoryNode, pm, parent );\n        }\n\n        ValueNode<V> valueNode = original.evinceValue();\n        if ( valueNode != null ) {\n            return new GenericValueNode<>( original.getNodeName(), valueNode.getValue(), parent, pm );\n        }\n\n        ReparseNode<V > rp = original.evinceReparse();\n        if ( rp != null ){\n            return new GenericReparseNode<>( original.getNodeName(), parent, rp.getReparsePointer(), pm );\n        }\n\n        return null;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String getSeparator(){\n        return this.mSegmentor.getSeparator();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/unit/trie/ValueNode.java",
    "content": "package com.pinecone.framework.unit.trie;\n\npublic interface ValueNode<V > extends TrieNode<V > {\n    V getValue();\n\n    void setValue( V value );\n\n    @Override\n    default ValueNode<V > evinceValue() {\n        return this;\n    }\n\n    @Override\n    default String getTypeName() {\n        return ValueNode.class.getSimpleName();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Assert.java",
    "content": "package com.pinecone.framework.util;\n\nimport java.util.Collection;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.AssertionRuntimeException;\n\npublic abstract class Assert {\n    public Assert() {\n    }\n\n    public static void isTrue(boolean expression, String message) {\n        if ( !expression ) {\n            throw new AssertionRuntimeException( message );\n        }\n    }\n\n    public static void isTrue( boolean expression ) {\n        Assert.isTrue( expression, \"[Assertion failed] - this expression must be true\" );\n    }\n\n    public static void isNull( Object object, String message ) {\n        if ( object != null ) {\n            throw new AssertionRuntimeException(message);\n        }\n    }\n\n    public static void isNull( Object object ) {\n        Assert.isNull(object, \"[Assertion failed] - the object argument must be null\");\n    }\n\n    public static void notNull( Object object, String message ) {\n        if ( object == null ) {\n            throw new AssertionRuntimeException( message );\n        }\n    }\n\n    public static void notNull( Object object ) {\n        Assert.notNull( object, \"[Assertion failed] - this argument is required; it must not be null\" );\n    }\n\n    public static void hasLength( String text, String message ) {\n        if ( !StringUtils.hasLength(text) ) {\n            throw new AssertionRuntimeException(message);\n        }\n    }\n\n    public static void hasLength( String text ) {\n        Assert.hasLength( text, \"[Assertion failed] - this String argument must have length; it must not be null or empty\" );\n    }\n\n    public static void hasText( String text, String message ) {\n        if ( !StringUtils.hasText(text) ) {\n            throw new AssertionRuntimeException(message);\n        }\n    }\n\n    public static void hasText( String text ) {\n        Assert.hasText(text, \"[Assertion failed] - this String argument must have text; it must not be null, empty, or 
blank\");\n    }\n\n    public static void doesNotContain( String textToSearch, String substring, String message ) {\n        if ( StringUtils.hasLength(textToSearch) && StringUtils.hasLength(substring) && textToSearch.contains(substring) ) {\n            throw new AssertionRuntimeException(message);\n        }\n    }\n\n    public static void doesNotContain( String textToSearch, String substring ) {\n        Assert.doesNotContain(textToSearch, substring, \"[Assertion failed] - this String argument must not contain the substring [\" + substring + \"]\");\n    }\n\n    public static void notEmpty( Object[] array, String message ) {\n        if ( ObjectUtils.isEmpty(array) ) {\n            throw new AssertionRuntimeException(message);\n        }\n    }\n\n    public static void notEmpty( Object[] array ) {\n        Assert.notEmpty( array, \"[Assertion failed] - this array must not be empty: it must contain at least 1 element\" );\n    }\n\n    public static void noNullElements( Object[] array, String message ) {\n        if ( array != null ) {\n            int len = array.length;\n\n            for( int i = 0; i < len; ++i ) {\n                Object element = array[i];\n                if ( element == null ) {\n                    throw new AssertionRuntimeException(message);\n                }\n            }\n        }\n\n    }\n\n    public static void noNullElements( Object[] array ) {\n        Assert.noNullElements(array, \"[Assertion failed] - this array must not contain any null elements\");\n    }\n\n    public static void notEmpty( Collection<?> collection, String message ) {\n        if ( CollectionUtils.isEmpty(collection) ) {\n            throw new AssertionRuntimeException(message);\n        }\n    }\n\n    public static void notEmpty( Collection<?> collection ) {\n        Assert.notEmpty(collection, \"[Assertion failed] - this collection must not be empty: it must contain at least 1 element\");\n    }\n\n    public static void notEmpty( Map<?, ?> map, 
String message ) {\n        if ( CollectionUtils.isEmpty(map) ) {\n            throw new AssertionRuntimeException(message);\n        }\n    }\n\n    public static void notEmpty( Map<?, ?> map ) {\n        Assert.notEmpty(map, \"[Assertion failed] - this map must not be empty; it must contain at least one entry\");\n    }\n\n    public static void isInstanceOf( Class<?> clazz, Object obj ) {\n        isInstanceOf(clazz, obj, \"\");\n    }\n\n    public static void isInstanceOf( Class<?> type, Object obj, String message ) {\n        Assert.notNull(type, \"Type to check against must not be null\");\n        if ( !type.isInstance(obj) ) {\n            throw new AssertionRuntimeException((StringUtils.hasLength(message) ? message + \" \" : \"\") + \"Object of class [\" + (obj != null ? obj.getClass().getName() : \"null\") + \"] must be an instance of \" + type);\n        }\n    }\n\n    public static void isAssignable( Class<?> superType, Class<?> subType ) {\n        Assert.isAssignable( superType, subType, \"\" );\n    }\n\n    public static void isAssignable( Class<?> superType, Class<?> subType, String message ) {\n        Assert.notNull(superType, \"Type to check against must not be null\");\n        if (subType == null || !superType.isAssignableFrom(subType)) {\n            throw new AssertionRuntimeException(message + subType + \" is not assignable to \" + superType);\n        }\n    }\n\n    public static void state( boolean expression, String message ) {\n        if ( !expression ) {\n            throw new IllegalStateException(message);\n        }\n    }\n\n    public static void state( boolean expression ) {\n        Assert.state( expression, \"[Assertion failed] - this state invariant must be true\" );\n    }\n\n    public static void provokeIrrationally( Throwable bad ) {\n        throw new AssertionRuntimeException( bad );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Bits.java",
    "content": "package com.pinecone.framework.util;\n\npublic final class Bits {\n    /**\n     * From Pinecone CPP,\n     * Pinecone/Framework/Util/Bits/BitsProcessor.h\n     * Pinecone/Framework/Util/Bits/BitsProcessor.cpp\n     */\n    // Reverse All Bits (Smallest Unit: Bit)\n    private static final byte[] BitReverseTable256 = new byte[ 256 ];\n\n    static {\n        for ( int i = 0; i < 256; ++i ) {\n            Bits.BitReverseTable256[i] = (byte) (((i & 0x01) << 7) | ((i & 0x02) << 5) | ((i & 0x04) << 3) | ((i & 0x08) << 1)\n                    | ((i & 0x10) >> 1) | ((i & 0x20) >> 3) | ((i & 0x40) >> 5) | ((i & 0x80) >> 7));\n        }\n    }\n\n    public static byte reverse8Bits( byte nNum ) {\n        return Bits.BitReverseTable256[ nNum & 0xFF ];\n    }\n\n    public static short reverse16Bits( short nNum ) {\n        int nRes = 0;\n        byte p0 = (byte) (nNum & 0xFF);\n        byte p1 = (byte) ((nNum >> 8) & 0xFF);\n\n        byte q1 = Bits.BitReverseTable256[ p0 & 0xFF ];\n        byte q0 = Bits.BitReverseTable256[ p1 & 0xFF ];\n        nRes = (q0 & 0xFF) | ((q1 & 0xFF) << 8);\n        return (short) nRes;\n    }\n\n    public static int reverse32Bits( int nNum ) {\n        int nRes = 0;\n        byte p0 = (byte) (nNum & 0xFF);\n        byte p1 = (byte) ((nNum >> 8) & 0xFF);\n        byte p2 = (byte) ((nNum >> 16) & 0xFF);\n        byte p3 = (byte) ((nNum >> 24) & 0xFF);\n\n        byte q3 = Bits.BitReverseTable256[ p0 & 0xFF ];\n        byte q2 = Bits.BitReverseTable256[ p1 & 0xFF ];\n        byte q1 = Bits.BitReverseTable256[ p2 & 0xFF ];\n        byte q0 = Bits.BitReverseTable256[ p3 & 0xFF ];\n        nRes = (q0 & 0xFF) | ((q1 & 0xFF) << 8) | ((q2 & 0xFF) << 16) | ((q3 & 0xFF) << 24);\n        return nRes;\n    }\n\n    public static long reverse64Bits( long nNum ) {\n        long nRes = 0;\n        int lower = (int) (nNum & 0xFFFFFFFFL);\n        int upper = (int) ((nNum >> 32) & 0xFFFFFFFFL);\n        int reversedLower = 
Bits.reverse32Bits(upper);\n        int reversedUpper = Bits.reverse32Bits(lower);\n        nRes = ((long) reversedLower & 0xFFFFFFFFL) | (((long) reversedUpper & 0xFFFFFFFFL) << 32);\n        return nRes;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Bytes.java",
    "content": "package com.pinecone.framework.util;\n\npublic final class Bytes {\n    public static final byte[] Empty = new byte[0];\n\n    // LE\n    public static byte[] int16ToBytesLE( short value ) {\n        return new byte[]{\n                (byte) value,\n                (byte) (value >> 8)\n        };\n    }\n\n    public static byte[] int32ToBytesLE( int value ) {\n        return new byte[]{\n                (byte) value,\n                (byte) (value >> 8),\n                (byte) (value >> 16),\n                (byte) (value >> 24)\n        };\n    }\n\n    public static byte[] int64ToBytesLE( long value ) {\n        return new byte[]{\n                (byte) value,\n                (byte) (value >> 8),\n                (byte) (value >> 16),\n                (byte) (value >> 24),\n                (byte) (value >> 32),\n                (byte) (value >> 40),\n                (byte) (value >> 48),\n                (byte) (value >> 56)\n        };\n    }\n\n    public static byte[] float32ToBytesLE( float value ) {\n        return Bytes.int32ToBytesLE( Float.floatToIntBits(value) );\n    }\n\n    public static byte[] float64ToBytesLE( double value ) {\n        return Bytes.int64ToBytesLE( Double.doubleToLongBits(value) );\n    }\n\n\n    // BE\n    public static byte[] int16ToBytesBE( short value ) {\n        return new byte[]{\n                (byte) (value >> 8),\n                (byte) value\n        };\n    }\n\n    public static byte[] int32ToBytesBE( int value ) {\n        return new byte[]{\n                (byte) (value >> 24),\n                (byte) (value >> 16),\n                (byte) (value >> 8),\n                (byte) value\n        };\n    }\n\n    public static byte[] int64ToBytesBE( long value ) {\n        return new byte[]{\n                (byte) (value >> 56),\n                (byte) (value >> 48),\n                (byte) (value >> 40),\n                (byte) (value >> 32),\n                (byte) (value >> 24),\n                
(byte) (value >> 16),\n                (byte) (value >> 8),\n                (byte) value\n        };\n    }\n\n    public static byte[] float32ToBytesBE( float value ) {\n        return Bytes.int32ToBytesBE( Float.floatToIntBits(value) );\n    }\n\n    public static byte[] float64ToBytesBE( double value ) {\n        return Bytes.int64ToBytesBE( Double.doubleToLongBits(value) );\n    }\n\n\n\n    // LE / Decode\n    public static short bytesToInt16LE( byte[] bytes ) {\n        return (short) ((bytes[1] << 8) | (bytes[0] & 0xFF));\n    }\n\n    public static int bytesToInt32LE( byte[] bytes ) {\n        return (bytes[3] << 24) | ((bytes[2] & 0xFF) << 16) | ((bytes[1] & 0xFF) << 8) | (bytes[0] & 0xFF);\n    }\n\n    public static long bytesToInt64LE( byte[] bytes ) {\n        return ((long) bytes[7] << 56) | ((long) (bytes[6] & 0xFF) << 48) | ((long) (bytes[5] & 0xFF) << 40) |\n                ((long) (bytes[4] & 0xFF) << 32) | ((long) (bytes[3] & 0xFF) << 24) | ((bytes[2] & 0xFF) << 16) |\n                ((bytes[1] & 0xFF) << 8) | (bytes[0] & 0xFF);\n    }\n\n    public static float bytesToFloat32LE( byte[] bytes ) {\n        return Float.intBitsToFloat( Bytes.bytesToInt32LE( bytes ) );\n    }\n\n    public static double bytesToFloat64LE( byte[] bytes ) {\n        return Double.longBitsToDouble( Bytes.bytesToInt64LE( bytes ) );\n    }\n\n\n    // BE / Decode\n    public static short bytesToInt16BE( byte[] bytes ) {\n        return (short) ((bytes[0] << 8) | (bytes[1] & 0xFF));\n    }\n\n    public static int bytesToInt32BE( byte[] bytes ) {\n        return (bytes[0] << 24) | ((bytes[1] & 0xFF) << 16) | ((bytes[2] & 0xFF) << 8) | (bytes[3] & 0xFF);\n    }\n\n    public static long bytesToInt64BE( byte[] bytes ) {\n        return ((long) bytes[0] << 56) | ((long) (bytes[1] & 0xFF) << 48) | ((long) (bytes[2] & 0xFF) << 40) |\n                ((long) (bytes[3] & 0xFF) << 32) | ((long) (bytes[4] & 0xFF) << 24) | ((bytes[5] & 0xFF) << 16) |\n                ((bytes[6] & 
0xFF) << 8) | (bytes[7] & 0xFF);\n    }\n\n    public static float bytesToFloat32BE( byte[] bytes ) {\n        return Float.intBitsToFloat( Bytes.bytesToInt32BE( bytes ) );\n    }\n\n    public static double bytesToFloat64BE( byte[] bytes ) {\n        return Double.longBitsToDouble( Bytes.bytesToInt64BE( bytes ) );\n    }\n\n\n\n\n    public static int calculateParity( byte b ) {\n        int count = 0;\n        for ( int i = 0; i < 8; i++ ) {\n            if ((b & (1 << i)) != 0) {\n                count++;\n            }\n        }\n        if( (count % 2) == 0 ){\n            return 1;\n        }\n        return 0;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/CharactersUtils.java",
    "content": "package com.pinecone.framework.util;\n\npublic abstract class CharactersUtils {\n    public static Character[] toObjects( char[] that ) {\n        Character[] characters = new Character[ that.length ];\n        for ( int i = 0; i < that.length; i++ ) {\n            characters[i] = that[i];\n        }\n        return characters;\n    }\n\n    public static Character[] toObjects( String that ) { // No more array copy\n        Character[] characters = new Character[ that.length() ];\n        for ( int i = 0; i < that.length(); i++ ) {\n            characters[i] = that.charAt(i);\n        }\n        return characters;\n    }\n\n    public static char[]      toChars  ( Character[] that ) {\n        char[] characters = new char[ that.length ];\n        for ( int i = 0; i < that.length; i++ ) {\n            characters[i] = that[i];\n        }\n        return characters;\n    }\n\n    public static char[]      toChars  ( Object[] that ) {\n        char[] characters = new char[ that.length ];\n        for ( int i = 0; i < that.length; i++ ) {\n            characters[i] = (char) that[i];\n        }\n        return characters;\n    }\n\n    public static boolean     regionMatches ( char c1, char c2 ) {\n        c1 = Character.toUpperCase( c1 );\n        c2 = Character.toUpperCase( c2 );\n        if ( c1 == c2 ) {\n            return true;\n        }\n        // Unfortunately, conversion to uppercase does not work properly\n        // for the Georgian alphabet, which has strange rules about case\n        // conversion.  So we need to make one last check before\n        // exiting.\n        /** I agree ! **/\n        return Character.toLowerCase(c1) == Character.toLowerCase(c2);\n    }\n\n    public static int         compareTo  ( char[] hThis, int nThisFrom, int nThisTo, char[] that, int nThatFrom, int nThatTo, boolean bNoCase ) {\n        // Fuck java, there is no FUCKING pointer !!\n        nThisTo = nThisTo > hThis.length ? 
hThis.length : nThisTo;\n        nThatTo = nThatTo > that.length  ? that.length  : nThatTo;\n        nThisFrom = nThisFrom > 0 ? nThisFrom : 0;\n        nThatFrom = nThatFrom > 0 ? nThatFrom : 0;\n\n        int len1 = nThisTo - nThisFrom;\n        int len2 = nThatTo - nThatFrom;\n        int lim = Math.min( len1, len2 );\n\n        int k = 0;\n        while ( k < lim ) {\n            char c1 = hThis[ k + nThisFrom ];\n            char c2 = that [ k + nThatFrom ];\n            if( bNoCase ){\n                if( !CharactersUtils.regionMatches( c1, c2 ) ) {\n                    return c1 - c2;\n                }\n            }\n            else {\n                if ( c1 != c2 ) {\n                    return c1 - c2;\n                }\n            }\n            k++;\n        }\n        return len1 - len2;\n    }\n\n    public static int         compareTo  ( char[] hThis, int nThisFrom, int nThisTo, char[] that, int nThatFrom, int nThatTo ) {\n        return CharactersUtils.compareTo( hThis, nThisFrom, nThisTo, that, nThatFrom, nThatTo, false );\n    }\n\n    public static boolean     equals     ( char[] hThis, int nThisFrom, int nThisTo, char[] that, int nThatFrom, int nThatTo, boolean bNoCase ) {\n        // Fuck java, there is no FUCKING pointer !!\n        nThisTo = nThisTo > hThis.length ? hThis.length : nThisTo;\n        nThatTo = nThatTo > that.length  ? that.length  : nThatTo;\n        nThisFrom = nThisFrom > 0 ? nThisFrom : 0;\n        nThatFrom = nThatFrom > 0 ? 
nThatFrom : 0;\n\n        int len1  = nThisTo - nThisFrom;\n        int len2  = nThatTo - nThatFrom;\n        if ( len1 == len2 ) {\n            int i = 0;\n            while ( len1-- != 0 ) {\n                char c1 = hThis[ i + nThisFrom ];\n                char c2 = that [ i + nThatFrom ];\n\n                if( bNoCase ){\n                    if( !CharactersUtils.regionMatches( c1, c2 ) ){\n                        return false;\n                    }\n                }\n                else {\n                    if ( c1 != c2 ) {\n                        return false;\n                    }\n                }\n                i++;\n            }\n            return true;\n        }\n\n        return false;\n    }\n\n    public static boolean     equals     ( char[] hThis, int nThisFrom, int nThisTo, char[] that, int nThatFrom, int nThatTo ) {\n        return CharactersUtils.equals( hThis, nThisFrom, nThisTo, that, nThatFrom, nThatTo, false );\n    }\n\n\n\n    public static char[] toLower( char[] arrThis ){\n        for ( int i = 0; i < arrThis.length; i++ ) {\n            arrThis[i] = Character.toLowerCase( arrThis[i] );\n        }\n        return arrThis;\n    }\n\n    public static char[] toUpper( char[] arrThis ){\n        for ( int i = 0; i < arrThis.length; i++ ) {\n            arrThis[i] = Character.toUpperCase( arrThis[i] );\n        }\n        return arrThis;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/ClassUtils.java",
    "content": "package com.pinecone.framework.util;\n\nimport java.beans.Introspector;\nimport java.lang.reflect.Array;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.lang.reflect.Proxy;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.HashMap;\nimport java.util.HashSet;\nimport java.util.Iterator;\nimport java.util.LinkedHashSet;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.Map.Entry;\n\npublic abstract class ClassUtils {\n    public static final String ARRAY_SUFFIX = \"[]\";\n    private static final String INTERNAL_ARRAY_PREFIX = \"[\";\n    private static final String NON_PRIMITIVE_ARRAY_PREFIX = \"[L\";\n    private static final char PACKAGE_SEPARATOR = '.';\n    private static final char PATH_SEPARATOR = '/';\n    private static final char INNER_CLASS_SEPARATOR = '$';\n    public static final String CGLIB_CLASS_SEPARATOR = \"$$\";\n    public static final String CLASS_FILE_SUFFIX = \".class\";\n    private static final Map<Class<?>, Class<?>> primitiveWrapperTypeMap = new HashMap<>(8);\n    private static final Map<Class<?>, Class<?>> primitiveTypeToWrapperMap = new HashMap<>(8);\n    private static final Map<String, Class<?>> primitiveTypeNameMap = new HashMap<>(32);\n    private static final Map<String, Class<?>> commonClassCache = new HashMap<>(32);\n\n    public ClassUtils() {\n    }\n\n    private static void registerCommonClasses(Class... 
commonClasses) {\n        Class[] var1 = commonClasses;\n        int var2 = commonClasses.length;\n\n        for(int var3 = 0; var3 < var2; ++var3) {\n            Class<?> clazz = var1[var3];\n            commonClassCache.put(clazz.getName(), clazz);\n        }\n\n    }\n\n    public static ClassLoader getDefaultClassLoader() {\n        ClassLoader cl = null;\n\n        try {\n            cl = Thread.currentThread().getContextClassLoader();\n        } catch (Throwable var3) {\n        }\n\n        if (cl == null) {\n            cl = ClassUtils.class.getClassLoader();\n            if (cl == null) {\n                try {\n                    cl = ClassLoader.getSystemClassLoader();\n                } catch (Throwable var2) {\n                }\n            }\n        }\n\n        return cl;\n    }\n\n    public static ClassLoader overrideThreadContextClassLoader(ClassLoader classLoaderToUse) {\n        Thread currentThread = Thread.currentThread();\n        ClassLoader threadContextClassLoader = currentThread.getContextClassLoader();\n        if (classLoaderToUse != null && !classLoaderToUse.equals(threadContextClassLoader)) {\n            currentThread.setContextClassLoader(classLoaderToUse);\n            return threadContextClassLoader;\n        } else {\n            return null;\n        }\n    }\n\n    public static Class<?> forName(String name, ClassLoader classLoader) throws ClassNotFoundException, LinkageError {\n        Assert.notNull(name, \"Name must not be null\");\n        Class<?> clazz = resolvePrimitiveClassName(name);\n        if (clazz == null) {\n            clazz = (Class)commonClassCache.get(name);\n        }\n\n        if (clazz != null) {\n            return clazz;\n        } else {\n            Class elementClass;\n            String elementName;\n            if (name.endsWith(\"[]\")) {\n                elementName = name.substring(0, name.length() - \"[]\".length());\n                elementClass = forName(elementName, classLoader);\n        
        return Array.newInstance(elementClass, 0).getClass();\n            } else if (name.startsWith(\"[L\") && name.endsWith(\";\")) {\n                elementName = name.substring(\"[L\".length(), name.length() - 1);\n                elementClass = forName(elementName, classLoader);\n                return Array.newInstance(elementClass, 0).getClass();\n            } else if (name.startsWith(\"[\")) {\n                elementName = name.substring(\"[\".length());\n                elementClass = forName(elementName, classLoader);\n                return Array.newInstance(elementClass, 0).getClass();\n            } else {\n                ClassLoader clToUse = classLoader;\n                if (classLoader == null) {\n                    clToUse = getDefaultClassLoader();\n                }\n\n                try {\n                    return clToUse != null ? clToUse.loadClass(name) : Class.forName(name);\n                } catch (ClassNotFoundException var9) {\n                    int lastDotIndex = name.lastIndexOf(46);\n                    if (lastDotIndex != -1) {\n                        String innerClassName = name.substring(0, lastDotIndex) + '$' + name.substring(lastDotIndex + 1);\n\n                        try {\n                            return clToUse != null ? 
clToUse.loadClass(innerClassName) : Class.forName(innerClassName);\n                        } catch (ClassNotFoundException var8) {\n                        }\n                    }\n\n                    throw var9;\n                }\n            }\n        }\n    }\n\n    public static Class<?> resolveClassName(String className, ClassLoader classLoader) throws IllegalArgumentException {\n        try {\n            return forName(className, classLoader);\n        } catch (ClassNotFoundException var3) {\n            throw new IllegalArgumentException(\"Cannot find class [\" + className + \"]\", var3);\n        } catch (LinkageError var4) {\n            throw new IllegalArgumentException(\"Error loading class [\" + className + \"]: problem with class file or dependent class.\", var4);\n        }\n    }\n\n    public static Class<?> resolvePrimitiveClassName(String name) {\n        Class<?> result = null;\n        if (name != null && name.length() <= 8) {\n            result = (Class)primitiveTypeNameMap.get(name);\n        }\n\n        return result;\n    }\n\n    public static boolean isPresent(String className, ClassLoader classLoader) {\n        try {\n            forName(className, classLoader);\n            return true;\n        } catch (Throwable var3) {\n            return false;\n        }\n    }\n\n    public static Class<?> getUserClass(Object instance) {\n        Assert.notNull(instance, \"Instance must not be null\");\n        return getUserClass(instance.getClass());\n    }\n\n    public static Class<?> getUserClass(Class<?> clazz) {\n        if (clazz != null && clazz.getName().contains(\"$$\")) {\n            Class<?> superClass = clazz.getSuperclass();\n            if (superClass != null && !Object.class.equals(superClass)) {\n                return superClass;\n            }\n        }\n\n        return clazz;\n    }\n\n    public static boolean isCacheSafe(Class<?> clazz, ClassLoader classLoader) {\n        Assert.notNull(clazz, \"Class must not 
be null\");\n\n        try {\n            ClassLoader target = clazz.getClassLoader();\n            if (target == null) {\n                return true;\n            } else {\n                ClassLoader cur = classLoader;\n                if (classLoader == target) {\n                    return true;\n                } else {\n                    do {\n                        if (cur == null) {\n                            return false;\n                        }\n\n                        cur = cur.getParent();\n                    } while(cur != target);\n\n                    return true;\n                }\n            }\n        } catch (SecurityException var4) {\n            return true;\n        }\n    }\n\n    public static String getShortName(String className) {\n        Assert.hasLength(className, \"Class name must not be empty\");\n        int lastDotIndex = className.lastIndexOf(46);\n        int nameEndIndex = className.indexOf(\"$$\");\n        if (nameEndIndex == -1) {\n            nameEndIndex = className.length();\n        }\n\n        String shortName = className.substring(lastDotIndex + 1, nameEndIndex);\n        shortName = shortName.replace('$', '.');\n        return shortName;\n    }\n\n    public static String getShortName(Class<?> clazz) {\n        return getShortName(getQualifiedName(clazz));\n    }\n\n    public static String getShortNameAsProperty(Class<?> clazz) {\n        String shortName = getShortName(clazz);\n        int dotIndex = shortName.lastIndexOf(46);\n        shortName = dotIndex != -1 ? 
shortName.substring(dotIndex + 1) : shortName;\n        return Introspector.decapitalize(shortName);\n    }\n\n    public static String getClassFileName(Class<?> clazz) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        String className = clazz.getName();\n        int lastDotIndex = className.lastIndexOf(46);\n        return className.substring(lastDotIndex + 1) + \".class\";\n    }\n\n    public static String getPackageName(Class<?> clazz) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        return getPackageName(clazz.getName());\n    }\n\n    public static String getPackageName(String fqClassName) {\n        Assert.notNull(fqClassName, \"Class name must not be null\");\n        int lastDotIndex = fqClassName.lastIndexOf(46);\n        return lastDotIndex != -1 ? fqClassName.substring(0, lastDotIndex) : \"\";\n    }\n\n    public static String getQualifiedName(Class<?> clazz) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        return clazz.isArray() ? 
getQualifiedNameForArray(clazz) : clazz.getName();\n    }\n\n    private static String getQualifiedNameForArray(Class<?> clazz) {\n        StringBuilder result = new StringBuilder();\n\n        while(clazz.isArray()) {\n            clazz = clazz.getComponentType();\n            result.append(\"[]\");\n        }\n\n        result.insert(0, clazz.getName());\n        return result.toString();\n    }\n\n    public static String getQualifiedMethodName(Method method) {\n        Assert.notNull(method, \"Method must not be null\");\n        return method.getDeclaringClass().getName() + \".\" + method.getName();\n    }\n\n    public static String getDescriptiveType(Object value) {\n        if (value == null) {\n            return null;\n        } else {\n            Class<?> clazz = value.getClass();\n            if (Proxy.isProxyClass(clazz)) {\n                StringBuilder result = new StringBuilder(clazz.getName());\n                result.append(\" implementing \");\n                Class<?>[] ifcs = clazz.getInterfaces();\n\n                for(int i = 0; i < ifcs.length; ++i) {\n                    result.append(ifcs[i].getName());\n                    if (i < ifcs.length - 1) {\n                        result.append(',');\n                    }\n                }\n\n                return result.toString();\n            } else {\n                return clazz.isArray() ? getQualifiedNameForArray(clazz) : clazz.getName();\n            }\n        }\n    }\n\n    public static boolean matchesTypeName(Class<?> clazz, String typeName) {\n        return typeName != null && (typeName.equals(clazz.getName()) || typeName.equals(clazz.getSimpleName()) || clazz.isArray() && typeName.equals(getQualifiedNameForArray(clazz)));\n    }\n\n    public static boolean hasConstructor(Class<?> clazz, Class... paramTypes) {\n        return getConstructorIfAvailable(clazz, paramTypes) != null;\n    }\n\n    public static <T> Constructor<T> getConstructorIfAvailable(Class<T> clazz, Class... 
paramTypes) {\n        Assert.notNull(clazz, \"Class must not be null\");\n\n        try {\n            return clazz.getConstructor(paramTypes);\n        } catch (NoSuchMethodException var3) {\n            return null;\n        }\n    }\n\n    public static boolean hasMethod( Class<?> clazz, String methodName, Class... paramTypes ) {\n        return getMethodIfAvailable(clazz, methodName, paramTypes) != null;\n    }\n\n    public static Method getMethod( Class<?> clazz, String methodName, Class... paramTypes ) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        Assert.notNull(methodName, \"Method name must not be null\");\n        if ( paramTypes != null ) {\n            try {\n                return clazz.getMethod( methodName, paramTypes );\n            }\n            catch (NoSuchMethodException e) {\n                throw new IllegalStateException( \"Expected method not found: \" + e );\n            }\n        }\n        else {\n            Set<Method> candidates = new HashSet<>(1);\n            Method[] methods = clazz.getMethods();\n            int len = methods.length;\n\n            for( int i = 0; i < len; ++i ) {\n                Method method = methods[i];\n                if (methodName.equals(method.getName())) {\n                    candidates.add(method);\n                }\n            }\n\n            if ( candidates.size() == 1 ) {\n                return (Method)candidates.iterator().next();\n            }\n            else if ( candidates.isEmpty() ) {\n                throw new IllegalStateException(\"Expected method not found: \" + clazz + \".\" + methodName);\n            }\n            else {\n                throw new IllegalStateException(\"No unique method found: \" + clazz + \".\" + methodName);\n            }\n        }\n    }\n\n    public static Method getFirstMethodByName( Class<?> clazz, String methodName ) {\n        Method[] methods = clazz.getMethods();\n        for ( Method method : methods ) {\n            
if( method.getName().equals( methodName ) ) {\n                return method;\n            }\n        }\n        return null;\n    }\n\n    public static Method getMethodIfAvailable(Class<?> clazz, String methodName, Class... paramTypes) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        Assert.notNull(methodName, \"Method name must not be null\");\n        if (paramTypes != null) {\n            try {\n                return clazz.getMethod(methodName, paramTypes);\n            } catch (NoSuchMethodException var9) {\n                return null;\n            }\n        } else {\n            Set<Method> candidates = new HashSet<>(1);\n            Method[] methods = clazz.getMethods();\n            Method[] var5 = methods;\n            int var6 = methods.length;\n\n            for(int var7 = 0; var7 < var6; ++var7) {\n                Method method = var5[var7];\n                if (methodName.equals(method.getName())) {\n                    candidates.add(method);\n                }\n            }\n\n            if (candidates.size() == 1) {\n                return (Method)candidates.iterator().next();\n            } else {\n                return null;\n            }\n        }\n    }\n\n    public static int getMethodCountForName(Class<?> clazz, String methodName) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        Assert.notNull(methodName, \"Method name must not be null\");\n        int count = 0;\n        Method[] declaredMethods = clazz.getDeclaredMethods();\n        Method[] var4 = declaredMethods;\n        int var5 = declaredMethods.length;\n\n        int var6;\n        for(var6 = 0; var6 < var5; ++var6) {\n            Method method = var4[var6];\n            if (methodName.equals(method.getName())) {\n                ++count;\n            }\n        }\n\n        Class<?>[] ifcs = clazz.getInterfaces();\n        Class[] var10 = ifcs;\n        var6 = ifcs.length;\n\n        for(int var11 = 0; var11 < var6; ++var11) {\n    
        Class<?> ifc = var10[var11];\n            count += getMethodCountForName(ifc, methodName);\n        }\n\n        if (clazz.getSuperclass() != null) {\n            count += getMethodCountForName(clazz.getSuperclass(), methodName);\n        }\n\n        return count;\n    }\n\n    public static boolean hasAtLeastOneMethodWithName(Class<?> clazz, String methodName) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        Assert.notNull(methodName, \"Method name must not be null\");\n        Method[] declaredMethods = clazz.getDeclaredMethods();\n        Method[] var3 = declaredMethods;\n        int var4 = declaredMethods.length;\n\n        int var5;\n        for(var5 = 0; var5 < var4; ++var5) {\n            Method method = var3[var5];\n            if (method.getName().equals(methodName)) {\n                return true;\n            }\n        }\n\n        Class<?>[] ifcs = clazz.getInterfaces();\n        Class[] var9 = ifcs;\n        var5 = ifcs.length;\n\n        for(int var10 = 0; var10 < var5; ++var10) {\n            Class<?> ifc = var9[var10];\n            if (hasAtLeastOneMethodWithName(ifc, methodName)) {\n                return true;\n            }\n        }\n\n        return clazz.getSuperclass() != null && hasAtLeastOneMethodWithName(clazz.getSuperclass(), methodName);\n    }\n\n    public static Method getMostSpecificMethod(Method method, Class<?> targetClass) {\n        if (method != null && isOverridable(method, targetClass) && targetClass != null && !targetClass.equals(method.getDeclaringClass())) {\n            try {\n                if (Modifier.isPublic(method.getModifiers())) {\n                    try {\n                        return targetClass.getMethod(method.getName(), method.getParameterTypes());\n                    } catch (NoSuchMethodException var3) {\n                        return method;\n                    }\n                }\n\n                Method specificMethod = ReflectionUtils.findMethod(targetClass, 
method.getName(), method.getParameterTypes());\n                return specificMethod != null ? specificMethod : method;\n            } catch (SecurityException var4) {\n            }\n        }\n\n        return method;\n    }\n\n    public static boolean isUserLevelMethod(Method method) {\n        Assert.notNull(method, \"Method must not be null\");\n        return method.isBridge() || !method.isSynthetic() && !isGroovyObjectMethod(method);\n    }\n\n    private static boolean isGroovyObjectMethod(Method method) {\n        return method.getDeclaringClass().getName().equals(\"groovy.lang.GroovyObject\");\n    }\n\n    private static boolean isOverridable(Method method, Class<?> targetClass) {\n        if (Modifier.isPrivate(method.getModifiers())) {\n            return false;\n        } else {\n            return !Modifier.isPublic(method.getModifiers()) && !Modifier.isProtected(method.getModifiers()) ? getPackageName(method.getDeclaringClass()).equals(getPackageName(targetClass)) : true;\n        }\n    }\n\n    public static Method getStaticMethod(Class<?> clazz, String methodName, Class... args) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        Assert.notNull(methodName, \"Method name must not be null\");\n\n        try {\n            Method method = clazz.getMethod(methodName, args);\n            return Modifier.isStatic(method.getModifiers()) ? 
method : null;\n        } catch (NoSuchMethodException var4) {\n            return null;\n        }\n    }\n\n    public static boolean isPrimitiveWrapper(Class<?> clazz) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        return primitiveWrapperTypeMap.containsKey(clazz);\n    }\n\n    public static boolean isPrimitiveOrWrapper(Class<?> clazz) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        return clazz.isPrimitive() || isPrimitiveWrapper(clazz);\n    }\n\n    public static boolean isPrimitiveArray(Class<?> clazz) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        return clazz.isArray() && clazz.getComponentType().isPrimitive();\n    }\n\n    public static boolean isPrimitiveWrapperArray(Class<?> clazz) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        return clazz.isArray() && isPrimitiveWrapper(clazz.getComponentType());\n    }\n\n    public static Class<?> resolvePrimitiveIfNecessary(Class<?> clazz) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        return clazz.isPrimitive() && clazz != Void.TYPE ? 
(Class)primitiveTypeToWrapperMap.get(clazz) : clazz;\n    }\n\n    public static boolean isAssignable( Class<?> lhsType, Class<?> rhsType ) {\n        Assert.notNull(lhsType, \"Left-hand side type must not be null\");\n        Assert.notNull(rhsType, \"Right-hand side type must not be null\");\n        if ( lhsType.isAssignableFrom(rhsType) ) {\n            return true;\n        }\n        else {\n            Class resolvedPrimitive;\n            if ( lhsType.isPrimitive() ) {\n                resolvedPrimitive = (Class)primitiveWrapperTypeMap.get( rhsType );\n                if ( lhsType.equals( resolvedPrimitive ) ) {\n                    return true;\n                }\n            }\n            else {\n                resolvedPrimitive = (Class)primitiveTypeToWrapperMap.get( rhsType );\n                if ( resolvedPrimitive != null && lhsType.isAssignableFrom( resolvedPrimitive ) ) {\n                    return true;\n                }\n            }\n\n            return false;\n        }\n    }\n\n    public static boolean isAssignableValue(Class<?> type, Object value) {\n        Assert.notNull(type, \"Type must not be null\");\n        return value != null ? isAssignable(type, value.getClass()) : !type.isPrimitive();\n    }\n\n    public static String convertResourcePathToClassName(String resourcePath) {\n        Assert.notNull(resourcePath, \"Resource path must not be null\");\n        return resourcePath.replace('/', '.');\n    }\n\n    public static String convertClassNameToResourcePath(String className) {\n        Assert.notNull(className, \"Class name must not be null\");\n        return className.replace('.', '/');\n    }\n\n    public static String addResourcePathToPackagePath(Class<?> clazz, String resourceName) {\n        Assert.notNull(resourceName, \"Resource name must not be null\");\n        return !resourceName.startsWith(\"/\") ? 
classPackageAsResourcePath(clazz) + \"/\" + resourceName : classPackageAsResourcePath(clazz) + resourceName;\n    }\n\n    public static String classPackageAsResourcePath(Class<?> clazz) {\n        if (clazz == null) {\n            return \"\";\n        } else {\n            String className = clazz.getName();\n            int packageEndIndex = className.lastIndexOf(46);\n            if (packageEndIndex == -1) {\n                return \"\";\n            } else {\n                String packageName = className.substring(0, packageEndIndex);\n                return packageName.replace('.', '/');\n            }\n        }\n    }\n\n    public static String classNamesToString(Class... classes) {\n        return classNamesToString((Collection)Arrays.asList(classes));\n    }\n\n    public static String classNamesToString(Collection<Class<?>> classes) {\n        if (CollectionUtils.isEmpty(classes)) {\n            return \"[]\";\n        } else {\n            StringBuilder sb = new StringBuilder(\"[\");\n            Iterator it = classes.iterator();\n\n            while(it.hasNext()) {\n                Class<?> clazz = (Class)it.next();\n                sb.append(clazz.getName());\n                if (it.hasNext()) {\n                    sb.append(\", \");\n                }\n            }\n\n            sb.append(\"]\");\n            return sb.toString();\n        }\n    }\n\n    public static Class<?>[] toClassArray(Collection<Class<?>> collection) {\n        return collection == null ? 
null : (Class[])collection.toArray(new Class[collection.size()]);\n    }\n\n    public static Class<?>[] getAllInterfaces(Object instance) {\n        Assert.notNull(instance, \"Instance must not be null\");\n        return getAllInterfacesForClass(instance.getClass());\n    }\n\n    public static Class<?>[] getAllInterfacesForClass(Class<?> clazz) {\n        return getAllInterfacesForClass(clazz, (ClassLoader)null);\n    }\n\n    public static Class<?>[] getAllInterfacesForClass(Class<?> clazz, ClassLoader classLoader) {\n        Set<Class<?>> ifcs = getAllInterfacesForClassAsSet(clazz, classLoader);\n        return (Class[])ifcs.toArray(new Class[ifcs.size()]);\n    }\n\n    public static Set<Class<?>> getAllInterfacesAsSet(Object instance) {\n        Assert.notNull(instance, \"Instance must not be null\");\n        return getAllInterfacesForClassAsSet(instance.getClass());\n    }\n\n    public static Set<Class<?>> getAllInterfacesForClassAsSet(Class<?> clazz) {\n        return getAllInterfacesForClassAsSet(clazz, (ClassLoader)null);\n    }\n\n    public static Set<Class<?>> getAllInterfacesForClassAsSet(Class<?> clazz, ClassLoader classLoader) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        if (clazz.isInterface() && isVisible(clazz, classLoader)) {\n            return Collections.singleton(clazz);\n        } else {\n            LinkedHashSet interfaces;\n            for(interfaces = new LinkedHashSet(); clazz != null; clazz = clazz.getSuperclass()) {\n                Class<?>[] ifcs = clazz.getInterfaces();\n                Class[] var4 = ifcs;\n                int var5 = ifcs.length;\n\n                for(int var6 = 0; var6 < var5; ++var6) {\n                    Class<?> ifc = var4[var6];\n                    interfaces.addAll(getAllInterfacesForClassAsSet(ifc, classLoader));\n                }\n            }\n\n            return interfaces;\n        }\n    }\n\n    public static Class<?> createCompositeInterface(Class<?>[] interfaces, 
ClassLoader classLoader) {\n        Assert.notEmpty(interfaces, \"Interfaces must not be empty\");\n        Assert.notNull(classLoader, \"ClassLoader must not be null\");\n        return Proxy.getProxyClass(classLoader, interfaces);\n    }\n\n    public static Class<?> determineCommonAncestor(Class<?> clazz1, Class<?> clazz2) {\n        if (clazz1 == null) {\n            return clazz2;\n        } else if (clazz2 == null) {\n            return clazz1;\n        } else if (clazz1.isAssignableFrom(clazz2)) {\n            return clazz1;\n        } else if (clazz2.isAssignableFrom(clazz1)) {\n            return clazz2;\n        } else {\n            Class ancestor = clazz1;\n\n            do {\n                ancestor = ancestor.getSuperclass();\n                if (ancestor == null || Object.class.equals(ancestor)) {\n                    return null;\n                }\n            } while(!ancestor.isAssignableFrom(clazz2));\n\n            return ancestor;\n        }\n    }\n\n    public static boolean isVisible(Class<?> clazz, ClassLoader classLoader) {\n        if (classLoader == null) {\n            return true;\n        } else {\n            try {\n                Class<?> actualClass = classLoader.loadClass(clazz.getName());\n                return clazz == actualClass;\n            } catch (ClassNotFoundException var3) {\n                return false;\n            }\n        }\n    }\n\n    public static boolean isCglibProxy(Object object) {\n        return isCglibProxyClass(object.getClass());\n    }\n\n    public static boolean isCglibProxyClass(Class<?> clazz) {\n        return clazz != null && isCglibProxyClassName(clazz.getName());\n    }\n\n    public static boolean isCglibProxyClassName(String className) {\n        return className != null && className.contains(\"$$\");\n    }\n\n    static {\n        primitiveWrapperTypeMap.put(Boolean.class, Boolean.TYPE);\n        primitiveWrapperTypeMap.put(Byte.class, Byte.TYPE);\n        
primitiveWrapperTypeMap.put(Character.class, Character.TYPE);\n        primitiveWrapperTypeMap.put(Double.class, Double.TYPE);\n        primitiveWrapperTypeMap.put(Float.class, Float.TYPE);\n        primitiveWrapperTypeMap.put(Integer.class, Integer.TYPE);\n        primitiveWrapperTypeMap.put(Long.class, Long.TYPE);\n        primitiveWrapperTypeMap.put(Short.class, Short.TYPE);\n        Iterator var0 = primitiveWrapperTypeMap.entrySet().iterator();\n\n        while(var0.hasNext()) {\n            Entry<Class<?>, Class<?>> entry = (Entry)var0.next();\n            primitiveTypeToWrapperMap.put(entry.getValue(), entry.getKey());\n            registerCommonClasses((Class)entry.getKey());\n        }\n\n        Set<Class<?>> primitiveTypes = new HashSet<>(32);\n        primitiveTypes.addAll(primitiveWrapperTypeMap.values());\n        primitiveTypes.addAll(Arrays.asList(boolean[].class, byte[].class, char[].class, double[].class, float[].class, int[].class, long[].class, short[].class));\n        primitiveTypes.add(Void.TYPE);\n        Iterator var4 = primitiveTypes.iterator();\n\n        while(var4.hasNext()) {\n            Class<?> primitiveType = (Class)var4.next();\n            primitiveTypeNameMap.put(primitiveType.getName(), primitiveType);\n        }\n\n        registerCommonClasses(Boolean[].class, Byte[].class, Character[].class, Double[].class, Float[].class, Integer[].class, Long[].class, Short[].class);\n        registerCommonClasses(Number.class, Number[].class, String.class, String[].class, Object.class, Object[].class, Class.class, Class[].class);\n        registerCommonClasses(Throwable.class, Exception.class, RuntimeException.class, Error.class, StackTraceElement.class, StackTraceElement[].class);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/CollectionUtils.java",
    "content": "package com.pinecone.framework.util;\n\n\nimport com.pinecone.framework.system.Unsafe;\nimport com.pinecone.framework.unit.AbstractMultiValueMap;\nimport com.pinecone.framework.unit.MultiValueMap;\n\nimport java.io.Serializable;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Enumeration;\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Properties;\nimport java.util.Set;\nimport java.util.Map.Entry;\n\n@SuppressWarnings( \"unchecked\" )\npublic abstract class CollectionUtils {\n    public CollectionUtils() {\n    }\n\n    public static boolean isEmpty( Collection<?> collection ) {\n        return collection == null || collection.isEmpty();\n    }\n\n    public static boolean isNoneEmpty( Collection<?> collection ) {\n        return !CollectionUtils.isEmpty( collection );\n    }\n\n    public static boolean isEmpty( Map<?, ?> map ) {\n        return map == null || map.isEmpty();\n    }\n\n    public static boolean isNoneEmpty( Map<?, ?> map ) {\n        return !CollectionUtils.isEmpty( map );\n    }\n\n    public static List arrayToList(Object source) {\n        return Arrays.asList(ObjectUtils.toObjectArray(source));\n    }\n\n    public static <E> void mergeArrayIntoCollection(Object array, Collection<E> collection) {\n        if (collection == null) {\n            throw new IllegalArgumentException(\"Collection must not be null\");\n        } else {\n            Object[] arr = ObjectUtils.toObjectArray(array);\n            Object[] var3 = arr;\n            int var4 = arr.length;\n\n            for(int var5 = 0; var5 < var4; ++var5) {\n                Object elem = var3[var5];\n                collection.add((E) elem);\n            }\n\n        }\n    }\n\n    public static <K, V> void mergePropertiesIntoMap(Properties props, Map<String, Object> map) {\n      
  if (map == null) {\n            throw new IllegalArgumentException(\"Map must not be null\");\n        } else {\n            String key;\n            Object value;\n            if (props != null) {\n                for(Enumeration en = props.propertyNames(); en.hasMoreElements(); map.put(key, value)) {\n                    key = (String)en.nextElement();\n                    value = props.getProperty(key);\n                    if (value == null) {\n                        value = props.get(key);\n                    }\n                }\n            }\n\n        }\n    }\n\n    public static boolean contains(Iterator<?> iterator, Object element) {\n        if (iterator != null) {\n            while(iterator.hasNext()) {\n                Object candidate = iterator.next();\n                if (ObjectUtils.nullSafeEquals(candidate, element)) {\n                    return true;\n                }\n            }\n        }\n\n        return false;\n    }\n\n    public static boolean contains(Enumeration<?> enumeration, Object element) {\n        if (enumeration != null) {\n            while(enumeration.hasMoreElements()) {\n                Object candidate = enumeration.nextElement();\n                if (ObjectUtils.nullSafeEquals(candidate, element)) {\n                    return true;\n                }\n            }\n        }\n\n        return false;\n    }\n\n    public static boolean containsInstance(Collection<?> collection, Object element) {\n        if (collection != null) {\n            Iterator var2 = collection.iterator();\n\n            while(var2.hasNext()) {\n                Object candidate = var2.next();\n                if (candidate == element) {\n                    return true;\n                }\n            }\n        }\n\n        return false;\n    }\n\n    public static boolean containsAny(Collection<?> source, Collection<?> candidates) {\n        if (!isEmpty(source) && !isEmpty(candidates)) {\n            Iterator var2 = 
candidates.iterator();\n\n            Object candidate;\n            do {\n                if (!var2.hasNext()) {\n                    return false;\n                }\n\n                candidate = var2.next();\n            } while(!source.contains(candidate));\n\n            return true;\n        } else {\n            return false;\n        }\n    }\n\n    public static <E> E findFirstMatch(Collection<?> source, Collection<E> candidates) {\n        if (!isEmpty(source) && !isEmpty(candidates)) {\n            Iterator var2 = candidates.iterator();\n\n            Object candidate;\n            do {\n                if (!var2.hasNext()) {\n                    return null;\n                }\n\n                candidate = var2.next();\n            } while(!source.contains(candidate));\n\n            return (E) candidate;\n        } else {\n            return null;\n        }\n    }\n\n    public static <T> T findValueOfType(Collection<?> collection, Class<T> type) {\n        if (isEmpty(collection)) {\n            return null;\n        } else {\n            T value = null;\n            Iterator var3 = collection.iterator();\n\n            while(true) {\n                Object element;\n                do {\n                    if (!var3.hasNext()) {\n                        return value;\n                    }\n\n                    element = var3.next();\n                } while(type != null && !type.isInstance(element));\n\n                if (value != null) {\n                    return null;\n                }\n\n                value = (T) element;\n            }\n        }\n    }\n\n    public static Object findValueOfType(Collection<?> collection, Class<?>[] types) {\n        if (!isEmpty(collection) && !ObjectUtils.isEmpty(types)) {\n            Class[] var2 = types;\n            int var3 = types.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                Class<?> type = var2[var4];\n                Object value = 
findValueOfType(collection, type);\n                if (value != null) {\n                    return value;\n                }\n            }\n\n            return null;\n        } else {\n            return null;\n        }\n    }\n\n    public static boolean hasUniqueObject(Collection<?> collection) {\n        if (isEmpty(collection)) {\n            return false;\n        } else {\n            boolean hasCandidate = false;\n            Object candidate = null;\n            Iterator var3 = collection.iterator();\n\n            while(var3.hasNext()) {\n                Object elem = var3.next();\n                if (!hasCandidate) {\n                    hasCandidate = true;\n                    candidate = elem;\n                } else if (candidate != elem) {\n                    return false;\n                }\n            }\n\n            return true;\n        }\n    }\n\n    public static Class<?> findCommonElementType(Collection<?> collection) {\n        if (isEmpty(collection)) {\n            return null;\n        } else {\n            Class<?> candidate = null;\n            Iterator var2 = collection.iterator();\n\n            while(var2.hasNext()) {\n                Object val = var2.next();\n                if (val != null) {\n                    if (candidate == null) {\n                        candidate = val.getClass();\n                    } else if (candidate != val.getClass()) {\n                        return null;\n                    }\n                }\n            }\n\n            return candidate;\n        }\n    }\n\n    public static <A, E extends A> A[] toArray(Enumeration<E> enumeration, A[] array) {\n        ArrayList elements = new ArrayList();\n\n        while(enumeration.hasMoreElements()) {\n            elements.add(enumeration.nextElement());\n        }\n\n        return (A[]) elements.toArray(array);\n    }\n\n    public static <E> Iterator<E> toIterator(Enumeration<E> enumeration) {\n        return new 
CollectionUtils.EnumerationIterator(enumeration);\n    }\n\n    public static <K, V> MultiValueMap<K, V> toMultiValueMap(Map<K, List<V>> map) {\n        return new CollectionUtils.MultiValueMapAdapter(map);\n    }\n\n    public static <K, V> MultiValueMap<K, V> unmodifiableMultiValueMap(MultiValueMap<? extends K, ? extends V> map) {\n        Assert.notNull(map, \"'map' must not be null\");\n        Map<K, List<V>> result = new LinkedHashMap(map.size());\n        Iterator var2 = map.entrySet().iterator();\n\n        while(var2.hasNext()) {\n            Entry<? extends K, ? extends List<? extends V>> entry = (Entry)var2.next();\n            List<V> values = Collections.unmodifiableList((List)entry.getValue());\n            result.put(entry.getKey(), values);\n        }\n\n        Map<K, List<V>> unmodifiableMap = Collections.unmodifiableMap(result);\n        return toMultiValueMap(unmodifiableMap);\n    }\n\n    private static class MultiValueMapAdapter<K, V> extends AbstractMultiValueMap<K, V > implements MultiValueMap<K, V>, Serializable {\n        private final Map<K, List<V>> map;\n\n        public MultiValueMapAdapter(Map<K, List<V>> map) {\n            Assert.notNull(map, \"'map' must not be null\");\n            this.map = map;\n        }\n\n        public V add( K key, V value ) {\n            List<V> values = (List)this.map.get(key);\n            if (values == null) {\n                values = new LinkedList();\n                this.map.put(key, values);\n            }\n\n            ( (List) values ).add( value );\n            return value;\n        }\n\n        public V getFirst(K key) {\n            List<V> values = (List)this.map.get(key);\n            return values != null ? 
values.get(0) : null;\n        }\n\n        public V set( K key, V value ) {\n            List<V> values = new LinkedList();\n            values.add(value);\n            this.map.put( key, values );\n            return value;\n        }\n\n        public void setAll(Map<K, V> values) {\n            Iterator var2 = values.entrySet().iterator();\n\n            while(var2.hasNext()) {\n                Entry<K, V> entry = (Entry)var2.next();\n                this.set(entry.getKey(), entry.getValue());\n            }\n\n        }\n\n        public Map<K, V> toSingleValueMap() {\n            LinkedHashMap<K, V> singleValueMap = new LinkedHashMap(this.map.size());\n            Iterator var2 = this.map.entrySet().iterator();\n\n            while(var2.hasNext()) {\n                Entry<K, List<V>> entry = (Entry)var2.next();\n                singleValueMap.put(entry.getKey(), (V) ((List)entry.getValue()).get(0));\n            }\n\n            return singleValueMap;\n        }\n\n        public int size() {\n            return this.map.size();\n        }\n\n        public boolean isEmpty() {\n            return this.map.isEmpty();\n        }\n\n        public boolean containsKey(Object key) {\n            return this.map.containsKey(key);\n        }\n\n        public boolean containsValue(Object value) {\n            return this.map.containsValue(value);\n        }\n\n        public List<V> get(Object key) {\n            return (List)this.map.get(key);\n        }\n\n        public List<V> put(K key, List<V> value) {\n            return (List)this.map.put(key, value);\n        }\n\n        public List<V> remove(Object key) {\n            return (List)this.map.remove(key);\n        }\n\n        public void putAll(Map<? extends K, ? 
extends List<V>> m) {\n            this.map.putAll(m);\n        }\n\n        public void clear() {\n            this.map.clear();\n        }\n\n        public Set<K> keySet() {\n            return this.map.keySet();\n        }\n\n        public Collection<List<V>> values() {\n            return this.map.values();\n        }\n\n        public Set<Entry<K, List<V>>> entrySet() {\n            return this.map.entrySet();\n        }\n\n        public boolean equals(Object other) {\n            return this == other ? true : this.map.equals(other);\n        }\n\n        public int hashCode() {\n            return this.map.hashCode();\n        }\n\n        public String toString() {\n            return this.map.toString();\n        }\n    }\n\n    private static class EnumerationIterator<E> implements Iterator<E> {\n        private Enumeration<E> enumeration;\n\n        public EnumerationIterator(Enumeration<E> enumeration) {\n            this.enumeration = enumeration;\n        }\n\n        public boolean hasNext() {\n            return this.enumeration.hasMoreElements();\n        }\n\n        public E next() {\n            return this.enumeration.nextElement();\n        }\n\n        public void remove() throws UnsupportedOperationException {\n            throw new UnsupportedOperationException(\"Not supported\");\n        }\n    }\n\n\n\n    @Unsafe\n    public static <T> List<T> genericConvert( List list ) {\n        return (List<T>) list;\n    }\n\n    @Unsafe\n    public static <T> Collection<T> genericConvert( Collection collection ) {\n        return (Collection<T>) collection;\n    }\n\n    @Unsafe\n    public static <K, V> Map<K, V> genericConvert( Map map ) {\n        return (Map<K, V>) map;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/CursorParser.java",
    "content": "package com.pinecone.framework.util;\n\nimport com.pinecone.framework.system.ParseException;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface CursorParser extends Pinenut {\n    void back() throws ParseException;\n\n    char next() throws ParseException;\n\n    String next( int n ) throws ParseException;\n\n    Object nextValue() throws ParseException ;\n\n    Object nextValue( Object indexKey, Object parent, Object[] args ) throws ParseException ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Debug.java",
    "content": "package com.pinecone.framework.util;\n\nimport java.util.concurrent.atomic.AtomicInteger;\n\nimport com.pinecone.framework.system.InstantKillError;\nimport com.pinecone.framework.util.io.Tracer;\nimport com.pinecone.framework.util.io.Tracerson;\nimport com.pinecone.framework.util.json.JSON;\n\npublic class Debug {\n    private final static Tracer console = new Tracerson();\n\n    public static Tracer console() {\n        return Debug.console;\n    }\n\n    public static Tracer probe(){\n        System.err.println(\"\\n\\rFuck is here !\\n\\r\");\n        return Debug.console;\n    }\n\n    public static Tracer fmt( int nIndentFactor, Object Anything, Object...objects ){\n        Debug.console.getOut().print( JSON.stringify( Anything, nIndentFactor ) );\n        for ( Object row : objects ) {\n            Debug.console.getOut().print( JSON.stringify( row, nIndentFactor ) );\n        }\n        return Debug.console;\n    }\n\n    public static Tracer fmp( int nIndentFactor, Object Anything, Object...objects ){\n        Debug.console.getOut().print( JSON.stringify( JSON.parse( JSON.stringify( Anything ) ), nIndentFactor ) );\n        for ( Object row : objects ) {\n            Debug.console.getOut().print( JSON.stringify( JSON.parse( JSON.stringify( row ) ), nIndentFactor ) );\n        }\n        return Debug.console;\n    }\n\n    public static Tracer trace( Object Anything, Object...objects ){\n        return Debug.console.log( Anything, objects );\n    }\n\n    public synchronized static Tracer traceSyn( Object Anything, Object...objects ){\n        return Debug.console.log( Anything, objects );\n    }\n\n    public static Tracer info ( Object Anything, Object...objects ){\n        return Debug.console.info( Anything, objects );\n    }\n\n    public synchronized static Tracer infoSyn( Object Anything, Object...objects ){\n        return Debug.console.info( Anything, objects );\n    }\n\n    public static Tracer warn ( Object Anything, 
Object...objects ){\n        return Debug.console.warn( Anything, objects );\n    }\n\n    public synchronized static Tracer warnSyn( Object Anything, Object...objects ){\n        return Debug.console.warn( Anything, objects );\n    }\n\n    public static Tracer colorf( int colorCode, Object Anything, Object...objects ){\n        return Debug.console.colorf( colorCode, Anything, objects );\n    }\n\n    public static Tracer purplef( Object Anything, Object...objects ){\n        return Debug.console.colorf( 35, Anything, objects );\n    }\n\n    public synchronized static Tracer purplefs( Object Anything, Object...objects ){\n        return Debug.purplef( Anything, objects );\n    }\n\n    public static Tracer redf( Object Anything, Object...objects ){\n        return Debug.console.colorf( 31, Anything, objects );\n    }\n\n    public synchronized static Tracer redfs( Object Anything, Object...objects ){\n        return Debug.redf( Anything, objects );\n    }\n\n    public static Tracer greenf( Object Anything, Object...objects ){\n        return Debug.console.colorf( 32, Anything, objects );\n    }\n\n    public synchronized static Tracer greenfs( Object Anything, Object...objects ){\n        return Debug.greenf( Anything, objects );\n    }\n\n    public static Tracer bluef( Object Anything, Object...objects ){\n        return Debug.console.colorf( 34, Anything, objects );\n    }\n\n    public synchronized static Tracer bluefs( Object Anything, Object...objects ){\n        return Debug.bluef( Anything, objects );\n    }\n\n    public static Tracer whitef( Object Anything, Object...objects ){\n        return Debug.console.colorf( 30, Anything, objects );\n    }\n\n    public synchronized static Tracer whitefs( Object Anything, Object...objects ){\n        return Debug.whitef( Anything, objects );\n    }\n\n\n    public static Tracer hhf(){\n        Debug.console.getOut().println();\n        return Debug.console;\n    }\n\n\n    public static Tracer echo( Object 
data, Object...objects ) {\n        return Debug.console.echo( data, objects );\n    }\n\n    public static Tracer cerr( Object data, Object...objects ) {\n        return Debug.console.cerr( data, objects );\n    }\n\n    public synchronized static Tracer cerrSyn( Object data, Object...objects ) {\n        return Debug.console.cerr( data, objects );\n    }\n\n\n    public static void sleep( long millis ) {\n        try {\n            Thread.sleep( millis );\n        }\n        catch ( InterruptedException e ) {\n            Debug.cerr( e.getMessage() );\n        }\n    }\n\n    public static void stop() {\n        throw new InstantKillError( \"Invoked at [ Debug::stop() ]\" );\n    }\n\n    public static void exit() {\n        System.exit( -666 );\n    }\n\n\n\n    private static AtomicInteger InvokeCounts = new AtomicInteger();\n\n    public static long invokeCounts() {\n        return Debug.InvokeCounts.getAndIncrement();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/GeneralStrings.java",
    "content": "package com.pinecone.framework.util;\n\npublic abstract class GeneralStrings {\n    public static boolean transferCharParse ( char c, CursorParser cursor, StringBuilder sb ) {\n        switch ( c ) {\n            case '\"':\n            case '\\'':\n            case '/':\n            case '\\\\': {\n                sb.append(c);\n                return true;\n            }\n            case 'b': {\n                sb.append('\\b');\n                return true;\n            }\n            case 'f': {\n                sb.append('\\f');\n                return true;\n            }\n            case 'n':\n                sb.append('\\n');\n                return true;\n            case 'r': {\n                sb.append('\\r');\n                return true;\n            }\n            case 't': {\n                sb.append('\\t');\n                return true;\n            }\n            case 'x': {\n                // Notice: It is seem JSON not supported '\\xFF' format in JSON standard, but who care.\n                sb.append( (char) Integer.parseInt(cursor.next( 2), 16) );\n                return true;\n            }\n            case 'u': {\n                sb.append((char) Integer.parseInt(cursor.next( 4), 16));\n                return true;\n            }\n            default: {\n                // Notice: In Pinecone CPP will keep the illegal escape, you can modified as strict [JSON standard NOT allowed].\n                sb.append( '\\\\' );\n                //throw this.syntaxError( \"Error parser json string with illegal escape.\" );\n            }\n        }\n\n        return false;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/OSIdentifier.java",
    "content": "package com.pinecone.framework.util;\n\npublic class OSIdentifier {\n    private static String OS = System.getProperty(\"os.name\").toLowerCase();\n\n    private static OSIdentifier OSIInstance = new OSIdentifier();\n\n    private Platform platform;\n\n    private OSIdentifier(){}\n\n\n    public static boolean isLinux(){\n        return OS.contains(\"linux\");\n    }\n\n    public static boolean isMacOS(){\n        return OS.contains(\"mac\") &&OS.indexOf(\"os\")>0&& !OS.contains(\"x\");\n    }\n\n    public static boolean isMacOSX(){\n        return OS.contains(\"mac\") &&OS.indexOf(\"os\")>0&&OS.indexOf(\"x\")>0;\n    }\n\n    public static boolean isWindows(){\n        return OS.contains(\"windows\");\n    }\n\n    public static boolean isOS2(){\n        return OS.contains(\"os/2\");\n    }\n\n    public static boolean isSolaris(){\n        return OS.contains(\"solaris\");\n    }\n\n    public static boolean isSunOS(){\n        return OS.contains(\"sunos\");\n    }\n\n    public static boolean isMPEiX(){\n        return OS.contains(\"mpe/ix\");\n    }\n\n    public static boolean isHPUX(){\n        return OS.contains(\"hp-ux\");\n    }\n\n    public static boolean isAix(){\n        return OS.contains(\"aix\");\n    }\n\n    public static boolean isOS390(){\n        return OS.contains(\"os/390\");\n    }\n\n    public static boolean isFreeBSD(){\n        return OS.contains(\"freebsd\");\n    }\n\n    public static boolean isIrix(){\n        return OS.contains(\"irix\");\n    }\n\n    public static boolean isDigitalUnix(){\n        return OS.contains(\"digital\") &&OS.indexOf(\"unix\")>0;\n    }\n\n    public static boolean isNetWare(){\n        return OS.contains(\"netware\");\n    }\n\n    public static boolean isOSF1(){\n        return OS.contains(\"osf1\");\n    }\n\n    public static boolean isOpenVMS(){\n        return OS.contains(\"openvms\");\n    }\n\n\n    public static Platform getOSname(){\n        if(isAix()){\n            
OSIdentifier.OSIInstance.platform = Platform.AIX;\n        }else if (isDigitalUnix()) {\n            OSIdentifier.OSIInstance.platform = Platform.Digital_Unix;\n        }else if (isFreeBSD()) {\n            OSIdentifier.OSIInstance.platform = Platform.FreeBSD;\n        }else if (isHPUX()) {\n            OSIdentifier.OSIInstance.platform = Platform.HP_UX;\n        }else if (isIrix()) {\n            OSIdentifier.OSIInstance.platform = Platform.Irix;\n        }else if (isLinux()) {\n            OSIdentifier.OSIInstance.platform = Platform.Linux;\n        }else if (isMacOS()) {\n            OSIdentifier.OSIInstance.platform = Platform.Mac_OS;\n        }else if (isMacOSX()) {\n            OSIdentifier.OSIInstance.platform = Platform.Mac_OS_X;\n        }else if (isMPEiX()) {\n            OSIdentifier.OSIInstance.platform = Platform.MPEiX;\n        }else if (isNetWare()) {\n            OSIdentifier.OSIInstance.platform = Platform.NetWare_411;\n        }else if (isOpenVMS()) {\n            OSIdentifier.OSIInstance.platform = Platform.OpenVMS;\n        }else if (isOS2()) {\n            OSIdentifier.OSIInstance.platform = Platform.OS2;\n        }else if (isOS390()) {\n            OSIdentifier.OSIInstance.platform = Platform.OS390;\n        }else if (isOSF1()) {\n            OSIdentifier.OSIInstance.platform = Platform.OSF1;\n        }else if (isSolaris()) {\n            OSIdentifier.OSIInstance.platform = Platform.Solaris;\n        }else if (isSunOS()) {\n            OSIdentifier.OSIInstance.platform = Platform.SunOS;\n        }else if (isWindows()) {\n            OSIdentifier.OSIInstance.platform = Platform.Windows;\n        }else{\n            OSIdentifier.OSIInstance.platform = Platform.Others;\n        }\n        return OSIdentifier.OSIInstance.platform;\n    }\n\n    public enum Platform {\n        Any(\"any\"),\n        Linux(\"Linux\"),\n        Mac_OS(\"Mac OS\"),\n        Mac_OS_X(\"Mac OS X\"),\n        Windows(\"Windows\"),\n        OS2(\"OS/2\"),\n        
Solaris(\"Solaris\"),\n        SunOS(\"SunOS\"),\n        MPEiX(\"MPE/iX\"),\n        HP_UX(\"HP-UX\"),\n        AIX(\"AIX\"),\n        OS390(\"OS/390\"),\n        FreeBSD(\"FreeBSD\"),\n        Irix(\"Irix\"),\n        Digital_Unix(\"Digital Unix\"),\n        NetWare_411(\"NetWare\"),\n        OSF1(\"OSF1\"),\n        OpenVMS(\"OpenVMS\"),\n        Others(\"Others\");\n\n        private String description;\n\n        private Platform(String desc) {\n            this.description = desc;\n        }\n\n        public String toString() {\n            return this.description;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/ObjectUtils.java",
    "content": "package com.pinecone.framework.util;\n\nimport java.lang.reflect.Array;\nimport java.util.Arrays;\n\npublic abstract class ObjectUtils {\n    private static final int INITIAL_HASH = 7;\n    private static final int MULTIPLIER = 31;\n    private static final String EMPTY_STRING = \"\";\n    private static final String NULL_STRING = \"null\";\n    private static final String ARRAY_START = \"{\";\n    private static final String ARRAY_END = \"}\";\n    private static final String EMPTY_ARRAY = \"{}\";\n    private static final String ARRAY_ELEMENT_SEPARATOR = \", \";\n\n    public ObjectUtils() {\n    }\n\n    public static boolean isCheckedException(Throwable ex) {\n        return !(ex instanceof RuntimeException) && !(ex instanceof Error);\n    }\n\n    public static boolean isCompatibleWithThrowsClause(Throwable ex, Class... declaredExceptions) {\n        if (!isCheckedException(ex)) {\n            return true;\n        } else {\n            if (declaredExceptions != null) {\n                Class[] var2 = declaredExceptions;\n                int var3 = declaredExceptions.length;\n\n                for(int var4 = 0; var4 < var3; ++var4) {\n                    Class<?> declaredException = var2[var4];\n                    if (declaredException.isInstance(ex)) {\n                        return true;\n                    }\n                }\n            }\n\n            return false;\n        }\n    }\n\n    public static boolean isArray(Object obj) {\n        return obj != null && obj.getClass().isArray();\n    }\n\n    public static boolean isEmpty(Object[] array) {\n        return array == null || array.length == 0;\n    }\n\n    public static boolean containsElement(Object[] array, Object element) {\n        if (array == null) {\n            return false;\n        } else {\n            Object[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                Object arrayEle = var2[var4];\n 
               if (nullSafeEquals(arrayEle, element)) {\n                    return true;\n                }\n            }\n\n            return false;\n        }\n    }\n\n    public static boolean containsConstant(Enum<?>[] enumValues, String constant) {\n        return containsConstant(enumValues, constant, false);\n    }\n\n    public static boolean containsConstant(Enum<?>[] enumValues, String constant, boolean caseSensitive) {\n        Enum[] var3 = enumValues;\n        int var4 = enumValues.length;\n        int var5 = 0;\n\n        while(true) {\n            if (var5 >= var4) {\n                return false;\n            }\n\n            Enum<?> candidate = var3[var5];\n            if (caseSensitive) {\n                if (candidate.toString().equals(constant)) {\n                    break;\n                }\n            } else if (candidate.toString().equalsIgnoreCase(constant)) {\n                break;\n            }\n\n            ++var5;\n        }\n\n        return true;\n    }\n\n    public static <E extends Enum<?>> E caseInsensitiveValueOf(E[] enumValues, String constant) {\n        Enum[] var2 = enumValues;\n        int var3 = enumValues.length;\n\n        for(int var4 = 0; var4 < var3; ++var4) {\n            E candidate = (E) var2[var4];\n            if (candidate.toString().equalsIgnoreCase(constant)) {\n                return candidate;\n            }\n        }\n\n        throw new IllegalArgumentException(String.format(\"constant [%s] does not exist in enum type %s\", constant, enumValues.getClass().getComponentType().getName()));\n    }\n\n    public static <A, O extends A> A[] addObjectToArray(A[] array, O obj) {\n        Class<?> compType = Object.class;\n        if (array != null) {\n            compType = array.getClass().getComponentType();\n        } else if (obj != null) {\n            compType = obj.getClass();\n        }\n\n        int newArrLength = array != null ? 
array.length + 1 : 1;\n        A[] newArr = (A[])((Object[])Array.newInstance(compType, newArrLength));\n        if (array != null) {\n            System.arraycopy(array, 0, newArr, 0, array.length);\n        }\n\n        newArr[newArr.length - 1] = obj;\n        return newArr;\n    }\n\n    public static Object[] toObjectArray(Object source) {\n        if (source instanceof Object[]) {\n            return (Object[])((Object[])source);\n        } else if (source == null) {\n            return new Object[0];\n        } else if (!source.getClass().isArray()) {\n            throw new IllegalArgumentException(\"Source is not an array: \" + source);\n        } else {\n            int length = Array.getLength(source);\n            if (length == 0) {\n                return new Object[0];\n            } else {\n                Class<?> wrapperType = Array.get(source, 0).getClass();\n                Object[] newArray = (Object[])((Object[])Array.newInstance(wrapperType, length));\n\n                for(int i = 0; i < length; ++i) {\n                    newArray[i] = Array.get(source, i);\n                }\n\n                return newArray;\n            }\n        }\n    }\n\n    public static boolean nullSafeEquals(Object o1, Object o2) {\n        if (o1 == o2) {\n            return true;\n        } else if (o1 != null && o2 != null) {\n            if (o1.equals(o2)) {\n                return true;\n            } else {\n                if (o1.getClass().isArray() && o2.getClass().isArray()) {\n                    if (o1 instanceof Object[] && o2 instanceof Object[]) {\n                        return Arrays.equals((Object[])((Object[])o1), (Object[])((Object[])o2));\n                    }\n\n                    if (o1 instanceof boolean[] && o2 instanceof boolean[]) {\n                        return Arrays.equals((boolean[])((boolean[])o1), (boolean[])((boolean[])o2));\n                    }\n\n                    if (o1 instanceof byte[] && o2 instanceof byte[]) {\n     
                   return Arrays.equals((byte[])((byte[])o1), (byte[])((byte[])o2));\n                    }\n\n                    if (o1 instanceof char[] && o2 instanceof char[]) {\n                        return Arrays.equals((char[])((char[])o1), (char[])((char[])o2));\n                    }\n\n                    if (o1 instanceof double[] && o2 instanceof double[]) {\n                        return Arrays.equals((double[])((double[])o1), (double[])((double[])o2));\n                    }\n\n                    if (o1 instanceof float[] && o2 instanceof float[]) {\n                        return Arrays.equals((float[])((float[])o1), (float[])((float[])o2));\n                    }\n\n                    if (o1 instanceof int[] && o2 instanceof int[]) {\n                        return Arrays.equals((int[])((int[])o1), (int[])((int[])o2));\n                    }\n\n                    if (o1 instanceof long[] && o2 instanceof long[]) {\n                        return Arrays.equals((long[])((long[])o1), (long[])((long[])o2));\n                    }\n\n                    if (o1 instanceof short[] && o2 instanceof short[]) {\n                        return Arrays.equals((short[])((short[])o1), (short[])((short[])o2));\n                    }\n                }\n\n                return false;\n            }\n        } else {\n            return false;\n        }\n    }\n\n    public static int nullSafeHashCode(Object obj) {\n        if (obj == null) {\n            return 0;\n        } else {\n            if (obj.getClass().isArray()) {\n                if (obj instanceof Object[]) {\n                    return nullSafeHashCode((Object[])((Object[])obj));\n                }\n\n                if (obj instanceof boolean[]) {\n                    return nullSafeHashCode((boolean[])((boolean[])obj));\n                }\n\n                if (obj instanceof byte[]) {\n                    return nullSafeHashCode((byte[])((byte[])obj));\n                }\n\n                
if (obj instanceof char[]) {\n                    return nullSafeHashCode((char[])((char[])obj));\n                }\n\n                if (obj instanceof double[]) {\n                    return nullSafeHashCode((double[])((double[])obj));\n                }\n\n                if (obj instanceof float[]) {\n                    return nullSafeHashCode((float[])((float[])obj));\n                }\n\n                if (obj instanceof int[]) {\n                    return nullSafeHashCode((int[])((int[])obj));\n                }\n\n                if (obj instanceof long[]) {\n                    return nullSafeHashCode((long[])((long[])obj));\n                }\n\n                if (obj instanceof short[]) {\n                    return nullSafeHashCode((short[])((short[])obj));\n                }\n            }\n\n            return obj.hashCode();\n        }\n    }\n\n    public static int nullSafeHashCode(Object[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            Object[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                Object element = var2[var4];\n                hash = 31 * hash + nullSafeHashCode(element);\n            }\n\n            return hash;\n        }\n    }\n\n    public static int nullSafeHashCode(boolean[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            boolean[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                boolean element = var2[var4];\n                hash = 31 * hash + hashCode(element);\n            }\n\n            return hash;\n        }\n    }\n\n    public static int nullSafeHashCode(byte[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            byte[] var2 = array;\n            int var3 
= array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                byte element = var2[var4];\n                hash = 31 * hash + element;\n            }\n\n            return hash;\n        }\n    }\n\n    public static int nullSafeHashCode(char[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            char[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                char element = var2[var4];\n                hash = 31 * hash + element;\n            }\n\n            return hash;\n        }\n    }\n\n    public static int nullSafeHashCode(double[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            double[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                double element = var2[var4];\n                hash = 31 * hash + hashCode(element);\n            }\n\n            return hash;\n        }\n    }\n\n    public static int nullSafeHashCode(float[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            float[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                float element = var2[var4];\n                hash = 31 * hash + hashCode(element);\n            }\n\n            return hash;\n        }\n    }\n\n    public static int nullSafeHashCode(int[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            int[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                int element = var2[var4];\n                hash = 31 * hash + element;\n            }\n\n            return hash;\n        }\n    }\n\n    public 
static int nullSafeHashCode(long[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            long[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                long element = var2[var4];\n                hash = 31 * hash + hashCode(element);\n            }\n\n            return hash;\n        }\n    }\n\n    public static int nullSafeHashCode(short[] array) {\n        if (array == null) {\n            return 0;\n        } else {\n            int hash = 7;\n            short[] var2 = array;\n            int var3 = array.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                short element = var2[var4];\n                hash = 31 * hash + element;\n            }\n\n            return hash;\n        }\n    }\n\n    public static int hashCode(boolean bool) {\n        return bool ? 1231 : 1237;\n    }\n\n    public static int hashCode(double dbl) {\n        return hashCode(Double.doubleToLongBits(dbl));\n    }\n\n    public static int hashCode(float flt) {\n        return Float.floatToIntBits(flt);\n    }\n\n    public static int hashCode(long lng) {\n        return (int)(lng ^ lng >>> 32);\n    }\n\n    public static String identityToString(Object obj) {\n        return obj == null ? \"\" : obj.getClass().getName() + \"@\" + getIdentityHexString(obj);\n    }\n\n    public static String getIdentityHexString(Object obj) {\n        return Integer.toHexString(System.identityHashCode(obj));\n    }\n\n    public static String getDisplayString(Object obj) {\n        return obj == null ? \"\" : nullSafeToString(obj);\n    }\n\n    public static String nullSafeClassName(Object obj) {\n        return obj != null ? 
obj.getClass().getName() : \"null\";\n    }\n\n    public static String nullSafeToString(Object obj) {\n        if (obj == null) {\n            return \"null\";\n        } else if (obj instanceof String) {\n            return (String)obj;\n        } else if (obj instanceof Object[]) {\n            return nullSafeToString((Object[])((Object[])obj));\n        } else if (obj instanceof boolean[]) {\n            return nullSafeToString((boolean[])((boolean[])obj));\n        } else if (obj instanceof byte[]) {\n            return nullSafeToString((byte[])((byte[])obj));\n        } else if (obj instanceof char[]) {\n            return nullSafeToString((char[])((char[])obj));\n        } else if (obj instanceof double[]) {\n            return nullSafeToString((double[])((double[])obj));\n        } else if (obj instanceof float[]) {\n            return nullSafeToString((float[])((float[])obj));\n        } else if (obj instanceof int[]) {\n            return nullSafeToString((int[])((int[])obj));\n        } else if (obj instanceof long[]) {\n            return nullSafeToString((long[])((long[])obj));\n        } else if (obj instanceof short[]) {\n            return nullSafeToString((short[])((short[])obj));\n        } else {\n            String str = obj.toString();\n            return str != null ? 
str : \"\";\n        }\n    }\n\n    public static String nullSafeToString(Object[] array) {\n        if (array == null) {\n            return \"null\";\n        } else {\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                        sb.append(\", \");\n                    }\n\n                    sb.append(String.valueOf(array[i]));\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n\n    public static String nullSafeToString(boolean[] array) {\n        if (array == null) {\n            return \"null\";\n        } else {\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                        sb.append(\", \");\n                    }\n\n                    sb.append(array[i]);\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n\n    public static String nullSafeToString(byte[] array) {\n        if (array == null) {\n            return \"null\";\n        } else {\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                     
   sb.append(\", \");\n                    }\n\n                    sb.append(array[i]);\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n\n    public static String nullSafeToString(char[] array) {\n        if (array == null) {\n            return \"null\";\n        } else {\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                        sb.append(\", \");\n                    }\n\n                    sb.append(\"'\").append(array[i]).append(\"'\");\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n\n    public static String nullSafeToString(double[] array) {\n        if (array == null) {\n            return \"null\";\n        } else {\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                        sb.append(\", \");\n                    }\n\n                    sb.append(array[i]);\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n\n    public static String nullSafeToString(float[] array) {\n        if (array == null) {\n            return \"null\";\n        } else {\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new 
StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                        sb.append(\", \");\n                    }\n\n                    sb.append(array[i]);\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n\n    public static String nullSafeToString(int[] array) {\n        if (array == null) {\n            return \"null\";\n        } else {\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                        sb.append(\", \");\n                    }\n\n                    sb.append(array[i]);\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n\n    public static String nullSafeToString(long[] array) {\n        if (array == null) {\n            return \"null\";\n        } else {\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                        sb.append(\", \");\n                    }\n\n                    sb.append(array[i]);\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n\n    public static String nullSafeToString(short[] array) {\n        if (array == null) {\n            return \"null\";\n        } else 
{\n            int length = array.length;\n            if (length == 0) {\n                return \"{}\";\n            } else {\n                StringBuilder sb = new StringBuilder();\n\n                for(int i = 0; i < length; ++i) {\n                    if (i == 0) {\n                        sb.append(\"{\");\n                    } else {\n                        sb.append(\", \");\n                    }\n\n                    sb.append(array[i]);\n                }\n\n                sb.append(\"}\");\n                return sb.toString();\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/PatternMatchUtils.java",
    "content": "package com.pinecone.framework.util;\n\n\nimport com.pinecone.framework.system.Nullable;\n\npublic abstract class PatternMatchUtils {\n    public PatternMatchUtils() {\n\n    }\n\n    public static boolean simpleMatch(@Nullable String pattern, @Nullable String str) {\n        if (pattern != null && str != null) {\n            int firstIndex = pattern.indexOf(42);\n            if (firstIndex == -1) {\n                return pattern.equals(str);\n            }\n            else if (firstIndex == 0) {\n                if (pattern.length() == 1) {\n                    return true;\n                }\n                else {\n                    int nextIndex = pattern.indexOf(42, 1);\n                    if (nextIndex == -1) {\n                        return str.endsWith(pattern.substring(1));\n                    }\n                    else {\n                        String part = pattern.substring(1, nextIndex);\n                        if (part.isEmpty()) {\n                            return simpleMatch(pattern.substring(nextIndex), str);\n                        }\n                        else {\n                            for( int partIndex = str.indexOf(part); partIndex != -1; partIndex = str.indexOf(part, partIndex + 1) ) {\n                                if (simpleMatch(pattern.substring(nextIndex), str.substring(partIndex + part.length()))) {\n                                    return true;\n                                }\n                            }\n\n                            return false;\n                        }\n                    }\n                }\n            }\n            else {\n                return str.length() >= firstIndex && pattern.substring(0, firstIndex).equals(str.substring(0, firstIndex)) && simpleMatch(pattern.substring(firstIndex), str.substring(firstIndex));\n            }\n        }\n        else {\n            return false;\n        }\n    }\n\n    public static boolean simpleMatch(@Nullable String[] 
patterns, String str) {\n        if ( patterns != null ) {\n            for( int i = 0; i < patterns.length; ++i ) {\n                String pattern = patterns[i];\n                if ( simpleMatch(pattern, str) ) {\n                    return true;\n                }\n            }\n        }\n\n        return false;\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/Randomium.java",
    "content": "package com.pinecone.framework.util;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.math.RoundingMode;\nimport java.util.Random;\n\npublic class Randomium extends Random implements Pinenut {\n    private static final String S_ALP_NUM_STRING_DICT = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\";\n\n    public Randomium(){\n        super();\n    }\n\n    public Randomium( long seed ){\n        super( seed );\n    }\n\n    public int nextInteger ( int from, int to ) {\n        if ( from > to ) {\n            throw new IllegalArgumentException( \"'from' cannot be greater than 'to'!\" );\n        }\n        return this.nextInt(to - from + 1) + from;\n    }\n\n    public long nextLong ( long from, long to ) {\n        if ( from > to ) {\n            throw new IllegalArgumentException( \"'from' cannot be greater than 'to'!\" );\n        }\n        return from + (long) ( this.nextLong() * (to - from + 1) / (Long.MAX_VALUE + 1.0) );\n    }\n\n    public short nextShort( short from, short to ) {\n        if (from > to) {\n            throw new IllegalArgumentException( \"'from' cannot be greater than 'to'!\" );\n        }\n        return (short) ( this.nextInt(to - from + 1) + from );\n    }\n\n    public byte nextByte( byte from, byte to ) {\n        if ( from > to ) {\n            throw new IllegalArgumentException( \"'from' cannot be greater than 'to'!\" );\n        }\n        return (byte) ( this.nextInt(to - from + 1) + from );\n    }\n\n    public char nextCharacter( char from, char to ) {\n        if ( from > to ) {\n            throw new IllegalArgumentException( \"'from' char cannot be greater than 'to' char!\" );\n        }\n        return (char) ( this.nextInt(to - from + 1) + from );\n    }\n\n    public float nextFloat32( float from, float to ) {\n        if ( from > to ) {\n            throw new 
IllegalArgumentException(\"'from' cannot be greater than 'to'!\");\n        }\n        return from + this.nextFloat() * (to - from);\n    }\n\n    public double nextFloat64( double from, double to ) {\n        if ( from > to ) {\n            throw new IllegalArgumentException(\"'from' cannot be greater than 'to'!\");\n        }\n        return from + this.nextDouble() * (to - from);\n    }\n\n    public BigDecimal nextBigDecimal( BigDecimal from, BigDecimal to, int scale ) {\n        if ( from.compareTo(to) > 0 ) {\n            throw new IllegalArgumentException(\"'from' cannot be greater than 'to'!\");\n        }\n        BigDecimal randomBigDecimal = from.add(new BigDecimal(this.nextDouble()).multiply(to.subtract(from)));\n        return randomBigDecimal.setScale( scale, RoundingMode.HALF_UP );\n    }\n\n    public BigInteger nextBigInteger( BigInteger from, BigInteger to ) {\n        if ( from.compareTo(to) > 0 ) {\n            throw new IllegalArgumentException( \"'from' cannot be greater than 'to'!\" );\n        }\n\n        BigInteger range = to.subtract( from ).add( BigInteger.ONE ); // Calculate the range\n        BigInteger randomNumber;\n\n        do {\n            randomNumber = new BigInteger(range.bitLength(), this);\n        } while (randomNumber.compareTo(range) >= 0);\n\n        return randomNumber.add( from );\n    }\n\n    public String nextString( char from, char to, int scale ) {\n        if( from > to ){\n            throw new IllegalArgumentException(\"'from' char can't beyond 'to' char !\");\n        }\n        if( scale > Pinecone.COMMON_ACCURACY_LIMIT ){\n            throw new ArithmeticException(\"Randomium scale is too big limit '\" + Pinecone.COMMON_ACCURACY_LIMIT + \"' !\");\n        }\n        String randomDict = Randomium.S_ALP_NUM_STRING_DICT ;\n        int fromIndex = randomDict.indexOf(from), toIndex = randomDict.indexOf(to);\n        StringBuilder sb = new StringBuilder();\n        for( int i=0; i < scale; ++i ){\n            
sb.append(randomDict.charAt( this.nextInt(toIndex - fromIndex + 1) + fromIndex) ) ;\n        }\n        return sb.toString();\n    }\n\n    public String nextString( int scale ){\n        return nextString('0','z',scale);\n    }\n\n    public String nextString(){\n        return nextString(10);\n    }\n\n    public String nextMBString( char from, char to, int scale ) {\n        if ( from > to ) {\n            throw new IllegalArgumentException( \"'from' char can't be beyond 'to' char!\" );\n        }\n        if ( scale < 0 ) {\n            throw new IllegalArgumentException( \"Scale cannot be negative!\" );\n        }\n        StringBuilder sb = new StringBuilder();\n        for ( int i = 0; i < scale; ++i ) {\n            sb.append((char) (this.nextInt(to - from + 1) + from));\n        }\n        return sb.toString();\n    }\n\n    public String nextMBString( int scale ){\n        return nextMBString('0','z',scale);\n    }\n\n    public String nextMBString(){\n        return nextMBString(10);\n    }\n\n\n    public double nextGaussian( double mean, double stddev ) {\n        return mean + stddev * this.nextGaussian();\n    }\n\n    public int nextPoisson( double lambda ) {\n        double L = Math.exp(-lambda);\n        int k = 0;\n        double p = 1.0;\n        do {\n            k++;\n            p *= this.nextDouble();\n        }\n        while (p > L);\n        return k - 1;\n    }\n\n    public double nextBias( double from, double to, double bias ) {\n        double randomValue = this.nextDouble();\n        double biasedValue = Math.pow( randomValue, bias );\n        return from + (to - from) * biasedValue;\n    }\n\n\n    public static Randomium newInstance() {\n        return new Randomium();\n    }\n\n    public static Randomium newInstance( long seed ) {\n        return new Randomium( seed );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/ReflectionUtils.java",
    "content": "package com.pinecone.framework.util;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.unit.ConcurrentReferenceHashMap;\n\nimport java.beans.IntrospectionException;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.lang.reflect.UndeclaredThrowableException;\nimport java.sql.SQLException;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\n\npublic final class ReflectionUtils {\n    private static final String CGLIB_RENAMED_METHOD_PREFIX = \"CGLIB$\";\n    private static final Map<Class<?>, Method[]> declaredMethodsCache = new ConcurrentReferenceHashMap(256);\n    public static ReflectionUtils.FieldFilter COPYABLE_FIELDS = new ReflectionUtils.FieldFilter() {\n        public boolean matches(Field field) {\n            return !Modifier.isStatic(field.getModifiers()) && !Modifier.isFinal(field.getModifiers());\n        }\n    };\n    public static ReflectionUtils.MethodFilter NON_BRIDGED_METHODS = new ReflectionUtils.MethodFilter() {\n        public boolean matches(Method method) {\n            return !method.isBridge();\n        }\n    };\n    public static ReflectionUtils.MethodFilter USER_DECLARED_METHODS = new ReflectionUtils.MethodFilter() {\n        public boolean matches(Method method) {\n            return !method.isBridge() && method.getDeclaringClass() != Object.class;\n        }\n    };\n\n    public ReflectionUtils() {\n    }\n\n    public static Field findField(Class<?> clazz, String name) {\n        return findField(clazz, name, (Class)null);\n    }\n\n    public static Field findField(Class<?> clazz, String name, Class<?> type) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        Assert.isTrue(name != null || type != null, \"Either name or 
type of the field must be specified\");\n\n        for(Class searchType = clazz; !Object.class.equals(searchType) && searchType != null; searchType = searchType.getSuperclass()) {\n            Field[] fields = searchType.getDeclaredFields();\n            Field[] t = fields;\n            int len = fields.length;\n\n            for(int i = 0; i < len; ++i) {\n                Field field = t[i];\n                if ((name == null || name.equals(field.getName())) && (type == null || type.equals(field.getType()))) {\n                    return field;\n                }\n            }\n        }\n\n        return null;\n    }\n\n    public static void setField(Field field, Object target, Object value) {\n        try {\n            field.set(target, value);\n        } catch (IllegalAccessException e) {\n            handleReflectionException(e);\n            throw new IllegalStateException(\"Unexpected reflection exception - \" + e.getClass().getName() + \": \" + e.getMessage());\n        }\n    }\n\n    public static Object getField(Field field, Object target) {\n        try {\n            return field.get(target);\n        } catch (IllegalAccessException e) {\n            handleReflectionException(e);\n            throw new IllegalStateException(\"Unexpected reflection exception - \" + e.getClass().getName() + \": \" + e.getMessage());\n        }\n    }\n\n    public static Method findMethod(Class<?> clazz, String name) {\n        return findMethod(clazz, name, new Class[0]);\n    }\n\n    public static Method findMethod(Class<?> clazz, String name, Class... paramTypes) {\n        Assert.notNull(clazz, \"Class must not be null\");\n        Assert.notNull(name, \"Method name must not be null\");\n\n        for(Class searchType = clazz; searchType != null; searchType = searchType.getSuperclass()) {\n            Method[] methods = searchType.isInterface() ? 
searchType.getMethods() : getDeclaredMethods(searchType);\n            Method[] t = methods;\n            int len = methods.length;\n\n            for( int i = 0; i < len; ++i ) {\n                Method method = t[i];\n                if (name.equals(method.getName()) && (paramTypes == null || Arrays.equals(paramTypes, method.getParameterTypes()))) {\n                    return method;\n                }\n            }\n        }\n\n        return null;\n    }\n\n    public static Object invokeMethod(Method method, Object target) {\n        return invokeMethod(method, target, new Object[0]);\n    }\n\n    public static Object invokeMethod(Method method, Object target, Object... args) {\n        try {\n            return method.invoke(target, args);\n        } catch (Exception e) {\n            handleReflectionException(e);\n            throw new IllegalStateException(\"Should never get here\");\n        }\n    }\n\n    public static Object invokeJdbcMethod(Method method, Object target) throws SQLException {\n        return invokeJdbcMethod(method, target, new Object[0]);\n    }\n\n    public static Object invokeJdbcMethod(Method method, Object target, Object... 
args) throws SQLException {\n        try {\n            return method.invoke(target, args);\n        }\n        catch (IllegalAccessException e) {\n            handleReflectionException(e);\n        }\n        catch (InvocationTargetException e1) {\n            if (e1.getTargetException() instanceof SQLException) {\n                throw (SQLException)e1.getTargetException();\n            }\n\n            handleInvocationTargetException(e1);\n        }\n\n        throw new IllegalStateException(\"Should never get here\");\n    }\n\n    public static void handleReflectionException(Exception ex) {\n        if (ex instanceof NoSuchMethodException) {\n            throw new IllegalStateException(\"Method not found: \" + ex.getMessage());\n        } else if (ex instanceof IllegalAccessException) {\n            throw new IllegalStateException(\"Could not access method: \" + ex.getMessage());\n        } else {\n            if (ex instanceof InvocationTargetException) {\n                handleInvocationTargetException((InvocationTargetException)ex);\n            }\n\n            if (ex instanceof RuntimeException) {\n                throw (RuntimeException)ex;\n            } else {\n                throw new UndeclaredThrowableException(ex);\n            }\n        }\n    }\n\n    public static void handleInvocationTargetException(InvocationTargetException ex) {\n        rethrowRuntimeException(ex.getTargetException());\n    }\n\n    public static void rethrowRuntimeException(Throwable ex) {\n        if (ex instanceof RuntimeException) {\n            throw (RuntimeException)ex;\n        } else if (ex instanceof Error) {\n            throw (Error)ex;\n        } else {\n            throw new UndeclaredThrowableException(ex);\n        }\n    }\n\n    public static void rethrowException(Throwable ex) throws Exception {\n        if (ex instanceof Exception) {\n            throw (Exception)ex;\n        } else if (ex instanceof Error) {\n            throw (Error)ex;\n        } 
else {\n            throw new UndeclaredThrowableException(ex);\n        }\n    }\n\n    public static boolean declaresException(Method method, Class<?> exceptionType) {\n        Assert.notNull(method, \"Method must not be null\");\n        Class<?>[] declaredExceptions = method.getExceptionTypes();\n        Class[] t = declaredExceptions;\n        int length = declaredExceptions.length;\n\n        for( int i = 0; i < length; ++i ) {\n            Class<?> declaredException = t[i];\n            if (declaredException.isAssignableFrom(exceptionType)) {\n                return true;\n            }\n        }\n\n        return false;\n    }\n\n    public static boolean isPublicStaticFinal(Field field) {\n        int modifiers = field.getModifiers();\n        return Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers);\n    }\n\n    public static boolean isEqualsMethod(Method method) {\n        if (method != null && method.getName().equals(\"equals\")) {\n            Class<?>[] paramTypes = method.getParameterTypes();\n            return paramTypes.length == 1 && paramTypes[0] == Object.class;\n        } else {\n            return false;\n        }\n    }\n\n    public static boolean isHashCodeMethod(Method method) {\n        return method != null && method.getName().equals(\"hashCode\") && method.getParameterTypes().length == 0;\n    }\n\n    public static boolean isToStringMethod(Method method) {\n        return method != null && method.getName().equals(\"toString\") && method.getParameterTypes().length == 0;\n    }\n\n    public static boolean isObjectMethod(Method method) {\n        if (method == null) {\n            return false;\n        }\n        else {\n            try {\n                Object.class.getDeclaredMethod(method.getName(), method.getParameterTypes());\n                return true;\n            }\n            catch ( Exception e ) {\n                return false;\n            }\n        }\n    }\n\n    public 
static boolean isCglibRenamedMethod(Method renamedMethod) {\n        String name = renamedMethod.getName();\n        if (!name.startsWith(\"CGLIB$\")) {\n            return false;\n        } else {\n            int i;\n            for(i = name.length() - 1; i >= 0 && Character.isDigit(name.charAt(i)); --i) {\n            }\n\n            return i > \"CGLIB$\".length() && i < name.length() - 1 && name.charAt(i) == '$';\n        }\n    }\n\n    public static void makeAccessible(Field field) {\n        if ((!Modifier.isPublic(field.getModifiers()) || !Modifier.isPublic(field.getDeclaringClass().getModifiers()) || Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) {\n            field.setAccessible(true);\n        }\n\n    }\n\n    public static void makeAccessible(Method method) {\n        if ((!Modifier.isPublic(method.getModifiers()) || !Modifier.isPublic(method.getDeclaringClass().getModifiers())) && !method.isAccessible()) {\n            method.setAccessible(true);\n        }\n\n    }\n\n    public static void makeAccessible(Constructor<?> ctor) {\n        if ((!Modifier.isPublic(ctor.getModifiers()) || !Modifier.isPublic(ctor.getDeclaringClass().getModifiers())) && !ctor.isAccessible()) {\n            ctor.setAccessible(true);\n        }\n\n    }\n\n    public static <T> Constructor<T> accessibleConstructor(Class<T> clazz, Class... 
parameterTypes) throws NoSuchMethodException {\n        Constructor<T> ctor = clazz.getDeclaredConstructor(parameterTypes);\n        ReflectionUtils.makeAccessible(ctor);\n        return ctor;\n    }\n\n    public static void doWithMethods(Class<?> clazz, ReflectionUtils.MethodCallback mc) throws IllegalArgumentException {\n        doWithMethods(clazz, mc, (ReflectionUtils.MethodFilter)null);\n    }\n\n    public static void doWithMethods(Class<?> clazz, ReflectionUtils.MethodCallback mc, ReflectionUtils.MethodFilter mf) throws IllegalArgumentException {\n        Method[] methods = getDeclaredMethods(clazz);\n        Method[] t = methods;\n        int length = methods.length;\n\n        int i;\n        for( i = 0; i < length; ++i ) {\n            Method method = t[i];\n            if (mf == null || mf.matches(method)) {\n                try {\n                    mc.doWith(method);\n                }\n                catch (IllegalAccessException e) {\n                    throw new IllegalStateException(\"Shouldn't be illegal to access method '\" + method.getName() + \"': \" + e);\n                }\n            }\n        }\n\n        if (clazz.getSuperclass() != null) {\n            doWithMethods(clazz.getSuperclass(), mc, mf);\n        } else if (clazz.isInterface()) {\n            Class[] interfaces = clazz.getInterfaces();\n            length = interfaces.length;\n\n            for( i = 0; i < length; ++i ) {\n                Class<?> superIfc = interfaces[i];\n                doWithMethods(superIfc, mc, mf);\n            }\n        }\n\n    }\n\n    public static Method[] getAllDeclaredMethods(Class<?> leafClass) throws IllegalArgumentException {\n        final List<Method> methods = new ArrayList(32);\n        doWithMethods(leafClass, new ReflectionUtils.MethodCallback() {\n            public void doWith(Method method) {\n                methods.add(method);\n            }\n        });\n        return (Method[])methods.toArray(new Method[methods.size()]);\n  
  }\n\n    public static Method[] getUniqueDeclaredMethods(Class<?> leafClass) throws IllegalArgumentException {\n        final List<Method> methods = new ArrayList(32);\n        doWithMethods(leafClass, new ReflectionUtils.MethodCallback() {\n            public void doWith(Method method) {\n                boolean knownSignature = false;\n                Method methodBeingOverriddenWithCovariantReturnType = null;\n                Iterator iter = methods.iterator();\n\n                while(iter.hasNext()) {\n                    Method existingMethod = (Method)iter.next();\n                    if (method.getName().equals(existingMethod.getName()) && Arrays.equals(method.getParameterTypes(), existingMethod.getParameterTypes())) {\n                        if (existingMethod.getReturnType() != method.getReturnType() && existingMethod.getReturnType().isAssignableFrom(method.getReturnType())) {\n                            methodBeingOverriddenWithCovariantReturnType = existingMethod;\n                            break;\n                        }\n\n                        knownSignature = true;\n                        break;\n                    }\n                }\n\n                if (methodBeingOverriddenWithCovariantReturnType != null) {\n                    methods.remove(methodBeingOverriddenWithCovariantReturnType);\n                }\n\n                if (!knownSignature && !ReflectionUtils.isCglibRenamedMethod(method)) {\n                    methods.add(method);\n                }\n\n            }\n        });\n        return (Method[])methods.toArray(new Method[methods.size()]);\n    }\n\n    private static Method[] getDeclaredMethods(Class<?> clazz) {\n        Method[] result = (Method[])declaredMethodsCache.get(clazz);\n        if (result == null) {\n            result = clazz.getDeclaredMethods();\n            declaredMethodsCache.put(clazz, result);\n        }\n\n        return result;\n    }\n\n    public static void doWithFields(Class<?> clazz, 
ReflectionUtils.FieldCallback fc) throws IllegalArgumentException {\n        doWithFields(clazz, fc, (ReflectionUtils.FieldFilter)null);\n    }\n\n    public static void doWithFields(Class<?> clazz, ReflectionUtils.FieldCallback fc, ReflectionUtils.FieldFilter ff) throws IllegalArgumentException {\n        Class targetClass = clazz;\n\n        do {\n            Field[] fields = targetClass.getDeclaredFields();\n            Field[] t = fields;\n            int len = fields.length;\n\n            for(int i = 0; i < len; ++i) {\n                Field field = t[i];\n                if (ff == null || ff.matches(field)) {\n                    try {\n                        fc.doWith(field);\n                    } catch (IllegalAccessException e) {\n                        throw new IllegalStateException(\"Shouldn't be illegal to access field '\" + field.getName() + \"': \" + e);\n                    }\n                }\n            }\n\n            targetClass = targetClass.getSuperclass();\n        } while(targetClass != null && targetClass != Object.class);\n\n    }\n\n    public static void shallowCopyFieldState(final Object src, final Object dest) throws IllegalArgumentException {\n        if (src == null) {\n            throw new IllegalArgumentException(\"Source for field copy cannot be null\");\n        } else if (dest == null) {\n            throw new IllegalArgumentException(\"Destination for field copy cannot be null\");\n        } else if (!src.getClass().isAssignableFrom(dest.getClass())) {\n            throw new IllegalArgumentException(\"Destination class [\" + dest.getClass().getName() + \"] must be same or subclass as source class [\" + src.getClass().getName() + \"]\");\n        } else {\n            doWithFields(src.getClass(), new ReflectionUtils.FieldCallback() {\n                public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException {\n                    ReflectionUtils.makeAccessible(field);\n                    
Object srcValue = field.get(src);\n                    field.set(dest, srcValue);\n                }\n            }, COPYABLE_FIELDS);\n        }\n    }\n\n    public interface FieldFilter {\n        boolean matches( Field field );\n    }\n\n    public interface FieldCallback {\n        void doWith( Field field ) throws IllegalArgumentException, IllegalAccessException;\n    }\n\n    public interface MethodFilter {\n        boolean matches( Method method );\n    }\n\n    public interface MethodCallback {\n        void doWith( Method method ) throws IllegalArgumentException, IllegalAccessException;\n    }\n\n\n    /**\n     *   Version: New add in Pinecone Java Ver 20240628\n     */\n    public static Object tryAccessibleInvoke( Method method, Object obj, Object... args ) throws IllegalArgumentException, InvocationTargetException {\n        try{\n            method.setAccessible( true );\n            return method.invoke( obj, args );\n        }\n        catch ( IllegalAccessException eae ) {\n            throw new ProxyProvokeHandleException( eae );\n        }\n    }\n\n\n    /**\n     *   Version: New add in Pinecone Java Ver 20241006\n     */\n    public static Object beanGet( Object bean, String propertyKey ) {\n        try{\n            java.beans.PropertyDescriptor propertyDescriptor = new java.beans.PropertyDescriptor( propertyKey, bean.getClass() );\n            Method readMethod = propertyDescriptor.getReadMethod();\n            if ( readMethod != null ) {\n                try{\n                    readMethod.setAccessible( true );\n                    return readMethod.invoke( bean );\n                }\n                catch ( InvocationTargetException | IllegalArgumentException | IllegalAccessException e ) {\n                    return null;\n                }\n            }\n        }\n        catch ( IntrospectionException e ) {\n            return null;\n        }\n        return null;\n    }\n\n    public static void beanSet( Object bean, String 
propertyKey, Object val ) throws IllegalArgumentException {\n        try{\n            java.beans.PropertyDescriptor propertyDescriptor = new java.beans.PropertyDescriptor( propertyKey, bean.getClass() );\n            Method writeMethod = propertyDescriptor.getWriteMethod();\n            if ( writeMethod != null ) {\n                try{\n                    writeMethod.setAccessible( true );\n                    writeMethod.invoke( bean, val );\n                }\n                catch ( InvocationTargetException | IllegalArgumentException | IllegalAccessException e ) {\n                    throw new IllegalArgumentException( e );\n                }\n            }\n        }\n        catch ( IntrospectionException e ) {\n            throw new IllegalArgumentException( e );\n        }\n    }\n\n    /**\n     * e.g. java.util.List<com.util.json.Test>\n     * e.g. java.util.Map<java.lang.String, com.util.json.Slave>\n     */\n    public static String[] extractGenericClassNames( String input ) {\n        int startIndex = input.indexOf('<');\n        int endIndex   = input.lastIndexOf('>');\n\n        if ( startIndex != -1 && endIndex != -1 && endIndex > startIndex ) {\n            String types = input.substring( startIndex + 1, endIndex ).trim();\n            if( types.indexOf( \", \" ) > 0 ) {\n                return types.split( \", \" );\n            }\n            return types.split( \",\" );\n        }\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/StringTraits.java",
    "content": "package com.pinecone.framework.util;\n\nimport java.util.Arrays;\n\npublic class StringTraits {\n    // String like `CHO2OHC`\n    public static boolean isChiralString ( String szThat, boolean bNoCase ) {\n        int nMid   = szThat.length() / 2;\n\n        for ( int i = 0; i < nMid; i++ ) {\n            char c1 = szThat.charAt(i);\n            char c2 = szThat.charAt( szThat.length() - i - 1 );\n\n            if( bNoCase ){\n                if( !CharactersUtils.regionMatches( c1, c2 ) ){\n                    return false;\n                }\n            }\n            else {\n                if ( c1 != c2 ) {\n                    return false;\n                }\n            }\n        }\n        return true;\n    }\n\n    public static boolean isChiralString ( String szThat ) {\n        return StringTraits.isChiralString( szThat, true );\n    }\n\n    // String like `CHO2CHO`\n    public static boolean isHomoString   ( String szThat, boolean bNoCase ) {\n        int nBias = szThat.length() % 2 != 0 ? 1 : 0;\n        int nMid   = szThat.length() / 2;\n\n        for ( int i = 0; i < nMid; i++ ) {\n            char c1 = szThat.charAt(i);\n            char c2 = szThat.charAt( nMid + i + nBias );\n\n            if( bNoCase ){\n                if( !CharactersUtils.regionMatches( c1, c2 ) ){\n                    return false;\n                }\n            }\n            else {\n                if ( c1 != c2 ) {\n                    return false;\n                }\n            }\n        }\n        return true;\n    }\n\n    public static boolean isHomoString   ( String szThat ) {\n        return StringTraits.isHomoString( szThat, true );\n    }\n\n    // String like `CHO2HCO` or `CHO2COH` etc.\n    public static boolean isHeterString  ( String szThat, boolean bNoCase ) {\n        int nBias = szThat.length() % 2 != 0 ? 
1 : 0;\n        int nMid  = szThat.length() / 2;\n\n        char[] chars = szThat.toCharArray();\n        Arrays.sort( chars, 0, nMid );\n        Arrays.sort( chars, nMid + nBias, szThat.length() );\n\n        return CharactersUtils.equals( chars, 0, nMid, chars, nMid + nBias, chars.length, bNoCase );\n    }\n\n    public static boolean isHeterString  ( String szThat ) {\n        return StringTraits.isHeterString( szThat, true );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/StringUtils.java",
    "content": "package com.pinecone.framework.util;\n\nimport java.io.IOException;\nimport java.io.StringWriter;\nimport java.io.Writer;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Enumeration;\nimport java.util.Iterator;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Properties;\nimport java.util.Set;\nimport java.util.StringTokenizer;\nimport java.util.TimeZone;\nimport java.util.TreeSet;\n\npublic final class StringUtils {\n    public static final String FOLDER_SEPARATOR = \"/\";\n    public static final String WINDOWS_FOLDER_SEPARATOR = \"\\\\\";\n    public static final String TOP_PATH = \"..\";\n    public static final String CURRENT_PATH = \".\";\n    public static final char EXTENSION_SEPARATOR = '.';\n\n    public StringUtils() {\n    }\n\n    public static boolean isEmpty( Object str ) {\n        return str == null || \"\".equals(str);\n    }\n\n    public static boolean isEmpty( String str ) {\n        return str == null || str.isEmpty();\n    }\n\n    public static boolean isNoneEmpty( String str ) {\n        return !StringUtils.isEmpty( str );\n    }\n\n    public static boolean hasLength(CharSequence str) {\n        return str != null && str.length() > 0;\n    }\n\n    public static boolean hasLength(String str) {\n        return hasLength((CharSequence)str);\n    }\n\n    public static boolean hasText(CharSequence str) {\n        if (!hasLength(str)) {\n            return false;\n        } else {\n            int strLen = str.length();\n\n            for(int i = 0; i < strLen; ++i) {\n                if (!Character.isWhitespace(str.charAt(i))) {\n                    return true;\n                }\n            }\n\n            return false;\n        }\n    }\n\n    public static boolean hasText(String str) {\n        return hasText((CharSequence)str);\n    }\n\n    public static boolean 
containsWhitespace(CharSequence str) {\n        if (!hasLength(str)) {\n            return false;\n        } else {\n            int strLen = str.length();\n\n            for(int i = 0; i < strLen; ++i) {\n                if (Character.isWhitespace(str.charAt(i))) {\n                    return true;\n                }\n            }\n\n            return false;\n        }\n    }\n\n    public static boolean containsWhitespace(String str) {\n        return containsWhitespace((CharSequence)str);\n    }\n\n    public static String trimWhitespace(String str) {\n        if (!hasLength(str)) {\n            return str;\n        } else {\n            StringBuilder sb = new StringBuilder(str);\n\n            while(sb.length() > 0 && Character.isWhitespace(sb.charAt(0))) {\n                sb.deleteCharAt(0);\n            }\n\n            while(sb.length() > 0 && Character.isWhitespace(sb.charAt(sb.length() - 1))) {\n                sb.deleteCharAt(sb.length() - 1);\n            }\n\n            return sb.toString();\n        }\n    }\n\n    public static String trimAllWhitespace(String str) {\n        if (!hasLength(str)) {\n            return str;\n        } else {\n            int len = str.length();\n            StringBuilder sb = new StringBuilder(str.length());\n\n            for(int i = 0; i < len; ++i) {\n                char c = str.charAt(i);\n                if (!Character.isWhitespace(c)) {\n                    sb.append(c);\n                }\n            }\n\n            return sb.toString();\n        }\n    }\n\n    public static String trimLeadingWhitespace(String str) {\n        if (!hasLength(str)) {\n            return str;\n        } else {\n            StringBuilder sb = new StringBuilder(str);\n\n            while(sb.length() > 0 && Character.isWhitespace(sb.charAt(0))) {\n                sb.deleteCharAt(0);\n            }\n\n            return sb.toString();\n        }\n    }\n\n    public static String trimTrailingWhitespace(String str) {\n        if 
(!hasLength(str)) {\n            return str;\n        } else {\n            StringBuilder sb = new StringBuilder(str);\n\n            while(sb.length() > 0 && Character.isWhitespace(sb.charAt(sb.length() - 1))) {\n                sb.deleteCharAt(sb.length() - 1);\n            }\n\n            return sb.toString();\n        }\n    }\n\n    public static String trimLeadingCharacter(String str, char leadingCharacter) {\n        if (!hasLength(str)) {\n            return str;\n        } else {\n            StringBuilder sb = new StringBuilder(str);\n\n            while(sb.length() > 0 && sb.charAt(0) == leadingCharacter) {\n                sb.deleteCharAt(0);\n            }\n\n            return sb.toString();\n        }\n    }\n\n    public static String trimTrailingCharacter(String str, char trailingCharacter) {\n        if (!hasLength(str)) {\n            return str;\n        } else {\n            StringBuilder sb = new StringBuilder(str);\n\n            while(sb.length() > 0 && sb.charAt(sb.length() - 1) == trailingCharacter) {\n                sb.deleteCharAt(sb.length() - 1);\n            }\n\n            return sb.toString();\n        }\n    }\n\n    public static boolean startsWithIgnoreCase(String str, String prefix) {\n        if (str != null && prefix != null) {\n            if (str.startsWith(prefix)) {\n                return true;\n            } else if (str.length() < prefix.length()) {\n                return false;\n            } else {\n                String lcStr = str.substring(0, prefix.length()).toLowerCase();\n                String lcPrefix = prefix.toLowerCase();\n                return lcStr.equals(lcPrefix);\n            }\n        } else {\n            return false;\n        }\n    }\n\n    public static boolean endsWithIgnoreCase(String str, String suffix) {\n        if (str != null && suffix != null) {\n            if (str.endsWith(suffix)) {\n                return true;\n            } else if (str.length() < suffix.length()) {\n         
       return false;\n            } else {\n                String lcStr = str.substring(str.length() - suffix.length()).toLowerCase();\n                String lcSuffix = suffix.toLowerCase();\n                return lcStr.equals(lcSuffix);\n            }\n        } else {\n            return false;\n        }\n    }\n\n    public static boolean substringMatch(CharSequence str, int index, CharSequence substring) {\n        for(int j = 0; j < substring.length(); ++j) {\n            int i = index + j;\n            if (i >= str.length() || str.charAt(i) != substring.charAt(j)) {\n                return false;\n            }\n        }\n\n        return true;\n    }\n\n    public static int countOccurrencesOf( String str, String sub ) {\n        if ( str != null && sub != null && str.length() != 0 && sub.length() != 0 ) {\n            int count = 0;\n\n            int idx;\n            for( int pos = 0; (idx = str.indexOf(sub, pos)) != -1; pos = idx + sub.length() ) {\n                ++count;\n            }\n\n            return count;\n        }\n        else {\n            return 0;\n        }\n    }\n\n    public static int countOccurrencesOf( String str, String sub, int threshold ) {\n        if ( str != null && sub != null && str.length() != 0 && sub.length() != 0 ) {\n            int count = 0;\n\n            int idx;\n            for( int pos = 0; (idx = str.indexOf(sub, pos)) != -1; pos = idx + sub.length() ) {\n                ++count;\n                if( count >= threshold ) {\n                    break;\n                }\n            }\n\n            return count;\n        }\n        else {\n            return 0;\n        }\n    }\n\n    public static String replace(String inString, String oldPattern, String newPattern) {\n        if (hasLength(inString) && hasLength(oldPattern) && newPattern != null) {\n            StringBuilder sb = new StringBuilder();\n            int pos = 0;\n            int index = inString.indexOf(oldPattern);\n\n            
for(int patLen = oldPattern.length(); index >= 0; index = inString.indexOf(oldPattern, pos)) {\n                sb.append(inString.substring(pos, index));\n                sb.append(newPattern);\n                pos = index + patLen;\n            }\n\n            sb.append(inString.substring(pos));\n            return sb.toString();\n        } else {\n            return inString;\n        }\n    }\n\n    public static String delete(String inString, String pattern) {\n        return replace(inString, pattern, \"\");\n    }\n\n    public static String deleteAny(String inString, String charsToDelete) {\n        if (hasLength(inString) && hasLength(charsToDelete)) {\n            StringBuilder sb = new StringBuilder();\n\n            for(int i = 0; i < inString.length(); ++i) {\n                char c = inString.charAt(i);\n                if (charsToDelete.indexOf(c) == -1) {\n                    sb.append(c);\n                }\n            }\n\n            return sb.toString();\n        } else {\n            return inString;\n        }\n    }\n\n    public static String quote(String str) {\n        return str != null ? \"'\" + str + \"'\" : null;\n    }\n\n    public static Object quoteIfString(Object obj) {\n        return obj instanceof String ? 
quote((String)obj) : obj;\n    }\n\n    public static String unqualify(String qualifiedName) {\n        return unqualify(qualifiedName, '.');\n    }\n\n    public static String unqualify(String qualifiedName, char separator) {\n        return qualifiedName.substring(qualifiedName.lastIndexOf(separator) + 1);\n    }\n\n    public static String capitalize(String str) {\n        return StringUtils.changeFirstCharacterCase(str, true);\n    }\n\n    public static String uncapitalize(String str) {\n        return StringUtils.changeFirstCharacterCase(str, false);\n    }\n\n    private static String changeFirstCharacterCase( String str, boolean bCapitalize ) {\n        if (str != null && str.length() != 0) {\n            char[] cs = str.toCharArray();\n            if (bCapitalize) {\n                cs[0] = Character.toUpperCase( cs[0] );\n            } else {\n                cs[0] = Character.toLowerCase( cs[0] );\n            }\n            return String.valueOf( cs );\n        } else {\n            return str;\n        }\n    }\n\n    public static String getFilename(String path) {\n        if (path == null) {\n            return null;\n        } else {\n            int separatorIndex = path.lastIndexOf(\"/\");\n            return separatorIndex != -1 ? path.substring(separatorIndex + 1) : path;\n        }\n    }\n\n    public static String getFilenameExtension(String path) {\n        if (path == null) {\n            return null;\n        } else {\n            int extIndex = path.lastIndexOf(46);\n            if (extIndex == -1) {\n                return null;\n            } else {\n                int folderIndex = path.lastIndexOf(\"/\");\n                return folderIndex > extIndex ? 
null : path.substring(extIndex + 1);\n            }\n        }\n    }\n\n    public static String stripFilenameExtension(String path) {\n        if (path == null) {\n            return null;\n        } else {\n            int extIndex = path.lastIndexOf(46);\n            if (extIndex == -1) {\n                return path;\n            } else {\n                int folderIndex = path.lastIndexOf(\"/\");\n                return folderIndex > extIndex ? path : path.substring(0, extIndex);\n            }\n        }\n    }\n\n    public static String applyRelativePath(String path, String relativePath) {\n        int separatorIndex = path.lastIndexOf(\"/\");\n        if (separatorIndex != -1) {\n            String newPath = path.substring(0, separatorIndex);\n            if (!relativePath.startsWith(\"/\")) {\n                newPath = newPath + \"/\";\n            }\n\n            return newPath + relativePath;\n        } else {\n            return relativePath;\n        }\n    }\n\n    public static String cleanPath(String path) {\n        if (path == null) {\n            return null;\n        } else {\n            String pathToUse = replace(path, \"\\\\\", \"/\");\n            int prefixIndex = pathToUse.indexOf(\":\");\n            String prefix = \"\";\n            if (prefixIndex != -1) {\n                prefix = pathToUse.substring(0, prefixIndex + 1);\n                if (prefix.contains(\"/\")) {\n                    prefix = \"\";\n                } else {\n                    pathToUse = pathToUse.substring(prefixIndex + 1);\n                }\n            }\n\n            if (pathToUse.startsWith(\"/\")) {\n                prefix = prefix + \"/\";\n                pathToUse = pathToUse.substring(1);\n            }\n\n            String[] pathArray = delimitedListToStringArray(pathToUse, \"/\");\n            List<String> pathElements = new LinkedList();\n            int tops = 0;\n\n            int i;\n            for(i = pathArray.length - 1; i >= 0; --i) 
{\n                String element = pathArray[i];\n                if (!\".\".equals(element)) {\n                    if (\"..\".equals(element)) {\n                        ++tops;\n                    } else if (tops > 0) {\n                        --tops;\n                    } else {\n                        pathElements.add(0, element);\n                    }\n                }\n            }\n\n            for(i = 0; i < tops; ++i) {\n                pathElements.add(0, \"..\");\n            }\n\n            return prefix + collectionToDelimitedString(pathElements, \"/\");\n        }\n    }\n\n    public static boolean pathEquals(String path1, String path2) {\n        return cleanPath(path1).equals(cleanPath(path2));\n    }\n\n    public static Locale parseLocaleString(String localeString) {\n        String[] parts = tokenizeToStringArray(localeString, \"_ \", false, false);\n        String language = parts.length > 0 ? parts[0] : \"\";\n        String country = parts.length > 1 ? parts[1] : \"\";\n        validateLocalePart(language);\n        validateLocalePart(country);\n        String variant = \"\";\n        if (parts.length > 2) {\n            int endIndexOfCountryCode = localeString.indexOf(country, language.length()) + country.length();\n            variant = trimLeadingWhitespace(localeString.substring(endIndexOfCountryCode));\n            if (variant.startsWith(\"_\")) {\n                variant = trimLeadingCharacter(variant, '_');\n            }\n        }\n\n        return language.length() > 0 ? 
new Locale(language, country, variant) : null;\n    }\n\n    private static void validateLocalePart(String localePart) {\n        for(int i = 0; i < localePart.length(); ++i) {\n            char ch = localePart.charAt(i);\n            if (ch != '_' && ch != ' ' && !Character.isLetterOrDigit(ch)) {\n                throw new IllegalArgumentException(\"Locale part \\\"\" + localePart + \"\\\" contains invalid characters\");\n            }\n        }\n\n    }\n\n    public static String toLanguageTag(Locale locale) {\n        return locale.getLanguage() + (hasText(locale.getCountry()) ? \"-\" + locale.getCountry() : \"\");\n    }\n\n    public static TimeZone parseTimeZoneString(String timeZoneString) {\n        TimeZone timeZone = TimeZone.getTimeZone(timeZoneString);\n        if (\"GMT\".equals(timeZone.getID()) && !timeZoneString.startsWith(\"GMT\")) {\n            throw new IllegalArgumentException(\"Invalid time zone specification '\" + timeZoneString + \"'\");\n        } else {\n            return timeZone;\n        }\n    }\n\n    public static String[] addStringToArray(String[] array, String str) {\n        if (ObjectUtils.isEmpty(array)) {\n            return new String[]{str};\n        } else {\n            String[] newArr = new String[array.length + 1];\n            System.arraycopy(array, 0, newArr, 0, array.length);\n            newArr[array.length] = str;\n            return newArr;\n        }\n    }\n\n    public static String[] concatenateStringArrays(String[] array1, String[] array2) {\n        if (ObjectUtils.isEmpty(array1)) {\n            return array2;\n        } else if (ObjectUtils.isEmpty(array2)) {\n            return array1;\n        } else {\n            String[] newArr = new String[array1.length + array2.length];\n            System.arraycopy(array1, 0, newArr, 0, array1.length);\n            System.arraycopy(array2, 0, newArr, array1.length, array2.length);\n            return newArr;\n        }\n    }\n\n    public static String[] 
mergeStringArrays(String[] array1, String[] array2) {\n        if (ObjectUtils.isEmpty(array1)) {\n            return array2;\n        } else if (ObjectUtils.isEmpty(array2)) {\n            return array1;\n        } else {\n            List<String> result = new ArrayList();\n            result.addAll(Arrays.asList(array1));\n            String[] arr = array2;\n            int len = array2.length;\n\n            for( int i = 0; i < len; ++i ) {\n                String str = arr[i];\n                if ( !result.contains(str) ) {\n                    result.add(str);\n                }\n            }\n\n            return toStringArray((Collection)result);\n        }\n    }\n\n    public static String[] sortStringArray(String[] array) {\n        if (ObjectUtils.isEmpty(array)) {\n            return new String[0];\n        } else {\n            Arrays.sort(array);\n            return array;\n        }\n    }\n\n    public static String[] toStringArray(Collection<String> collection) {\n        return collection == null ? null : (String[])collection.toArray(new String[collection.size()]);\n    }\n\n    public static String[] toStringArray(Enumeration<String> enumeration) {\n        if (enumeration == null) {\n            return null;\n        } else {\n            List<String> list = Collections.list(enumeration);\n            return (String[])list.toArray(new String[list.size()]);\n        }\n    }\n\n    public static String[] trimArrayElements(String[] array) {\n        if (ObjectUtils.isEmpty(array)) {\n            return new String[0];\n        } else {\n            String[] result = new String[array.length];\n\n            for(int i = 0; i < array.length; ++i) {\n                String element = array[i];\n                result[i] = element != null ? 
element.trim() : null;\n            }\n\n            return result;\n        }\n    }\n\n    public static String[] removeDuplicateStrings( String[] array ) {\n        if ( ObjectUtils.isEmpty( array ) ) {\n            return array;\n        }\n        else {\n            Set<String> set = new TreeSet();\n            String[] arr = array;\n            int len = array.length;\n\n            for( int i = 0; i < len; ++i ) {\n                String element = arr[ i ];\n                set.add(element);\n            }\n\n            return toStringArray( (Collection)set );\n        }\n    }\n\n    public static String[] split(String toSplit, String delimiter) {\n        if (hasLength(toSplit) && hasLength(delimiter)) {\n            int offset = toSplit.indexOf(delimiter);\n            if (offset < 0) {\n                return null;\n            } else {\n                String beforeDelimiter = toSplit.substring(0, offset);\n                String afterDelimiter = toSplit.substring(offset + delimiter.length());\n                return new String[]{beforeDelimiter, afterDelimiter};\n            }\n        } else {\n            return null;\n        }\n    }\n\n    public static Properties splitArrayElementsIntoProperties(String[] array, String delimiter) {\n        return splitArrayElementsIntoProperties(array, delimiter, (String)null);\n    }\n\n    public static Properties splitArrayElementsIntoProperties(String[] array, String delimiter, String charsToDelete) {\n        if ( ObjectUtils.isEmpty(array) ) {\n            return null;\n        }\n        else {\n            Properties result = new Properties();\n            String[] arr = array;\n            int len = array.length;\n\n            for( int i = 0; i < len; ++i ) {\n                String element = arr[i];\n                if ( charsToDelete != null ) {\n                    element = deleteAny(element, charsToDelete);\n                }\n\n                String[] splittedElement = split(element, 
delimiter);\n                if ( splittedElement != null ) {\n                    result.setProperty(splittedElement[0].trim(), splittedElement[1].trim());\n                }\n            }\n\n            return result;\n        }\n    }\n\n    public static String[] tokenizeToStringArray(String str, String delimiters) {\n        return tokenizeToStringArray(str, delimiters, true, true);\n    }\n\n    public static String[] tokenizeToStringArray(String str, String delimiters, boolean trimTokens, boolean ignoreEmptyTokens) {\n        if (str == null) {\n            return null;\n        } else {\n            StringTokenizer st = new StringTokenizer(str, delimiters);\n            ArrayList tokens = new ArrayList();\n\n            while(true) {\n                String token;\n                do {\n                    if (!st.hasMoreTokens()) {\n                        return toStringArray((Collection)tokens);\n                    }\n\n                    token = st.nextToken();\n                    if (trimTokens) {\n                        token = token.trim();\n                    }\n                } while(ignoreEmptyTokens && token.length() <= 0);\n\n                tokens.add(token);\n            }\n        }\n    }\n\n    public static String[] delimitedListToStringArray(String str, String delimiter) {\n        return delimitedListToStringArray(str, delimiter, (String)null);\n    }\n\n    public static String[] delimitedListToStringArray(String str, String delimiter, String charsToDelete) {\n        if (str == null) {\n            return new String[0];\n        } else if (delimiter == null) {\n            return new String[]{str};\n        } else {\n            List<String> result = new ArrayList();\n            int pos;\n            if (\"\".equals(delimiter)) {\n                for(pos = 0; pos < str.length(); ++pos) {\n                    result.add(deleteAny(str.substring(pos, pos + 1), charsToDelete));\n                }\n            } else {\n             
   int delPos;\n                for(pos = 0; (delPos = str.indexOf(delimiter, pos)) != -1; pos = delPos + delimiter.length()) {\n                    result.add(deleteAny(str.substring(pos, delPos), charsToDelete));\n                }\n\n                if (str.length() > 0 && pos <= str.length()) {\n                    result.add(deleteAny(str.substring(pos), charsToDelete));\n                }\n            }\n\n            return toStringArray((Collection)result);\n        }\n    }\n\n    public static String[] commaDelimitedListToStringArray(String str) {\n        return delimitedListToStringArray(str, \",\");\n    }\n\n    public static Set<String> commaDelimitedListToSet(String str) {\n        Set<String> set = new TreeSet();\n        String[] tokens = commaDelimitedListToStringArray(str);\n        String[] ts = tokens;\n        int len = tokens.length;\n\n        for( int i = 0; i < len; ++i ) {\n            String token = ts[i];\n            set.add(token);\n        }\n\n        return set;\n    }\n\n    public static String collectionToDelimitedString(Collection<?> coll, String delim, String prefix, String suffix) {\n        if (CollectionUtils.isEmpty(coll)) {\n            return \"\";\n        } else {\n            StringBuilder sb = new StringBuilder();\n            Iterator it = coll.iterator();\n\n            while(it.hasNext()) {\n                sb.append(prefix).append(it.next()).append(suffix);\n                if (it.hasNext()) {\n                    sb.append(delim);\n                }\n            }\n\n            return sb.toString();\n        }\n    }\n\n    public static String collectionToDelimitedString(Collection<?> coll, String delim) {\n        return collectionToDelimitedString(coll, delim, \"\", \"\");\n    }\n\n    public static String collectionToCommaDelimitedString(Collection<?> coll) {\n        return collectionToDelimitedString(coll, \",\");\n    }\n\n    public static String arrayToDelimitedString(Object[] arr, String delim) {\n  
      if (ObjectUtils.isEmpty(arr)) {\n            return \"\";\n        } else if (arr.length == 1) {\n            return ObjectUtils.nullSafeToString(arr[0]);\n        } else {\n            StringBuilder sb = new StringBuilder();\n\n            for(int i = 0; i < arr.length; ++i) {\n                if (i > 0) {\n                    sb.append(delim);\n                }\n\n                sb.append(arr[i]);\n            }\n\n            return sb.toString();\n        }\n    }\n\n    public static String arrayToCommaDelimitedString(Object[] arr) {\n        return arrayToDelimitedString(arr, \",\");\n    }\n\n\n\n\n\n    /**\n     *   prototype Pinecone C/C++ Ver 3.1\n     *   Version: New add in Pinecone Java Ver 3.0\n     */\n    public static String hypertext2Text( String hypertext , boolean replaceBlankSpace , boolean replaceNewLine){\n        String szRegex = \"<.*?>\";\n        if (replaceBlankSpace){\n            szRegex += \"| \";\n        }\n        if (replaceNewLine){\n            szRegex += \"|\\t\" + \"|\\r\" + \"|\\n\";\n        }\n\n        hypertext = hypertext.replaceAll(szRegex,\"\");\n        //hypertext = StringEscapeUtils.unescapeHtml( hypertext );\n\n        return hypertext;\n    }\n\n    public static String nullThenEmpty ( String str ){\n        return  str == null ? 
\"\" : str;\n    }\n\n    public static Writer addSlashes( String szBadString, Writer writer, boolean bJsonQuoteMode ) throws IOException {\n        if ( szBadString != null && szBadString.length() != 0 ) {\n            char c = 0;\n            int len = szBadString.length();\n\n            if( bJsonQuoteMode ){\n                writer.write(34);\n            }\n            for( int i = 0; i < len; ++i ) {\n                char b = c;\n                c = szBadString.charAt(i);\n                switch(c) {\n                    case '\\b': {\n                        writer.write(\"\\\\b\");\n                        continue;\n                    }\n                    case '\\t': {\n                        writer.write(\"\\\\t\");\n                        continue;\n                    }\n                    case '\\n': {\n                        writer.write(\"\\\\n\");\n                        continue;\n                    }\n                    case '\\f': {\n                        writer.write(\"\\\\f\");\n                        continue;\n                    }\n                    case '\\r': {\n                        writer.write(\"\\\\r\");\n                        continue;\n                    }\n                    case '\\'':{\n                        if( bJsonQuoteMode ){\n                            writer.write(c);\n                            continue;\n                        }\n                    }\n                    case '\"':\n                    case '\\\\': {\n                        writer.write(92);\n                        writer.write(c);\n                        continue;\n                    }\n                    case '/': {\n                        if (b == '<') {\n                            writer.write(92);\n                        }\n\n                        writer.write(c);\n                        continue;\n                    }\n                }\n\n                if (c >= ' ' && (c < 128 || c >= 160) && (c < 8192 || c 
>= 8448)) {\n                    writer.write(c);\n                }\n                else {\n                    writer.write(\"\\\\u\");\n                    String szHexString = Integer.toHexString(c);\n                    writer.write(\"0000\", 0, 4 - szHexString.length() );\n                    writer.write(szHexString);\n                }\n            }\n\n            if( bJsonQuoteMode ){\n                writer.write(34);\n            }\n            return writer;\n        }\n        else {\n            if( bJsonQuoteMode ){\n                writer.write(\"\\\"\\\"\");\n            }\n            return writer;\n        }\n    }\n\n    public static String addSlashes( String szBadString, boolean bJsonQuoteMode ) {\n        StringWriter sw = new StringWriter();\n        synchronized( sw.getBuffer()) {\n            String s;\n            try {\n                s = StringUtils.addSlashes( szBadString, sw, bJsonQuoteMode ).toString();\n            }\n            catch ( IOException e ) {\n                return \"\";\n            }\n            return s;\n        }\n    }\n\n    public static String addSlashes( String szBadString ) {\n        return StringUtils.addSlashes( szBadString, false );\n    }\n\n    public static String jsonQuote( String szBadString ) {\n        return StringUtils.addSlashes( szBadString, true );\n    }\n\n    public static String sequencify ( String[] sequences, String szDelimiter, String szPrefix ){\n        StringBuilder sb = new StringBuilder();\n        int i = 0;\n        for( String sequence : sequences ){\n            sb.append( szPrefix ).append( sequence );\n            if( ++i != sequences.length ){\n                sb.append( szDelimiter );\n            }\n        }\n        return sb.toString();\n    }\n\n    public static String sequencify ( String[] sequences, String szDelimiter ){\n        return StringUtils.sequencify( sequences, szDelimiter, \"\" );\n    }\n\n    public static String[] trimEmptyElement( String[] 
strings ) {\n        String[] buf = new String[ strings.length ];\n        int j = 0;\n        for ( int i = 0; i < strings.length; i++ ) {\n            String each = strings[ i ];\n            if( !StringUtils.isEmpty( each ) ) {\n                buf[ j++ ] = each;\n            }\n        }\n        return Arrays.copyOf( buf, j );\n    }\n\n\n\n    /**\n     *   Version: New add in Pinecone Java Ver 20240624\n     */\n    public static boolean containsBoth( String target, String moreChars ) {\n        boolean[] found = new boolean[ moreChars.length() ];\n        Arrays.fill( found, false );\n\n        for ( int i = 0; i < target.length(); ++i ) {\n            char c = target.charAt( i );\n\n            int index = moreChars.indexOf( c );\n            if ( index != -1 ) {\n                found[ index ] = true;\n            }\n        }\n\n        for ( boolean isFound : found ) {\n            if ( !isFound ) {\n                return false;\n            }\n        }\n        return true;\n    }\n\n    public static boolean containsBoth( String target, char[] moreChars ) {\n        boolean[] found = new boolean[ moreChars.length ];\n        Arrays.fill( found, false );\n\n        for ( int i = 0; i < target.length(); i++ ) {\n            char c = target.charAt(i);\n\n            for ( int j = 0; j < moreChars.length; ++j ) {\n                if( found[j] ) {\n                    continue;\n                }\n                if ( c == moreChars[j] ) {\n                    found[j] = true;\n                    break;\n                }\n            }\n        }\n\n        for ( boolean isFound : found ) {\n            if ( !isFound ) {\n                return false;\n            }\n        }\n        return true;\n    }\n\n    public static boolean containsOnce( String target, String moreChars ) {\n        for ( int i = 0; i < moreChars.length(); ++i ) {\n            char c = moreChars.charAt( i );\n            if ( target.indexOf(c) != -1 ) {\n                return 
true;\n            }\n        }\n        return false;\n    }\n\n    public static boolean containsOnce( String target, char[] moreChars ) {\n        for ( int i = 0; i < target.length(); i++ ) {\n            char c = target.charAt(i);\n\n            for ( int j = 0; j < moreChars.length; ++j ) {\n                if ( c == moreChars[j] ) {\n                    return true;\n                }\n            }\n        }\n\n        return false;\n    }\n\n    public static int countOccurrencesOf( String target, char specifiedChar, int threshold ) {\n        int count = 0;\n        for ( int i = 0; i < target.length(); ++i ) {\n            if ( target.charAt(i) == specifiedChar ) {\n                count++;\n                if ( threshold > 0 && count >= threshold ) {\n                    return count;\n                }\n            }\n        }\n        return count;\n    }\n\n    public static int countOccurrencesOf( String target, char specifiedChar ) {\n        return StringUtils.countOccurrencesOf( target, specifiedChar, 0 );\n    }\n\n\n    /**\n     *   Version: New add in Pinecone Java Ver 20241003\n     */\n    public static boolean isBlank( String str ) {\n        int strLen;\n        if ( str != null && (strLen = str.length()) != 0 ) {\n            for( int i = 0; i < strLen; ++i ) {\n                if ( !Character.isWhitespace(str.charAt(i)) ) {\n                    return false;\n                }\n            }\n        }\n        return true;\n    }\n\n    public static boolean isNotBlank( String str ) {\n        return !StringUtils.isBlank( str );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/SuperConvert.java",
    "content": "package com.pinecone.framework.util;\n\npublic class SuperConvert {\n    public static double[] object2Float64Array(Object[] objects){\n        double[] arrayBuf = new double[objects.length];\n        for(int i=0;i<arrayBuf.length;i++){\n            arrayBuf[i] = (double)objects[i];\n        }\n        return arrayBuf;\n    }\n\n    public static float[] object2Float32Array(Object[] objects){\n        float[] arrayBuf = new float[objects.length];\n        for(int i=0;i<arrayBuf.length;i++){\n            arrayBuf[i] = (float)objects[i];\n        }\n        return arrayBuf;\n    }\n\n    public static int[] object2Int32Array(Object[] objects){\n        int[] arrayBuf = new int[objects.length];\n        for(int i=0;i<arrayBuf.length;i++){\n            arrayBuf[i] = (int)objects[i];\n        }\n        return arrayBuf;\n    }\n\n    public static long[] object2Int64Array(Object[] objects){\n        long[] arrayBuf = new long[objects.length];\n        for(int i=0;i<arrayBuf.length;i++){\n            arrayBuf[i] = (long)objects[i];\n        }\n        return arrayBuf;\n    }\n\n    public static String[] object2StringArray(Object[] objects){\n        String[] arrayBuf = new String[objects.length];\n        for(int i=0;i<arrayBuf.length;i++){\n            arrayBuf[i] = (String) objects[i];\n        }\n        return arrayBuf;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/SupplierUtils.java",
    "content": "package com.pinecone.framework.util;\n\nimport java.util.function.Supplier;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic abstract class SupplierUtils {\n    public SupplierUtils() {\n    }\n\n    @Nullable\n    public static <T> T resolve( @Nullable Supplier<T> supplier ) {\n        return supplier != null ? supplier.get() : null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/UnitHelper.java",
    "content": "package com.pinecone.framework.util;\n\nimport java.lang.reflect.Array;\nimport java.util.List;\n\npublic final class UnitHelper {\n    public static int accumulateInt( int from, int to, List<Integer > list ){\n        int sum = 0;\n        for( int i = from; i < to; i++ ){\n            sum += list.get( i );\n        }\n        return sum;\n    }\n\n    public static int accumulateInt( List<Integer > list ) {\n        return UnitHelper.accumulateInt( 0, list.size(), list );\n    }\n\n    public static double accumulateDouble( int from, int to, List<Double > list ){\n        double sum = 0;\n        for( int i = from; i < to; ++i ){\n            sum += list.get( i );\n        }\n        return sum;\n    }\n\n    public static double accumulateDouble( List<Double > list ) {\n        return UnitHelper.accumulateDouble( 0, list.size(), list );\n    }\n\n\n    @SuppressWarnings( \"unchecked\" )\n    public static <T> T[] append( T[] original, int currentSize, T element ) {\n        if ( currentSize >= original.length ) {\n            T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length + 1 );\n            System.arraycopy( original, 0, newArray, 0, original.length );\n            newArray[ currentSize ] = element;\n            return newArray;\n        }\n        else {\n            original[ currentSize ] = element;\n            return original;\n        }\n    }\n\n    public static <T> T[] append( T[] original, T element ) {\n        return UnitHelper.append( original, original.length, element );\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public static <T> T[] remove( T[] original, int index ) {\n        if ( index < 0 || index >= original.length ) {\n            throw new IndexOutOfBoundsException( \"Index: \" + index + \", Size: \" + original.length );\n        }\n\n        T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length - 1 );\n        for ( int i 
= 0, j = 0; i < original.length; ++i ) {\n            if ( i != index ) {\n                newArray[ j++ ] = original[ i ];\n            }\n        }\n        return newArray;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public static <T> T[] popBack( T[] original ) {\n        if ( original.length == 0 ) {\n            throw new IllegalStateException( \"Cannot pop from an empty array.\" );\n        }\n\n        T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length - 1 );\n        System.arraycopy( original, 0, newArray, 0, original.length - 1 );\n        return newArray;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public static <T> T[] pollFirst( T[] original ) {\n        if ( original.length == 0 ) {\n            throw new IllegalStateException( \"Cannot poll from an empty array.\" );\n        }\n\n        T firstElement = original[ 0 ];\n        T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length - 1 );\n        System.arraycopy( original, 1, newArray, 0, original.length - 1 );\n        return newArray;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public static <T> T[] insert( T[] original, int index, T element ) {\n        if ( index < 0 || index > original.length ) {\n            throw new IndexOutOfBoundsException( \"Index: \" + index + \", Size: \" + original.length );\n        }\n\n        T[] newArray = (T[]) Array.newInstance( original.getClass().getComponentType(), original.length + 1 );\n        System.arraycopy( original, 0, newArray, 0, index );\n        newArray[ index ] = element;\n        System.arraycopy( original, index, newArray, index + 1, original.length - index );\n\n        return newArray;\n    }\n\n\n\n    public static Object mergeArr( Object... 
arrays ) {\n        return UnitHelper.mergeArrays( arrays );\n    }\n\n    public static Object mergeArrays( Object[] arrays ) {\n        if ( arrays == null || arrays.length == 0 ) {\n            throw new IllegalArgumentException(\"Input arrays cannot be null or empty.\");\n        }\n\n        Class<?> componentType = arrays[ 0 ].getClass().getComponentType();\n        int totalLength = 0;\n\n        for ( Object array : arrays ) {\n            totalLength += Array.getLength(array);\n        }\n\n        Object result = Array.newInstance( componentType, totalLength );\n\n        int currentIndex = 0;\n        for ( Object array : arrays ) {\n            int length = Array.getLength(array);\n            System.arraycopy( array, 0, result, currentIndex, length );\n            currentIndex += length;\n        }\n\n        return result;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/CompoundComparator.java",
    "content": "package com.pinecone.framework.util.comparator;\n\nimport com.pinecone.framework.util.Assert;\n\nimport java.io.Serializable;\nimport java.util.ArrayList;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.List;\n\npublic class CompoundComparator<T> implements Comparator<T>, Serializable {\n    private final List<InvertibleComparator> comparators;\n\n    public CompoundComparator() {\n        this.comparators = new ArrayList<>();\n    }\n\n    public CompoundComparator( Comparator... comparators ) {\n        Assert.notNull(comparators, \"Comparators must not be null\");\n        this.comparators = new ArrayList<>(comparators.length);\n        Comparator[] hComparators = comparators;\n        int len = comparators.length;\n\n        for( int i = 0; i < len; ++i ) {\n            Comparator comparator = hComparators[i];\n            this.addComparator(comparator);\n        }\n\n    }\n\n    public void addComparator(Comparator<? extends T> comparator) {\n        if (comparator instanceof InvertibleComparator) {\n            this.comparators.add((InvertibleComparator)comparator);\n        } else {\n            this.comparators.add(new InvertibleComparator(comparator));\n        }\n\n    }\n\n    public void addComparator(Comparator<? extends T> comparator, boolean ascending) {\n        this.comparators.add(new InvertibleComparator(comparator, ascending));\n    }\n\n    public void setComparator(int index, Comparator<? 
extends T> comparator) {\n        if (comparator instanceof InvertibleComparator) {\n            this.comparators.set(index, (InvertibleComparator)comparator);\n        } else {\n            this.comparators.set(index, new InvertibleComparator(comparator));\n        }\n\n    }\n\n    public void setComparator(int index, Comparator<T> comparator, boolean ascending) {\n        this.comparators.set(index, new InvertibleComparator(comparator, ascending));\n    }\n\n    public void invertOrder() {\n        Iterator iter = this.comparators.iterator();\n\n        while( iter.hasNext() ) {\n            InvertibleComparator comparator = (InvertibleComparator)iter.next();\n            comparator.invertOrder();\n        }\n\n    }\n\n    public void invertOrder(int index) {\n        ((InvertibleComparator)this.comparators.get(index)).invertOrder();\n    }\n\n    public void setAscendingOrder(int index) {\n        ((InvertibleComparator)this.comparators.get(index)).setAscending(true);\n    }\n\n    public void setDescendingOrder(int index) {\n        ((InvertibleComparator)this.comparators.get(index)).setAscending(false);\n    }\n\n    public int getComparatorCount() {\n        return this.comparators.size();\n    }\n\n    public int compare(T o1, T o2) {\n        Assert.state(this.comparators.size() > 0, \"No sort definitions have been added to this CompoundComparator to compare\");\n        Iterator iter = this.comparators.iterator();\n\n        int result;\n        do {\n            if ( !iter.hasNext() ) {\n                return 0;\n            }\n\n            InvertibleComparator comparator = (InvertibleComparator)iter.next();\n            result = comparator.compare(o1, o2);\n        }\n        while( result == 0 );\n\n        return result;\n    }\n\n    public boolean equals(Object obj) {\n        if (this == obj) {\n            return true;\n        } else if (!(obj instanceof CompoundComparator)) {\n            return false;\n        } else {\n            
CompoundComparator<T> other = (CompoundComparator)obj;\n            return this.comparators.equals(other.comparators);\n        }\n    }\n\n    public int hashCode() {\n        return this.comparators.hashCode();\n    }\n\n    public String toString() {\n        return \"CompoundComparator: \" + this.comparators;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/InvertibleComparator.java",
    "content": "package com.pinecone.framework.util.comparator;\n\nimport com.pinecone.framework.util.Assert;\n\nimport java.io.Serializable;\nimport java.util.Comparator;\n\npublic class InvertibleComparator<T> implements Comparator<T>, Serializable {\n    private final Comparator<T> comparator;\n    private boolean ascending = true;\n\n    public InvertibleComparator(Comparator<T> comparator) {\n        Assert.notNull(comparator, \"Comparator must not be null\");\n        this.comparator = comparator;\n    }\n\n    public InvertibleComparator(Comparator<T> comparator, boolean ascending) {\n        Assert.notNull(comparator, \"Comparator must not be null\");\n        this.comparator = comparator;\n        this.setAscending(ascending);\n    }\n\n    public void setAscending(boolean ascending) {\n        this.ascending = ascending;\n    }\n\n    public boolean isAscending() {\n        return this.ascending;\n    }\n\n    public void invertOrder() {\n        this.ascending = !this.ascending;\n    }\n\n    public int compare(T o1, T o2) {\n        int result = this.comparator.compare(o1, o2);\n        if (result != 0) {\n            if (!this.ascending) {\n                if (-2147483648 == result) {\n                    result = 2147483647;\n                } else {\n                    result *= -1;\n                }\n            }\n\n            return result;\n        } else {\n            return 0;\n        }\n    }\n\n    public boolean equals(Object obj) {\n        if (this == obj) {\n            return true;\n        } else if (!(obj instanceof InvertibleComparator)) {\n            return false;\n        } else {\n            InvertibleComparator<T> other = (InvertibleComparator)obj;\n            return this.comparator.equals(other.comparator) && this.ascending == other.ascending;\n        }\n    }\n\n    public int hashCode() {\n        return this.comparator.hashCode();\n    }\n\n    public String toString() {\n        return \"InvertibleComparator: [\" + 
this.comparator + \"]; ascending=\" + this.ascending;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/OrderComparator.java",
    "content": "package com.pinecone.framework.util.comparator;\n\nimport java.util.Arrays;\nimport java.util.Comparator;\nimport java.util.List;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.ObjectUtils;\n\npublic class OrderComparator implements Comparator<Object> {\n    public static final OrderComparator INSTANCE = new OrderComparator();\n\n    public OrderComparator() {\n    }\n\n    public Comparator<Object> withSourceProvider(OrderComparator.OrderSourceProvider sourceProvider) {\n        return (o1, o2) -> {\n            return this.doCompare(o1, o2, sourceProvider);\n        };\n    }\n\n    @Override\n    public int compare(@Nullable Object o1, @Nullable Object o2) {\n        return this.doCompare(o1, o2, (OrderComparator.OrderSourceProvider)null);\n    }\n\n    private int doCompare(@Nullable Object o1, @Nullable Object o2, @Nullable OrderComparator.OrderSourceProvider sourceProvider) {\n        boolean p1 = o1 instanceof PriorityOrdered;\n        boolean p2 = o2 instanceof PriorityOrdered;\n        if (p1 && !p2) {\n            return -1;\n        }\n        else if (p2 && !p1) {\n            return 1;\n        }\n        else {\n            int i1 = this.getOrder(o1, sourceProvider);\n            int i2 = this.getOrder(o2, sourceProvider);\n            return Integer.compare(i1, i2);\n        }\n    }\n\n    private int getOrder(@Nullable Object obj, @Nullable OrderComparator.OrderSourceProvider sourceProvider) {\n        Integer order = null;\n        if (obj != null && sourceProvider != null) {\n            Object orderSource = sourceProvider.getOrderSource(obj);\n            if (orderSource != null) {\n                if (orderSource.getClass().isArray()) {\n                    Object[] ta = ObjectUtils.toObjectArray(orderSource);\n                    int len = ta.length;\n\n                    for( int i = 0; i < len; ++i ) {\n                        Object source = ta[i];\n                        order = 
this.findOrder(source);\n                        if (order != null) {\n                            break;\n                        }\n                    }\n                }\n                else {\n                    order = this.findOrder(orderSource);\n                }\n            }\n        }\n\n        return order != null ? order : this.getOrder(obj);\n    }\n\n    protected int getOrder( @Nullable Object obj ) {\n        if (obj != null) {\n            Integer order = this.findOrder(obj);\n            if (order != null) {\n                return order;\n            }\n        }\n\n        return 2147483647;\n    }\n\n    @Nullable\n    protected Integer findOrder(Object obj) {\n        return obj instanceof Ordered ? ((Ordered)obj).getOrder() : null;\n    }\n\n    @Nullable\n    public Integer getPriority(Object obj) {\n        return null;\n    }\n\n    public static void sort(List<?> list) {\n        if (list.size() > 1) {\n            list.sort(INSTANCE);\n        }\n    }\n\n    public static void sort(Object[] array) {\n        if (array.length > 1) {\n            Arrays.sort(array, INSTANCE);\n        }\n\n    }\n\n    public static void sortIfNecessary(Object value) {\n        if ( value instanceof Object[] ) {\n            OrderComparator.sort((Object[])((Object[])value));\n        }\n        else if (value instanceof List) {\n            OrderComparator.sort((List)value);\n        }\n\n    }\n\n    @FunctionalInterface\n    public interface OrderSourceProvider {\n        @Nullable\n        Object getOrderSource(Object var1);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/Ordered.java",
    "content": "package com.pinecone.framework.util.comparator;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Ordered extends Pinenut {\n    int HIGHEST_PRECEDENCE = -2147483648;\n    int LOWEST_PRECEDENCE = 2147483647;\n\n    int getOrder();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/comparator/PriorityOrdered.java",
    "content": "package com.pinecone.framework.util.comparator;\n\npublic interface PriorityOrdered extends Ordered {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/Config.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Config extends Pinenut {\n\n    Object getProtoConfig();\n\n    Object get( Object key );\n\n    Object getOrDefault( Object key, Object def );\n\n    default boolean containsKey( Object key ) {\n        return this.get( key ) != null;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/Configson.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport java.util.Map;\n\npublic interface Configson extends MappedConfig, PatriarchalConfig {\n    Map<String, Object > getProtoConfig();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/GenericStartupCommandParser.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class GenericStartupCommandParser implements StartupCommandParser {\n    private String[] mValueStartSymbols = { \"\", \"--\", \"-\", \"/\", \"\\\\\" };\n    private String[] mAssignmentSymbols = { \"=\", \":\", \"=>\", \"->\" };\n    private String[] mValueSeparators   = { \",\", \";\", \"|\" };\n\n    public GenericStartupCommandParser() {}\n\n    public GenericStartupCommandParser( String[] valueStartSymbols, String[] assignmentSymbols, String[] valueSeparators ) {\n        this.mValueStartSymbols = valueStartSymbols;\n        this.mAssignmentSymbols = assignmentSymbols;\n        this.mValueSeparators = valueSeparators;\n    }\n\n    @Override\n    public Map<String, String[] > parse( String[] args ) {\n        Map<String, String[] > result = new LinkedTreeMap<>();\n\n        for ( String arg : args ) {\n            String key   = null;\n            String value = null;\n\n            for ( String startSymbol : this.mValueStartSymbols ) {\n                if ( arg.startsWith( startSymbol ) ) {\n                    int assignmentIndex = -1;\n                    for ( String assignmentSymbol : this.mAssignmentSymbols ) {\n                        int index = arg.indexOf(assignmentSymbol, startSymbol.length());\n                        if ( index > 0 ) {\n                            assignmentIndex = index;\n                            break;\n                        }\n                    }\n\n                    if ( assignmentIndex > 0 ) {\n                        key   = arg.substring(startSymbol.length(), assignmentIndex);\n                        value = arg.substring(assignmentIndex + 1);\n                    }\n                    else {\n                        key   = arg.substring(startSymbol.length());\n                        value = \"\";\n                    }\n                   
 break;\n                }\n            }\n\n            if ( key != null ) {\n                String[] values = this.splitValues( value );\n                result.put(key, values);\n            }\n        }\n\n        return result;\n    }\n\n    @Override\n    public Map<String, String[]> parse( Map<String, String> args ) {\n        Map<String, String[]> map = new HashMap<>( args.size() );\n\n        for ( String key : args.keySet() ) {\n            String value = args.get(key);\n            if ( value == null ) {\n                value = \"\";\n            }\n            String[] values = this.splitValues( value );\n            map.put( key, values );\n        }\n\n        return map;\n    }\n\n    private String[] splitValues(String value ) {\n        if ( value.isEmpty() ) {\n            return new String[0];\n        }\n\n        if ( value.charAt(0) == '\"' || value.charAt(0) == '\\'' ) {\n            return new String[] { value };\n        }\n\n        for ( String separator : this.mValueSeparators ) {\n            if ( value.contains( separator ) ) {\n                return value.split(java.util.regex.Pattern.quote(separator));\n            }\n        }\n        return new String[]{ value };\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/JSONConfig.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport com.pinecone.framework.system.ErrorStrings;\nimport com.pinecone.framework.unit.MultiScopeMap;\nimport com.pinecone.framework.unit.MultiScopeMaptron;\nimport com.pinecone.framework.util.json.*;\n\nimport java.io.File;\nimport java.io.FileReader;\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Map;\n\npublic class JSONConfig extends JSONMaptron implements Configson {\n    protected MultiScopeMap<String, Object >     mScope;\n\n    protected JPlusContext                       mThisContext;\n    protected JSONConfig                         mRoot;\n    protected JSONConfig                         mParent;\n\n\n    public JSONConfig( JSONConfig parent ) {\n        this( (Map<String, Object >) null, parent );\n    }\n\n    public JSONConfig( JSONObject thisScope, JSONConfig parent ) {\n        this( thisScope.getMap(), parent );\n    }\n\n    public JSONConfig( JSONObject thisScope ) {\n        this( thisScope.getMap(), null );\n    }\n\n    public JSONConfig( Map<String, Object > thisScope, JSONConfig parent ) {\n        super();\n        this.mParent      = parent;\n        if( this.mParent != null ) {\n            this.inherit( this.mParent );\n        }\n        else {\n            this.mRoot        = this;\n            this.mScope       = new MultiScopeMaptron<>();\n            this.mThisContext = new JPlusContext();\n\n            if( thisScope == null ) {\n                thisScope = this.getMap();\n            }\n            this.mThisContext.asProgenitor( thisScope );\n        }\n\n        this.setThisScope( thisScope );\n    }\n\n    public JSONConfig() {\n        this(null );\n    }\n\n    @Override\n    public JSONConfig inherit( PatriarchalConfig parent ) {\n        JSONConfig that = (JSONConfig) parent;\n        this.mScope       = new MultiScopeMaptron<>();\n        this.mThisContext = that.mThisContext.clone();\n        this.mRoot        = that.mRoot;\n        
this.mParent      = that;\n\n        this.mScope.setParents( that.mScope.getParents() );\n        this.mScope.setName   ( that.mScope.getName() );\n        this.mThisContext.setParent( that.mThisContext.thisScope() );\n        this.setThisScope( this.getMap() );\n\n        return this;\n    }\n\n\n    public JSONConfig addGlobalScope( Map<String, Object > scope ) {\n        this.getContext().addGlobalScope( scope );\n        if( scope instanceof MultiScopeMap ) {\n            this.getScope().addParent( (MultiScopeMap<String, Object >)scope );\n        }\n        else {\n            this.getScope().addParent( new MultiScopeMaptron<>( scope ) );\n        }\n        return this;\n    }\n\n    public JSONConfig addGlobalScope( Map<String, Object > scope, String name ) {\n        this.getContext().addGlobalScope( scope );\n        if( scope instanceof MultiScopeMap ) {\n            this.getScope().addParent( ( (MultiScopeMap<String, Object >)scope ).setName( name ) );\n        }\n        else {\n            this.getScope().addParent( ( new MultiScopeMaptron<>( scope ) ).setName( name ) );\n        }\n        return this;\n    }\n\n    public JSONConfig setThisScope( Map<String, Object > thisScope ) {\n        if( thisScope != null ) {\n            this.assimilate( thisScope );\n            this.getContext().setThisScope( thisScope );\n            this.getScope().setThisScope( thisScope );\n        }\n        return this;\n    }\n\n    public JSONConfig from( JSONObject prototype ) {\n        this.setThisScope( prototype.getMap() );\n        return this;\n    }\n\n    public JSONObject fromFile( File fConf ) throws IOException {\n        if( this.parent() != null ) {\n            return ( (JSONConfig) this.root() ).fromFile( fConf );\n        }\n\n        JPlusContext context = this.getContext().clone();\n        context.asProgenitor( this );\n        return new JSONMaptron( new JPlusCursorParser( new FileReader( fConf ), context ) );\n    }\n\n    public JSONObject 
fromFileNoException( File fConf ) {\n        try {\n            return this.fromFile( fConf );\n        }\n        catch ( IOException e ) {\n            return null;\n        }\n    }\n\n    public JSONObject fromPath( Path path ) throws IOException {\n        try{\n            return this.fromFile( path.toFile() );\n        }\n        catch ( IOException e ) {\n            IOException ie = null;\n            for( Path p : this.mThisContext.getParentPaths() ) {\n                try{\n                    return this.fromFile( path.resolve( p ).toFile() );\n                }\n                catch ( IOException e1 ) {\n                    ie = e1;\n                }\n            }\n\n            if( ie != null ) {\n                throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + \"What-> '\" + path + \"'\", ie );\n            }\n        }\n\n        throw new IOException( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + \"What-> '\" + path + \"'\" );\n    }\n\n    @Override\n    public JSONConfig getChildFromPath( Path path ) throws IOException {\n        JSONObject neo = this.fromPath( path );\n        return new JSONConfig( neo, this );\n    }\n\n    @Override\n    public Object get( Object key ) {\n        return this.opt( key.toString() );\n    }\n\n    @Override\n    public Object getOrDefault( Object key, Object def ) {\n        Object o = this.get( key );\n        if( o == null ) {\n            return def;\n        }\n        return o;\n    }\n\n    @Override\n    public Object opt( String key ) {\n        return this.mScope.get( key );\n    }\n\n    @Override\n    public JSONConfig getChild( Object key ) {\n        JSONObject prototype = this.optJSONObject( key.toString() );\n        if( prototype == null ) {\n            return null;\n        }\n\n        return ( new JSONConfig( prototype, this ) );\n    }\n\n    public JSONConfig apply ( File fConf ) throws IOException {\n        return this.from( this.fromFile( fConf ) 
);\n    }\n\n    @Override\n    public JSONObject getProtoConfig() {\n        return this.toJSONObject();\n    }\n\n    @Override\n    public JSONConfig parent() {\n        return this.mParent;\n    }\n\n    public MultiScopeMap<String, Object > getScope() {\n        return this.mScope;\n    }\n\n    @Override\n    public JSONConfig root() {\n        return this.mRoot;\n    }\n\n    @Override\n    public JSONConfig setParent ( Object parent ) {\n        this.mParent = (JSONConfig) parent;\n        return this;\n    }\n\n\n    public JSONConfig setRoot( Object root ) {\n        this.mRoot = (JSONConfig) root;\n        return this;\n    }\n\n    @Override\n    public Path[] getParentPaths() {\n        return this.getContext().getParentPaths();\n    }\n\n    @Override\n    public JSONConfig setParentPaths( Path[] paths ) {\n        this.getContext().setParentPaths( paths );\n        return this;\n    }\n\n    public JSONConfig addParentPath( Path newPath ) {\n        this.getContext().addParentPath( newPath );\n        return this;\n    }\n\n    @Override\n    public JPlusContext getContext() {\n        return this.mThisContext;\n    }\n\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/JSONSystemConfig.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport com.pinecone.framework.system.Pinecore;\nimport com.pinecone.framework.util.json.JSONObject;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Map;\n\npublic class JSONSystemConfig extends JSONConfig implements SysConfigson {\n    protected Pinecore mSystem;\n\n    public JSONSystemConfig ( Map<String, Object > map, JSONConfig parent, Pinecore system ) {\n        super( map, parent );\n        this.setSystem( system );\n    }\n\n    public JSONSystemConfig ( JSONConfig parent, Pinecore system ) {\n        this( null, parent, system );\n    }\n\n    public JSONSystemConfig ( Pinecore system ) {\n        this(null , system );\n    }\n\n    @Override\n    public JSONSystemConfig getChild( Object key ) {\n        JSONObject prototype = this.optJSONObject( key.toString() );\n        if( prototype == null ) {\n            return null;\n        }\n\n        return new JSONSystemConfig( prototype, this, this.getSystem() );\n    }\n\n    @Override\n    public Pinecore getSystem() {\n        return this.mSystem;\n    }\n\n    public JSONSystemConfig setSystem( Pinecore system ) {\n        this.mSystem = system;\n        if( this.parent() != null && ((JSONSystemConfig)this.parent() ).getSystem() != this.mSystem ) {\n            this.getContext().addParentPath( Path.of( this.getSystem().getRuntimePath() ) );\n        }\n        return this;\n    }\n\n    @Override\n    public JSONSystemConfig getChildFromPath( Path path ) throws IOException {\n        JSONObject neo = this.fromPath( path );\n        return new JSONSystemConfig( neo,this, this.getSystem() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/MappedConfig.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport java.util.Map;\n\npublic interface MappedConfig extends Config {\n    Map getProtoConfig();\n\n    @Override\n    default Object get( Object key ) {\n        return this.getProtoConfig().get( key );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/OverridableConfig.java",
    "content": "package com.pinecone.framework.util.config;\n\npublic interface OverridableConfig {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/PatriarchalConfig.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport com.pinecone.framework.system.prototype.FamilyContext;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\n\npublic interface PatriarchalConfig extends Config {\n    PatriarchalConfig parent();\n\n    default PatriarchalConfig root() {\n        PatriarchalConfig p = this.parent();\n        if( p == null ) {\n            return this;\n        }\n\n        return p.root();\n    }\n\n    PatriarchalConfig getChild  ( Object key );\n\n    PatriarchalConfig setParent ( Object parent );\n\n    Path[] getParentPaths();\n\n    PatriarchalConfig setParentPaths( Path[] path );\n\n    PatriarchalConfig inherit( PatriarchalConfig parent ) ;\n\n    FamilyContext getContext();\n\n    PatriarchalConfig getChildFromPath( Path path ) throws IOException;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/StartupCommandParser.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Map;\n\npublic interface StartupCommandParser extends Pinenut {\n    StartupCommandParser DefaultParser = new GenericStartupCommandParser();\n\n    Map<String, String[] > parse( String[] args );\n\n    Map<String, String[] > parse( Map<String, String > args );\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/SysConfigson.java",
    "content": "package com.pinecone.framework.util.config;\n\npublic interface SysConfigson extends Configson, SystemConfig {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/config/SystemConfig.java",
    "content": "package com.pinecone.framework.util.config;\n\nimport com.pinecone.framework.system.RuntimeSystem;\n\npublic interface SystemConfig extends Config {\n    RuntimeSystem getSystem();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/DatePattern.java",
    "content": "package com.pinecone.framework.util.datetime;\n\nimport java.time.ZoneId;\nimport java.time.format.DateTimeFormatter;\nimport java.util.Locale;\n\npublic final class DatePattern {\n\n    public static DateTimeFormatter createFormatter( String pattern ) {\n        return DateTimeFormatter.ofPattern(pattern, Locale.getDefault()).withZone(ZoneId.systemDefault());\n    }\n\n    public static final String NORM_DATETIME_PATTERN = \"yyyy-MM-dd HH:mm:ss\";\n    public static final DateTimeFormatter NORM_DATETIME_FORMATTER = createFormatter(\"yyyy-MM-dd HH:mm:ss\");\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/GenericMultiFormDateTimeAudit.java",
    "content": "package com.pinecone.framework.util.datetime;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.time.LocalDateTime;\nimport java.time.temporal.ChronoUnit;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\n\npublic class GenericMultiFormDateTimeAudit implements UniformDateTimeAudit {\n    public GenericMultiFormDateTimeAudit() {\n\n    }\n\n    @Override\n    public boolean matches   ( String szDateTime, LocalDateTime targetTime ) {\n        StorageDateTime dateTime = GenericMultiFormDateTimeAudit.fromString( szDateTime );\n\n        // Extract components from LocalDateTime\n        int year        = targetTime.getYear();\n        int month       = targetTime.getMonthValue();\n        int dayOfMonth  = targetTime.getDayOfMonth();\n        int hour        = targetTime.getHour();\n        int minute      = targetTime.getMinute();\n        int second      = targetTime.getSecond();\n        int nano        = targetTime.getNano();\n\n        return this.matchesDateTime( year, month, dayOfMonth, hour, minute, second, nano, dateTime );\n    }\n\n    @Override\n    public boolean betweenSec ( String szDateTime, LocalDateTime targetTime, int nSecondAccuracy ) {\n        StorageDateTime dateTime        = GenericMultiFormDateTimeAudit.fromString( szDateTime );\n\n        LocalDateTime localizedDateTime = GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, targetTime );\n        long differenceInSeconds = ChronoUnit.SECONDS.between( localizedDateTime, targetTime );\n        return Math.abs( differenceInSeconds ) <= nSecondAccuracy;\n    }\n\n    @Override\n    public boolean betweenMin ( String szDateTime, LocalDateTime targetTime, int nMinuteAccuracy ) {\n        StorageDateTime dateTime        = GenericMultiFormDateTimeAudit.fromString( szDateTime );\n\n        LocalDateTime localizedDateTime = GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, targetTime );\n        long 
differenceInMinutes        = ChronoUnit.SECONDS.between( localizedDateTime, targetTime );\n        return Math.abs( differenceInMinutes ) <= nMinuteAccuracy * 60;\n    }\n\n    @Override\n    public boolean between   ( String szDateTime, LocalDateTime targetTime, int nMillisAccuracy ) {\n        StorageDateTime dateTime        = GenericMultiFormDateTimeAudit.fromString( szDateTime );\n\n        LocalDateTime localizedDateTime = GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, targetTime );\n        long differenceInMillis = ChronoUnit.MILLIS.between( localizedDateTime, targetTime );\n        return Math.abs(differenceInMillis) <= nMillisAccuracy;\n    }\n\n\n    public static StorageDateTime fromString    ( String szDateTime ) {\n        String szStandardizeDateTime = GenericMultiFormDateTimeAudit.standardize( szDateTime );\n        if( szStandardizeDateTime == null ) {\n            throw new IllegalArgumentException( \"Datetime should be fmt `????-??-?? ??:??:??.???`\" );\n        }\n\n        return GenericMultiFormDateTimeAudit.parseDateTime( szStandardizeDateTime );\n    }\n\n    // Usage methodology: Fill in all wildcard characters from `targetTime`.\n    public static LocalDateTime toLocalDateTime ( StorageDateTime storageDateTime, LocalDateTime targetTime ) {\n        StorageDate date = storageDateTime.getDate();\n        StorageTime time = storageDateTime.getTime();\n\n        int year   = date.getYear()   != -1   ? date.getYear()   : targetTime.getYear();\n        int month  = date.getMonth()  != -1   ? date.getMonth()  : targetTime.getMonthValue();\n        int day    = date.getDay()    != -1   ? date.getDay()    : targetTime.getDayOfMonth();\n\n        int hour   = time.getHour()   != -1   ? time.getHour()   : targetTime.getHour();\n        int minute = time.getMinute() != -1   ? time.getMinute() : targetTime.getMinute();\n        int second = time.getSecond() != -1   ? 
time.getSecond() : targetTime.getSecond();\n        int nano   = time.getNano()   != -1   ? time.getNano()   : targetTime.getNano();\n\n        return LocalDateTime.of( year, month, day, hour, minute, second, nano );\n    }\n\n    protected boolean matchesDateTime           ( int year, int month, int dayOfMonth, int hour, int minute, int second, int nano, StorageDateTime dateTime ) {\n        if ( dateTime.getYear() != -1 && dateTime.getYear() != year ) {\n            return false;\n        }\n\n        if ( dateTime.getMonthValue() != -1 && dateTime.getMonthValue() != month ) {\n            return false;\n        }\n\n        if ( dateTime.getDayOfMonth() != -1 && dateTime.getDayOfMonth() != dayOfMonth ) {\n            return false;\n        }\n\n        if ( dateTime.getHour() != -1 && dateTime.getHour() != hour ) {\n            return false;\n        }\n\n        if ( dateTime.getMinute() != -1 && dateTime.getMinute() != minute ) {\n            return false;\n        }\n\n        if ( dateTime.getSecond() != -1 && dateTime.getSecond() != second ) {\n            return false;\n        }\n\n        if ( dateTime.getNano() != -1 && dateTime.getNano() != nano ) {\n            return false;\n        }\n\n        return true;\n    }\n\n    public static StorageDateTime parseDateTime ( String input ) {\n        String[] parts = input.split( \"[\\\\.\\\\-T:\\\\s]+\" );\n\n        if ( parts.length != 7 ) {\n            throw new IllegalArgumentException(\"Invalid input format: \" + input);\n        }\n\n        int year   = GenericMultiFormDateTimeAudit.parseComponent( parts[0] );\n        int month  = GenericMultiFormDateTimeAudit.parseComponent( parts[1] );\n        int day    = GenericMultiFormDateTimeAudit.parseComponent( parts[2] );\n        int hour   = GenericMultiFormDateTimeAudit.parseComponent( parts[3] );\n        int minute = GenericMultiFormDateTimeAudit.parseComponent( parts[4] );\n        int second = GenericMultiFormDateTimeAudit.parseComponent( parts[5] 
);\n        int nano   = GenericMultiFormDateTimeAudit.parseComponent( parts[6] );\n\n        return StorageDateTime.of( year, month, day, hour, minute, second, nano );\n    }\n\n    private static int parseComponent           ( String component ) {\n        if ( component.matches( \"\\\\?{1,13}\" ) ) {\n            return -1;\n        }\n        else {\n            return Integer.parseInt(component);\n        }\n    }\n\n    public static String standardize            ( String input ) {\n        if( input.contains(\"T\") ) { // \"????-??-??T??:??:??.???\"\n            input = input.replace( \"T\", \" \" );\n        }\n\n        boolean bMatchBase = StringUtils.containsOnce( input, \"-/\" );\n        boolean hasColon   = input.contains(\":\");\n        int dot            = 0;\n        if( !bMatchBase ) {\n            dot = StringUtils.countOccurrencesOf( input, '.', 2 );\n        }\n\n        if ( bMatchBase && hasColon || ( dot > 1/* yyyy.mm.dd */ && hasColon ) ) {\n            return GenericMultiFormDateTimeAudit.standardizeDateTime( input, false );\n        }\n        else if ( bMatchBase || ( dot > 1/* yyyy.mm.dd */ || ( dot == 1 && !hasColon /* yyyy.mm*/ ) ) ) {\n            return GenericMultiFormDateTimeAudit.standardizeDateTime( input, true ) + \" ??:??:??.???\";\n        }\n        else if ( hasColon ) {\n            return \"????-??-?? \" + GenericMultiFormDateTimeAudit.standardizeTime( input );\n        }\n        else if ( input.equals(\"?\") ) {\n            return \"????-??-?? 
??:??:??.???\";\n        }\n\n        return null;\n    }\n\n    private static String standardizeDateTime   ( String input, boolean bOnlyYear ) {\n        Pattern pattern;\n\n        if( bOnlyYear ) {\n            pattern = Pattern.compile( \"(\\\\d{1,13}|\\\\?{1,13})[-/\\\\.](\\\\d{1,2}|\\\\?{1,2})(?:[-/\\\\.](\\\\d{1,2}|\\\\?{1,2}))?\" );\n\n        }\n        else {\n            pattern = Pattern.compile( \"(\\\\d{1,13}|\\\\?{1,13})[-/\\\\.](\\\\d{1,2}|\\\\?{1,2})(?:[-/\\\\.](\\\\d{1,2}|\\\\?{1,2}))? (\\\\d{1,2}|\\\\?{1,2}):(\\\\d{1,2}|\\\\?{1,2})(?:\\\\:(\\\\d{1,2}|\\\\?{1,2}))?(?:\\\\.(\\\\d{1,10}|\\\\?{1,10}))?\" );\n        }\n\n        Matcher matcher = pattern.matcher(input);\n\n        if ( !matcher.matches() ) {\n            throw new IllegalArgumentException( \"Invalid date-time format: \" + input );\n        }\n\n\n        String year    = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(1), 13 );\n        String month   = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(2), 2 );\n        String day     = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(3), 2 );\n\n        if( bOnlyYear ) {\n            return String.format( \"%s-%s-%s\", year, month, day );\n        }\n        else {\n            String hour    = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(4), 2 );\n            String minute  = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(5), 2 );\n            String second  = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(6), 2 );\n            String nano    = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(7), 10, true );\n            return String.format( \"%s-%s-%s %s:%s:%s.%s\", year, month, day, hour, minute, second, nano );\n        }\n    }\n\n    private static String standardizeTime       ( String input ) {\n        Pattern pattern = Pattern.compile( 
\"(\\\\d{1,2}|\\\\?{1,2}):(\\\\d{1,2}|\\\\?{1,2})(?:\\\\:(\\\\d{1,2}|\\\\?{1,2}))?(?:\\\\.(\\\\d{1,10}|\\\\?{1,10}))?\" );\n        Matcher matcher = pattern.matcher(input);\n\n        if ( !matcher.matches() ) {\n            throw new IllegalArgumentException( \"Invalid time format: \" + input );\n        }\n\n        String hour   = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(1), 2 );\n        String minute = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(2), 2 );\n        String second = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(3), 2 );\n        String nano   = GenericMultiFormDateTimeAudit.formatComponent( matcher.group(4), 10, true );\n\n        return String.format( \"%s:%s:%s.%s\", hour, minute, second, nano );\n    }\n\n    private static String formatComponent       ( String component, int lengtn ) {\n        return GenericMultiFormDateTimeAudit.formatComponent( component, lengtn, false );\n    }\n\n    private static String formatComponent       ( String component, int length, boolean bNano ) {\n        if( component == null ) {\n            component = \"?\";\n        }\n\n        if ( component.contains(\"?\") ) {\n            return component;\n        }\n\n        int n = Integer.parseInt( component );\n        if( bNano ) {\n            if( component.length() < 4 ) {\n                n = n * 1000000;\n            }\n        }\n\n        return String.format( \"%0\" + length + \"d\", n );\n    }\n\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/StorageDate.java",
    "content": "package com.pinecone.framework.util.datetime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSON;\n\npublic class StorageDate implements Pinenut {\n    private int   mnYear;\n    private short mnMonth;\n    private short mnDay;\n\n    public StorageDate( int year, short month, short day ) {\n        this.mnYear  = year;\n        this.mnMonth = month;\n        this.mnDay   = day;\n    }\n\n    public StorageDate( int year, int month, int day ) {\n        this( year, (short) month, (short)day );\n    }\n\n    public int getYear() {\n        return this.mnYear;\n    }\n\n    public void setYear( int year ) {\n        this.mnYear = year;\n    }\n\n    public short getMonth() {\n        return this.mnMonth;\n    }\n\n    public void setMonth( short month ) {\n        this.mnMonth = month;\n    }\n\n    public short getDay() {\n        return this.mnDay;\n    }\n\n    public void setDay( short day ) {\n        this.mnDay = day;\n    }\n\n    @Override\n    public String toString() {\n        return String.format(\"%d-%02d-%02d\", this.mnYear, this.mnMonth, this.mnDay);\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.toString() );\n    }\n\n    public static StorageDate of( int year, int month, int day ) {\n        return new StorageDate(year, month, day);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/StorageDateTime.java",
    "content": "package com.pinecone.framework.util.datetime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.time.Month;\n\npublic class StorageDateTime implements Pinenut {\n    private StorageDate mDate;\n    private StorageTime mTime;\n\n    public StorageDateTime( StorageDate date, StorageTime time ) {\n        this.mDate = date;\n        this.mTime = time;\n    }\n\n    public StorageDate getDate() {\n        return this.mDate;\n    }\n\n    public void setDate(StorageDate date) {\n        this.mDate = date;\n    }\n\n    public StorageTime getTime() {\n        return this.mTime;\n    }\n\n    public void setTime(StorageTime time) {\n        this.mTime = time;\n    }\n\n    public int getYear() {\n        return this.mDate.getYear();\n    }\n\n    public void setYear( int year ) {\n        this.mDate.setYear(year);\n    }\n\n    public int getMonthValue() {\n        return this.mDate.getMonth();\n    }\n\n    public void setMonth( int month ) {\n        this.mDate.setMonth((short) month);\n    }\n\n    public int getDayOfMonth() {\n        return this.mDate.getDay();\n    }\n\n    public void setDay( int day ) {\n        this.mDate.setDay((short) day);\n    }\n\n    public int getHour() {\n        return this.mTime.getHour();\n    }\n\n    public void setHour( int hour ) {\n        this.mTime.setHour(hour);\n    }\n\n    public int getMinute() {\n        return this.mTime.getMinute();\n    }\n\n    public void setMinute( int minute ) {\n        this.mTime.setMinute(minute);\n    }\n\n    public int getSecond() {\n        return this.mTime.getSecond();\n    }\n\n    public void setSecond( int second ) {\n        this.mTime.setSecond(second);\n    }\n\n    public int getNano() {\n        return this.mTime.getNano();\n    }\n\n    public void setNano( int nano ) {\n        this.mTime.setNano(nano);\n    }\n\n\n    @Override\n    public String toString() {\n        return 
this.mDate.toString() + \" \" + this.mTime.toString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.toString() );\n    }\n\n    public static StorageDateTime of( int year, Month month, int dayOfMonth, int hour, int minute ) {\n        return new StorageDateTime( StorageDate.of(year, month.getValue(), dayOfMonth ), StorageTime.of(hour, minute, 0, 0) );\n    }\n\n    public static StorageDateTime of( int year, Month month, int dayOfMonth, int hour, int minute, int second ) {\n        return new StorageDateTime( StorageDate.of(year, month.getValue(), dayOfMonth), StorageTime.of(hour, minute, second, 0) );\n    }\n\n    public static StorageDateTime of( int year, Month month, int dayOfMonth, int hour, int minute, int second, int nanoOfSecond ) {\n        return new StorageDateTime( StorageDate.of(year, month.getValue(), dayOfMonth), StorageTime.of(hour, minute, second, nanoOfSecond) );\n    }\n\n    public static StorageDateTime of( int year, int month, int dayOfMonth, int hour, int minute ) {\n        return new StorageDateTime( StorageDate.of(year, month, dayOfMonth), StorageTime.of(hour, minute, 0, 0) );\n    }\n\n    public static StorageDateTime of( int year, int month, int dayOfMonth, int hour, int minute, int second ) {\n        return new StorageDateTime( StorageDate.of(year, month, dayOfMonth), StorageTime.of( hour, minute, second, 0 ) );\n    }\n\n    public static StorageDateTime of( int year, int month, int dayOfMonth, int hour, int minute, int second, int nanoOfSecond ) {\n        return new StorageDateTime(StorageDate.of(year, month, dayOfMonth),\n                StorageTime.of(hour, minute, second, nanoOfSecond));\n    }\n\n    public static StorageDateTime of( StorageDate date, StorageTime time ) {\n        return new StorageDateTime( date, time );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/StorageTime.java",
    "content": "package com.pinecone.framework.util.datetime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSON;\n\npublic class StorageTime implements Pinenut {\n    private byte mnHour;\n    private byte mnMinute;\n    private byte mnSecond;\n    private int  mnNano;\n\n    public StorageTime( int hour, int minute, int second, int nano ) {\n        this.mnHour   = (byte) hour;\n        this.mnMinute = (byte) minute;\n        this.mnSecond = (byte) second;\n        this.mnNano   = nano;\n    }\n\n\n    public int getHour() {\n        return this.mnHour;\n    }\n\n    public void setHour( int hour ) {\n        this.mnHour = (byte) hour;\n    }\n\n    public int getMinute() {\n        return this.mnMinute;\n    }\n\n    public void setMinute( int minute ) {\n        this.mnMinute = (byte) minute;\n    }\n\n    public int getSecond() {\n        return this.mnSecond;\n    }\n\n    public void setSecond( int second ) {\n        this.mnSecond = (byte) second;\n    }\n\n    public int getNano() {\n        return this.mnNano;\n    }\n\n    public void setNano( int nano ) {\n        this.mnNano = nano;\n    }\n\n    @Override\n    public String toString() {\n        return String.format(\"%02d:%02d:%02d.%09d\", this.mnHour, this.mnMinute, this.mnSecond, this.mnNano);\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.toString() );\n    }\n\n    public static StorageTime of( int hour, int minute, int second, int nano ) {\n        return new StorageTime( hour, minute, second, nano );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/UniformDateTimeAudit.java",
    "content": "package com.pinecone.framework.util.datetime;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.time.LocalDateTime;\n\npublic interface UniformDateTimeAudit extends Pinenut {\n    UniformDateTimeAudit DefaultAudit = new GenericMultiFormDateTimeAudit();\n\n    boolean matches     ( String szDateTime, LocalDateTime targetTime ) ;\n\n    boolean betweenSec  ( String szDateTime, LocalDateTime targetTime, int nSecondAccuracy ) ;\n\n    boolean betweenMin  ( String szDateTime, LocalDateTime targetTime, int nMinuteAccuracy ) ;\n\n    boolean between     ( String szDateTime, LocalDateTime targetTime, int nMillisAccuracy ) ;\n\n    default boolean matches    ( String szDateTime ) {\n        return this.matches( szDateTime, LocalDateTime.now() );\n    }\n\n    default boolean betweenSec ( String szDateTime, int nSecondAccuracy ) {\n        return this.betweenSec( szDateTime, LocalDateTime.now(), nSecondAccuracy );\n    }\n\n    default boolean betweenMin ( String szDateTime, int nMinuteAccuracy ) {\n        return this.betweenMin( szDateTime, LocalDateTime.now(), nMinuteAccuracy );\n    }\n\n    default boolean between    ( String szDateTime, int nMillisAccuracy ) {\n        return this.between( szDateTime, LocalDateTime.now(), nMillisAccuracy );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/compact/CompactTimeUnit.java",
    "content": "package com.pinecone.framework.util.datetime.compact;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface CompactTimeUnit extends Pinenut {\n    boolean isInfinite();\n\n    boolean isMilliseconds() ;\n\n    boolean isSeconds() ;\n\n    boolean isMinutes() ;\n\n    boolean isHours() ;\n\n    boolean isDays() ;\n\n    long toMask64() ;\n\n    String getSymbol();\n\n    short bits();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/compact/CompactTimeUnit32.java",
    "content": "package com.pinecone.framework.util.datetime.compact;\n\npublic enum CompactTimeUnit32 implements CompactTimeUnit {\n    INFINITE     ( 0xFFFFFFFF, \"INF\"  ),\n    MILLISECONDS ( 0x00000000, \"ms\" ),\n    SECONDS      ( 0x20000000, \"s\"  ),\n    MINUTES      ( 0x40000000, \"m\"  ),\n    HOURS        ( 0x60000000, \"h\"  ),\n    DAYS         ( 0x80000000, \"d\"  );\n\n    private final int    mask;\n    private final String symbol;\n\n    CompactTimeUnit32( int mask, String symbol ) {\n        this.mask = mask;\n        this.symbol = symbol;\n    }\n\n    public int getMask() {\n        return this.mask;\n    }\n\n    @Override\n    public String getSymbol() {\n        return this.symbol;\n    }\n\n    @Override\n    public boolean isInfinite() {\n        return this.getMask() == CompactTimeUnit32.INFINITE.getMask();\n    }\n\n    @Override\n    public boolean isMilliseconds() {\n        return this.getMask() == CompactTimeUnit32.MILLISECONDS.getMask();\n    }\n\n    @Override\n    public boolean isSeconds() {\n        return this.getMask() == CompactTimeUnit32.SECONDS.getMask();\n    }\n\n    @Override\n    public boolean isMinutes() {\n        return this.getMask() == CompactTimeUnit32.MINUTES.getMask();\n    }\n\n    @Override\n    public boolean isHours() {\n        return this.getMask() == CompactTimeUnit32.HOURS.getMask();\n    }\n\n    @Override\n    public boolean isDays() {\n        return this.getMask() == CompactTimeUnit32.DAYS.getMask();\n    }\n\n    @Override\n    public long toMask64() {\n        return this.getMask();\n    }\n\n    @Override\n    public short bits() {\n        return CompactTimestamp32.BITS;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/compact/CompactTimestamp.java",
    "content": "package com.pinecone.framework.util.datetime.compact;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface CompactTimestamp extends Pinenut {\n    long toMilliseconds();\n\n    long toSeconds();\n\n    long toMinutes();\n\n    long toHours();\n\n    long toDays();\n\n    int toInt32();\n\n    CompactTimeUnit getUnit();\n\n    boolean isInfinite();\n\n    short bits();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/datetime/compact/CompactTimestamp32.java",
    "content": "package com.pinecone.framework.util.datetime.compact;\n\npublic class CompactTimestamp32 implements CompactTimestamp {\n    public static final int MASK_TYPE            = 0xE0000000; // Hi   3 => Type\n    public static final int MASK_VALUE           = 0x1FFFFFFF; // Low 31 => Time\n    public static final int INFINITE             = 0xFFFFFFFF;\n\n    public static final long MILLIS_PER_SECOND   = 1_000L;\n    public static final long MILLIS_PER_MINUTE   = 60_000L;\n    public static final long MILLIS_PER_HOUR     = 3_600_000L;\n    public static final long MILLIS_PER_DAY      = 86_400_000L;\n\n    public static final int BITS                 = Integer.SIZE;\n\n\n    protected int mnUint32Timestamp;\n\n    public CompactTimestamp32 ( int nUint32Timestamp, boolean raw ) {\n        this.mnUint32Timestamp = nUint32Timestamp;\n    }\n\n    public CompactTimestamp32 ( int nMillis ) {\n        this( nMillis, CompactTimeUnit32.MILLISECONDS );\n    }\n\n    public CompactTimestamp32 ( int val, CompactTimeUnit timeUnit ) {\n        this( CompactTimestamp32.encode( val, (CompactTimeUnit32) timeUnit ), true );\n    }\n\n\n    @Override\n    public long toMilliseconds() {\n        return CompactTimestamp32.toMilliseconds( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public long toSeconds() {\n        return CompactTimestamp32.toSeconds( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public long toMinutes() {\n        return CompactTimestamp32.toMinutes( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public long toHours() {\n        return CompactTimestamp32.toHours( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public long toDays() {\n        return CompactTimestamp32.toDays( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public int toInt32() {\n        return CompactTimestamp32.decodeValue( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public CompactTimeUnit32 getUnit() {\n        return 
CompactTimestamp32.decodeType( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public boolean isInfinite() {\n        return CompactTimestamp32.isInfinite( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public short bits() {\n        return BITS;\n    }\n\n    @Override\n    public boolean equals( Object obj ) {\n        if ( this == obj ) {\n            return true;\n        }\n        if ( obj == null || getClass() != obj.getClass() ) {\n            return false;\n        }\n        CompactTimestamp32 that = ( CompactTimestamp32 ) obj;\n        return this.mnUint32Timestamp == that.mnUint32Timestamp;\n    }\n\n    @Override\n    public int hashCode() {\n        return Integer.hashCode( this.mnUint32Timestamp );\n    }\n\n    @Override\n    public String toString() {\n        return CompactTimestamp32.format( this.mnUint32Timestamp );\n    }\n\n\n\n    public static CompactTimestamp32 from ( int val, CompactTimeUnit timeUnit ) {\n        return new CompactTimestamp32( val, timeUnit );\n    }\n\n    public static CompactTimestamp32 from ( long millis ) {\n        return new CompactTimestamp32( CompactTimestamp32.fromMilliseconds( millis ), true );\n    }\n\n\n\n\n    public static int encode( int value, CompactTimeUnit32 unit ) {\n        if ( value < 0 || value > MASK_VALUE ) {\n            throw new IllegalArgumentException( \"Out of rang: \" + value );\n        }\n        return value | unit.getMask();\n    }\n\n    public static int decodeValue( int encoded ) {\n        if ( encoded == INFINITE ) {\n            return INFINITE;\n        }\n        return encoded & MASK_VALUE;\n    }\n\n    public static CompactTimeUnit32 decodeType( int encoded ) {\n        if ( encoded == INFINITE ) {\n            return CompactTimeUnit32.INFINITE;\n        }\n\n        int type = encoded & MASK_TYPE;\n        for ( CompactTimeUnit32 unit : CompactTimeUnit32.values() ) {\n            if ( unit.getMask() == type ) {\n                return unit;\n            }\n  
      }\n\n        return null;\n    }\n\n    public static long toMilliseconds( int encoded ) {\n        if ( CompactTimestamp32.isInfinite( encoded ) ) {\n            return -1L;\n        }\n\n        int value = CompactTimestamp32.decodeValue( encoded );\n        CompactTimeUnit32 unit = CompactTimestamp32.decodeType( encoded );\n\n        if ( unit == null ) {\n            throw new IllegalArgumentException( \"Unknown `TimeUnit`.\" );\n        }\n\n        switch ( unit ) {\n            case MILLISECONDS: {\n                return value;\n            }\n            case SECONDS: {\n                return value * MILLIS_PER_SECOND;\n            }\n            case MINUTES: {\n                return value * MILLIS_PER_MINUTE;\n            }\n            case HOURS: {\n                return value * MILLIS_PER_HOUR;\n            }\n            case DAYS: {\n                return value * MILLIS_PER_DAY;\n            }\n            default: {\n                return -1L;\n            }\n        }\n    }\n\n    public static long toSeconds( int val ) {\n        long millis = CompactTimestamp32.toMilliseconds( val );\n        return millis == -1L ? -1L : millis / MILLIS_PER_SECOND;\n    }\n\n    public static long toMinutes( int val ) {\n        long millis = CompactTimestamp32.toMilliseconds( val );\n        return millis == -1L ? -1L : millis / MILLIS_PER_MINUTE;\n    }\n\n    public static long toHours( int val ) {\n        long millis = CompactTimestamp32.toMilliseconds( val );\n        return millis == -1L ? -1L : millis / MILLIS_PER_HOUR;\n    }\n\n    public static long toDays( int val ) {\n        long millis = CompactTimestamp32.toMilliseconds( val );\n        return millis == -1L ? 
-1L : millis / MILLIS_PER_DAY;\n    }\n\n    public static int fromMilliseconds( long millis ) {\n        if ( millis == -1L ) {\n            return INFINITE;\n        }\n\n        if ( millis < 0 ) {\n            throw new IllegalArgumentException( \"Negative milliseconds unacceptable.\" );\n        }\n\n        if ( millis % MILLIS_PER_DAY == 0 ) {\n            long days = millis / MILLIS_PER_DAY;\n            if ( days <= MASK_VALUE ) {\n                return CompactTimestamp32.encode( (int) days, CompactTimeUnit32.DAYS );\n            }\n        }\n        if ( millis % MILLIS_PER_HOUR == 0 ) {\n            long hours = millis / MILLIS_PER_HOUR;\n            if ( hours <= MASK_VALUE ) {\n                return CompactTimestamp32.encode( (int) hours, CompactTimeUnit32.HOURS );\n            }\n        }\n        if ( millis % MILLIS_PER_MINUTE == 0 ) {\n            long minutes = millis / MILLIS_PER_MINUTE;\n            if ( minutes <= MASK_VALUE ) {\n                return CompactTimestamp32.encode( (int) minutes, CompactTimeUnit32.MINUTES );\n            }\n        }\n        if ( millis % MILLIS_PER_SECOND == 0 ) {\n            long seconds = millis / MILLIS_PER_SECOND;\n            if ( seconds <= MASK_VALUE ) {\n                return CompactTimestamp32.encode( (int) seconds, CompactTimeUnit32.SECONDS );\n            }\n        }\n\n        if ( millis <= MASK_VALUE ) {\n            return CompactTimestamp32.encode( (int) millis, CompactTimeUnit32.MILLISECONDS );\n        }\n\n        return INFINITE;\n    }\n\n    public static String format( int encoded ) {\n        if ( CompactTimestamp32.isInfinite( encoded ) ) {\n            return CompactTimeUnit32.INFINITE.getSymbol();\n        }\n\n        CompactTimeUnit32 unit = CompactTimestamp32.decodeType( encoded );\n        if ( unit == null ) {\n            return CompactTimeUnit32.INFINITE.getSymbol();\n        }\n\n        return CompactTimestamp32.decodeValue( encoded ) + \" \" + unit.getSymbol();\n    
}\n\n    public static boolean isInfinite( int encoded ) {\n        return encoded == INFINITE;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/BytesID.java",
    "content": "package com.pinecone.framework.util.id;\n\npublic interface BytesID extends Identification {\n\n    int length();\n\n    String toHexString();\n\n    String toBase64String();\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/GUID.java",
    "content": "package com.pinecone.framework.util.id;\n\npublic interface GUID extends NumericID {\n\n    long hashCode64();\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/GuidAllocator.java",
    "content": "package com.pinecone.framework.util.id;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface GuidAllocator extends Pinenut {\n\n    GUID nextGUID();\n\n    GUID parse( final String hexId );\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/GuidGenerateException.java",
    "content": "package com.pinecone.framework.util.id;\n\npublic class GuidGenerateException extends RuntimeException {\n    private static final long serialVersionUID = -27048199131316992L;\n\n    public GuidGenerateException() {\n        super();\n    }\n\n    public GuidGenerateException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public GuidGenerateException( String message ) {\n        super(message);\n    }\n\n    public GuidGenerateException( String msgFormat, Object... args ) {\n        super(String.format(msgFormat, args));\n    }\n\n    public GuidGenerateException( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/Identification.java",
    "content": "package com.pinecone.framework.util.id;\n\nimport java.io.Serializable;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Identification extends Pinenut, Serializable, Comparable<Identification> {\n\n    Identification parse( String code );\n\n    byte[] toBytes();\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/IllegalIdentificationException.java",
    "content": "package com.pinecone.framework.util.id;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class IllegalIdentificationException extends PineRuntimeException {\n    public IllegalIdentificationException    () {\n        super();\n    }\n\n    public IllegalIdentificationException    ( String message ) {\n        super(message);\n    }\n\n    public IllegalIdentificationException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public IllegalIdentificationException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected IllegalIdentificationException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/Int32ID.java",
    "content": "package com.pinecone.framework.util.id;\n\nimport com.pinecone.framework.util.Bytes;\n\npublic class Int32ID implements NumericID {\n    protected int mId;\n\n    public Int32ID( int id ) {\n        this.mId = id;\n    }\n\n    @Override\n    public Identification parse( String hexID ) {\n        this.mId = Integer.parseInt( hexID, 16 );\n        return this;\n    }\n\n    @Override\n    public long longVal() {\n        return this.mId;\n    }\n\n    @Override\n    public int intVal() {\n        return this.mId;\n    }\n\n    @Override\n    public String toString() {\n        return Integer.toUnsignedString( this.mId );\n    }\n\n    @Override\n    public byte[] toBytesLE() {\n        return Bytes.int32ToBytesLE( this.mId );\n    }\n\n    @Override\n    public byte[] toBytesBE() {\n        return Bytes.int32ToBytesBE( this.mId );\n    }\n\n    @Override\n    public int sizeof() {\n        return Integer.BYTES;\n    }\n\n    @Override\n    public int compareTo( Identification that ) {\n        Int32ID val;\n        if ( that instanceof Int32ID ) {\n            val = (Int32ID) that;\n        }\n        else {\n            throw new IllegalArgumentException( \"Not Int32ID\" );\n        }\n\n        return Integer.compare( this.mId, val.mId );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/Int64ID.java",
    "content": "package com.pinecone.framework.util.id;\n\nimport com.pinecone.framework.util.Bytes;\n\npublic class Int64ID implements NumericID {\n    protected long mId;\n\n    public Int64ID( long id ) {\n        this.mId = id;\n    }\n\n    @Override\n    public Identification parse( String hexID ) {\n        this.mId = Long.parseLong( hexID, 16 );\n        return this;\n    }\n\n    @Override\n    public long longVal() {\n        return this.mId;\n    }\n\n    @Override\n    public int intVal() {\n        return (int) this.mId;\n    }\n\n    @Override\n    public String toString() {\n        return Long.toUnsignedString( this.mId );\n    }\n\n    @Override\n    public byte[] toBytesLE() {\n        return Bytes.int64ToBytesLE( this.mId );\n    }\n\n    @Override\n    public byte[] toBytesBE() {\n        return Bytes.int64ToBytesBE( this.mId );\n    }\n\n    @Override\n    public int sizeof() {\n        return Long.BYTES;\n    }\n\n    @Override\n    public int compareTo( Identification that ) {\n        Int64ID val;\n        if ( that instanceof Int64ID ) {\n            val = (Int64ID) that;\n        }\n        else {\n            throw new IllegalArgumentException( \"Not Int64ID\" );\n        }\n\n        return Long.compare( this.mId, val.mId );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/NameStringID.java",
    "content": "package com.pinecone.framework.util.id;\n\nimport com.pinecone.framework.util.json.JSONString;\n\npublic class NameStringID implements StringID, JSONString {\n\n    private String name;\n\n    public NameStringID( String name ) {\n        this.name = name;\n    }\n\n    @Override\n    public Identification parse( String code ) {\n        this.name = code;\n        return this;\n    }\n\n    @Override\n    public String toString() {\n        return this.name;\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.name + \"\\\"\";\n    }\n\n    @Override\n    public boolean equals( Object obj ) {\n        if ( this == obj ) {\n            return true;\n        }\n        if ( !(obj instanceof NameStringID) ) {\n            return false;\n        }\n        NameStringID that = (NameStringID) obj;\n        return this.name.equals(that.name);\n    }\n\n    @Override\n    public int hashCode() {\n        return this.name.hashCode();\n    }\n\n    @Override\n    public byte[] toBytes() {\n        return this.name.getBytes();\n    }\n\n    @Override\n    public int compareTo( Identification that ) {\n        StringID val;\n        if ( that instanceof StringID ) {\n            val = (StringID) that;\n        }\n        else {\n            throw new IllegalArgumentException( \"Not StringID\" );\n        }\n\n        return this.name.compareTo( val.toString() );\n    }\n\n    @Override\n    public int length() {\n        return this.name.length();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/NumericID.java",
    "content": "package com.pinecone.framework.util.id;\n\npublic interface NumericID extends Identification {\n\n    long longVal();\n\n    int intVal();\n\n    int sizeof();\n\n    default int bitsof() {\n        return this.sizeof() * 8;\n    }\n\n    @Override\n    default String toJSONString() {\n        return this.toString();\n    }\n\n    byte[] toBytesLE();\n\n    byte[] toBytesBE();\n\n    // Pinecone is using uniformed Little-Endian by default.\n    @Override\n    default byte[] toBytes() {\n        return this.toBytesLE();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/id/StringID.java",
    "content": "package com.pinecone.framework.util.id;\n\npublic interface StringID extends Identification {\n\n    int length();\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/FileIterator.java",
    "content": "package com.pinecone.framework.util.io;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\n\npublic class FileIterator implements Iterator<File > {\n    private PathIterator mPathIterator;\n\n    public FileIterator( PathIterator iterator ) {\n        this.mPathIterator = iterator;\n    }\n\n    public FileIterator( File root, boolean recursive, boolean ignoreException ) throws IOException {\n        this.mPathIterator = new PathItemIterator( root.toPath(), recursive, ignoreException );\n    }\n\n    public FileIterator( File root, boolean recursive ) throws IOException {\n        this( root, recursive, false );\n    }\n\n    public FileIterator( File root ) throws IOException {\n        this( root, true );\n    }\n\n    @Override\n    public boolean hasNext() {\n        return this.mPathIterator.hasNext();\n    }\n\n    @Override\n    public File next() {\n        Path path = this.mPathIterator.next();\n        if ( path == null ) {\n            throw new NoSuchElementException( \"No more files\" );\n        }\n        return path.toFile();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/FileNamePathIterator.java",
    "content": "package com.pinecone.framework.util.io;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.Iterator;\n\npublic class FileNamePathIterator extends PathItemIterator implements Iterator<Path > {\n    public FileNamePathIterator( Path root, boolean recursive, boolean ignoreException ) throws IOException {\n        super( root, recursive, ignoreException );\n    }\n\n    public FileNamePathIterator( Path root, boolean recursive ) throws IOException {\n        this( root, recursive, false );\n    }\n\n    public FileNamePathIterator( Path root ) throws IOException {\n        this( root, true );\n    }\n\n    @Override\n    public Path next() {\n        return super.next().getFileName();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/FileUtils.java",
    "content": "package com.pinecone.framework.util.io;\n\nimport com.pinecone.framework.util.OSIdentifier;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.math.BigInteger;\nimport java.nio.charset.Charset;\nimport java.nio.charset.StandardCharsets;\n\npublic final class FileUtils {\n    public static byte[] readByteAll( File pFile ) throws IOException {\n        Long fileLength = pFile.length();\n        byte[] fileContent = new byte[ fileLength.intValue() ];\n\n        try ( FileInputStream fileInputStream = new FileInputStream(pFile) ){\n            int nRealReadied = fileInputStream.read(fileContent);\n\n            if ( nRealReadied != fileLength.intValue() ) {\n                throw new IOException( \"Read all content failed !\" );\n            }\n        }\n\n        return fileContent;\n    }\n\n    public static String readAll ( String szFileDir ) throws IOException {\n        File file = new File(szFileDir);\n        return readAll(file);\n    }\n\n    public static String readAll ( String szFileDir, Charset charset ) throws IOException {\n        File file = new File(szFileDir);\n        return readAll( file, charset );\n    }\n\n    public static String readAll ( File pFile, Charset charset ) throws IOException {\n        return new String( readByteAll( pFile ), charset );\n    }\n\n    public static String readAll ( File pFile ) throws IOException {\n        return new String( readByteAll( pFile ), StandardCharsets.UTF_8 );\n    }\n\n\n\n    private static void checkDirectory( File directory ) {\n        if (!directory.exists()) {\n            throw new IllegalArgumentException(directory + \" does not exist\");\n        }\n        else if (!directory.isDirectory()) {\n            throw new IllegalArgumentException(directory + \" is not a directory\");\n        }\n    }\n\n    public static boolean isSymlink( File file ) throws IOException {\n        if ( file == 
null ) {\n            throw new NullPointerException(\"file must not be null\");\n        }\n        else if ( OSIdentifier.isWindows() ) {\n            return false;\n        }\n        else {\n            File fileInCanonicalDir = null;\n            if ( file.getParent() == null ) {\n                fileInCanonicalDir = file;\n            }\n            else {\n                File canonicalDir = file.getParentFile().getCanonicalFile();\n                fileInCanonicalDir = new File(canonicalDir, file.getName());\n            }\n\n            return !fileInCanonicalDir.getCanonicalFile().equals( fileInCanonicalDir.getAbsoluteFile() );\n        }\n    }\n\n    public static long sizeOf( File file ) {\n        if ( !file.exists() ) {\n            String message = file + \" does not exist\";\n            throw new IllegalArgumentException(message);\n        }\n        else {\n            return file.isDirectory() ? sizeOfDirectory(file) : file.length();\n        }\n    }\n\n    public static BigInteger sizeOfAsBigInteger( File file ) {\n        if ( !file.exists() ) {\n            String message = file + \" does not exist\";\n            throw new IllegalArgumentException(message);\n        }\n        else {\n            return file.isDirectory() ? 
sizeOfDirectoryAsBigInteger(file) : BigInteger.valueOf(file.length());\n        }\n    }\n\n    public static long sizeOfDirectory( File directory ) {\n        FileUtils.checkDirectory(directory);\n        File[] files = directory.listFiles();\n        if ( files == null ) {\n            return 0L;\n        }\n        else {\n            long size = 0L;\n            File[] arr = files;\n            int len = files.length;\n\n            for( int i = 0; i < len; ++i ) {\n                File file = arr[i];\n\n                try {\n                    if ( !FileUtils.isSymlink(file) ) {\n                        size += FileUtils.sizeOf(file);\n                        if ( size < 0L ) {\n                            break;\n                        }\n                    }\n                }\n                catch ( IOException e ) {\n                    // Do nothing\n                }\n            }\n\n            return size;\n        }\n    }\n\n    public static BigInteger sizeOfDirectoryAsBigInteger( File directory ) {\n        FileUtils.checkDirectory(directory);\n        File[] files = directory.listFiles();\n        if (files == null) {\n            return BigInteger.ZERO;\n        } else {\n            BigInteger size = BigInteger.ZERO;\n            File[] arr = files;\n            int len = files.length;\n\n            for( int i = 0; i < len; ++i ) {\n                File file = arr[i];\n\n                try {\n                    if ( !FileUtils.isSymlink(file) ) {\n                        size = size.add(BigInteger.valueOf(sizeOf(file)));\n                    }\n                }\n                catch ( IOException e ) {\n                    // Do nothing\n                }\n            }\n\n            return size;\n        }\n    }\n\n\n    public static void forceDelete( File file ) throws IOException {\n        if ( file.isDirectory() ) {\n            FileUtils.deleteDirectory(file);\n        }\n        else {\n            boolean filePresent = 
file.exists();\n            if ( !file.delete() ) {\n                if ( !filePresent ) {\n                    throw new FileNotFoundException(\"file does not exist: \" + file);\n                }\n\n                String message = \"Unable to delete file: \" + file;\n                throw new IOException( message );\n            }\n        }\n\n    }\n\n    public static void purgeDirectory( File directory ) throws IOException {\n        FileUtils.deleteDirectory( directory );\n    }\n\n    public static void deleteDirectory( File directory ) throws IOException {\n        if ( directory.exists() ) {\n            if ( !isSymlink(directory) ) {\n                FileUtils.cleanDirectory(directory);\n            }\n\n            if ( !directory.delete() ) {\n                String message = \"Unable to delete directory \" + directory + \".\";\n                throw new IOException(message);\n            }\n        }\n    }\n\n    public static boolean deleteQuietly( File file ) {\n        if ( file == null ) {\n            return false;\n        }\n        else {\n            try {\n                if (file.isDirectory()) {\n                    FileUtils.cleanDirectory(file);\n                }\n            }\n            catch ( Exception e ) {\n                // Do nothing\n            }\n\n            try {\n                return file.delete();\n            }\n            catch ( Exception e ) {\n                return false;\n            }\n        }\n    }\n\n    public static void cleanDirectory( File directory ) throws IOException {\n        String message;\n        if ( !directory.exists() ) {\n            message = directory + \" does not exist\";\n            throw new IllegalArgumentException(message);\n        }\n        else if ( !directory.isDirectory() ) {\n            message = directory + \" is not a directory\";\n            throw new IllegalArgumentException(message);\n        }\n        else {\n            File[] files = 
directory.listFiles();\n            if ( files == null ) {\n                throw new IOException(\"Failed to list contents of \" + directory);\n            }\n            else {\n                IOException exception = null;\n                File[] arr = files;\n                int len = files.length;\n\n                for( int i = 0; i < len; ++i ) {\n                    File file = arr[i];\n\n                    try {\n                        FileUtils.forceDelete(file);\n                    }\n                    catch ( IOException e ) {\n                        exception = e;\n                    }\n                }\n\n                if ( null != exception ) {\n                    throw exception;\n                }\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/PathItemIterator.java",
    "content": "package com.pinecone.framework.util.io;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\n\nimport java.io.IOException;\nimport java.nio.file.DirectoryStream;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\nimport java.util.Stack;\n\npublic class PathItemIterator implements PathIterator {\n    private Stack<DirectoryStream<Path > > mDirStack  = new Stack<>() ;\n    private Stack<Iterator<Path > >        mIterStack = new Stack<>() ;\n    private Path                           mNextPath  = null          ;\n    private boolean                        mbRecursive                ;\n    private boolean                        mbIgnoreException          ;\n\n    public PathItemIterator(Path root, boolean recursive, boolean ignoreException ) throws IOException {\n        this.mbRecursive       = recursive;\n        this.mbIgnoreException = ignoreException;\n\n        if ( root != null && Files.exists(root) ) {\n            if ( Files.isDirectory( root ) ) {\n                DirectoryStream<Path> stream = Files.newDirectoryStream(root);\n                this.mDirStack.push(stream);\n                this.mIterStack.push(stream.iterator());\n            }\n            else {\n                this.mNextPath = root;\n            }\n        }\n    }\n\n    public PathItemIterator(Path root, boolean recursive ) throws IOException {\n        this( root, recursive, false );\n    }\n\n    public PathItemIterator(Path root ) throws IOException {\n        this( root, true );\n    }\n\n\n\n    @Override\n    public boolean hasNext() {\n        if ( this.mNextPath != null ) {\n            return true;\n        }\n\n        while ( !this.mIterStack.isEmpty() ) {\n            Iterator<Path > iter = this.mIterStack.peek();\n\n            if ( iter.hasNext() ) {\n                Path file = iter.next();\n                if ( Files.isDirectory(file) ) {\n                    
this.mNextPath = file;\n                    if ( this.mbRecursive ) {\n                        try {\n                            DirectoryStream<Path > stream = Files.newDirectoryStream(file);\n                            this.mDirStack.push( stream );\n                            this.mIterStack.push( stream.iterator() );\n                        }\n                        catch ( IOException e ) {\n                            if( !this.mbIgnoreException ) {\n                                throw new ProxyProvokeHandleException( e );\n                            }\n                        }\n                    }\n                    return true;\n                }\n                else {\n                    this.mNextPath = file;\n                    return true;\n                }\n            }\n            else {\n                this.mIterStack.pop();\n                try {\n                    this.mDirStack.pop().close();\n                }\n                catch ( IOException e ) {\n                    if( !this.mbIgnoreException ) {\n                        throw new ProxyProvokeHandleException( e );\n                    }\n                }\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public Path next() {\n        if ( !this.hasNext() ) {\n            throw new NoSuchElementException( \"No more files\" );\n        }\n\n        Path result = this.mNextPath;\n        this.mNextPath = null;\n        return result;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/PathIterator.java",
    "content": "package com.pinecone.framework.util.io;\n\nimport java.nio.file.Path;\nimport java.util.Iterator;\n\npublic interface PathIterator extends Iterator<Path > {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/Tracer.java",
    "content": "package com.pinecone.framework.util.io;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.PrintStream;\n\npublic interface Tracer extends Pinenut {\n    PrintStream getOut();\n\n    PrintStream getErr();\n\n\n\n    Tracer echo( Object data, Object...objects ) ;\n\n    Tracer cerr( Object data, Object...objects ) ;\n\n\n    Tracer log( Object that );\n\n    Tracer log( Object Anything, Object...objects );\n\n\n    Tracer info( Object that );\n\n    Tracer info( Object Anything, Object...objects );\n\n\n    Tracer warn ( Object Anything, Object...objects );\n\n    Tracer warn ( Object that );\n\n\n    Tracer error ( Object Anything, Object...objects );\n\n    Tracer error ( Object that );\n\n\n    Tracer trace() ;\n\n    Tracer trace( Object Anything, Object...objects ) ;\n\n\n    Tracer colorf( int colorCode, Object that );\n\n    Tracer colorf( int colorCode, Object Anything, Object...objects );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/io/Tracerson.java",
    "content": "package com.pinecone.framework.util.io;\n\nimport com.pinecone.framework.system.prototype.PinenutTraits;\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.io.PrintStream;\n\npublic class Tracerson implements Tracer {\n    public static final String COLOR_STRING_UTF_END = \"\\u001B[0m\";\n\n    protected String mszInfoColor    = \"\\u001B[34m\";\n\n    protected String mszWarnColor    = \"\\u001B[33m\";\n\n    protected String mszTraceColor   = \"\\u001B[36m\";\n\n    protected String mszElementSpilt = \" \";\n\n    protected PrintStream out = System.out;\n\n    protected PrintStream err = System.err;\n\n\n    public Tracerson() {\n\n    }\n\n    public Tracerson( PrintStream out, PrintStream err ) {\n        this.out = out;\n        this.err = err;\n    }\n\n    protected String stringify( Object data ){\n        try {\n            return PinenutTraits.invokeToJSONString( data );\n        }\n        catch ( Exception e1 ){\n            return JSON.stringify( data );\n        }\n    }\n\n    @Override\n    public PrintStream getOut() {\n        return this.out;\n    }\n\n    @Override\n    public PrintStream getErr() {\n        return this.err;\n    }\n\n\n    @Override\n    public Tracerson echo( Object data, Object...objects ) {\n        this.out.print( data );\n        for ( Object row : objects ) {\n            this.out.print( row );\n        }\n        return this;\n    }\n\n    @Override\n    public Tracerson cerr( Object data, Object...objects ) {\n        this.err.print( data );\n        for ( Object row : objects ) {\n            this.err.print( row );\n        }\n        return this;\n    }\n\n\n\n    protected void printlnColorfulEnd() {\n        if( this.out.equals( System.out ) ) {\n            this.out.println( Tracerson.COLOR_STRING_UTF_END );\n        }\n        else {\n            this.out.println();\n        }\n    }\n\n    protected void printlnStringify( Object Anything, Object...objects ) {\n        this.out.print( 
this.stringify( Anything ) );\n        for ( Object row : objects ) {\n            this.out.print( this.mszElementSpilt );\n            this.out.print( this.stringify( row ) );\n        }\n    }\n\n    protected void printlnColorful( String szColor, Object Anything, Object...objects ) {\n        this.out.print( szColor );\n        this.printlnStringify( Anything, objects );\n        this.printlnColorfulEnd();\n    }\n\n\n    @Override\n    public Tracerson log( Object that ){\n        this.out.println( this.stringify( that ) );\n        return this;\n    }\n\n    @Override\n    public Tracerson log( Object Anything, Object...objects ){\n        this.printlnStringify( Anything, objects );\n        this.out.println();\n        return this;\n    }\n\n\n    protected String queryInfoColor(){\n        if( this.out.equals( System.out ) ) {\n            return this.mszInfoColor;\n        }\n        else {\n            return \"[INFO] \";\n        }\n    }\n\n    @Override\n    public Tracerson info( Object that ){\n        this.out.print( this.queryInfoColor() );\n        this.out.print( this.stringify( that ) );\n        this.printlnColorfulEnd();\n        return this;\n    }\n\n    @Override\n    public Tracerson info( Object Anything, Object...objects ){\n        this.printlnColorful( this.queryInfoColor(), Anything, objects );\n        return this;\n    }\n\n\n    protected String queryWarnColor(){\n        if( this.out.equals( System.out ) ) {\n            return this.mszWarnColor;\n        }\n        else {\n            return \"[WARN] \";\n        }\n    }\n\n    @Override\n    public Tracerson warn ( Object that ){\n        this.out.print( this.queryWarnColor() );\n        this.out.print( this.stringify( that ) );\n        this.printlnColorfulEnd();\n        return this;\n    }\n\n    @Override\n    public Tracerson warn ( Object Anything, Object...objects ){\n        this.printlnColorful( this.queryWarnColor(), Anything, objects );\n        return this;\n    
}\n\n\n    @Override\n    public Tracerson error ( Object that ){\n        this.err.println( this.stringify( that ) );\n        return this;\n    }\n\n    @Override\n    public Tracerson error ( Object Anything, Object...objects ){\n        this.err.print( this.stringify( Anything ) );\n        for ( Object row : objects ) {\n            this.err.print( this.mszElementSpilt );\n            this.err.print( this.stringify( row ) );\n        }\n        return this;\n    }\n\n\n    protected String queryTraceColor(){\n        if( this.out.equals( System.out ) ) {\n            return this.mszTraceColor;\n        }\n        else {\n            return \"[TRACE] \";\n        }\n    }\n\n    protected void printTraceInfo( StackTraceElement[] elements ){\n        this.out.println( this.getClass().getName() + \": Call Trace Info:\");\n        if( elements != null ){\n            for( int i = 0; i < elements.length; i++ ){\n                if( i == 0 && elements[0].getClassName().equals( \"java.lang.Thread\" ) ){\n                    continue;\n                }\n                this.out.println( \"\\tat \" + elements[i] );\n            }\n        }\n    }\n\n    @Override\n    public Tracerson trace() {\n        this.out.print( this.queryTraceColor() );\n        this.printTraceInfo( Thread.currentThread().getStackTrace() );\n        this.printlnColorfulEnd();\n        return this;\n    }\n\n    @Override\n    public Tracerson trace( Object Anything, Object...objects ) {\n        this.out.print( this.queryTraceColor() );\n        this.log( Anything, objects );\n        this.printTraceInfo( Thread.currentThread().getStackTrace() );\n        this.printlnColorfulEnd();\n        return this;\n    }\n\n    @Override\n    public Tracer colorf( int colorCode, Object that ){\n        this.out.print( \"\\u001B[\" + colorCode + \"m\" );\n        this.out.print( this.stringify( that ) );\n        this.printlnColorfulEnd();\n        return this;\n    }\n\n    @Override\n    public Tracer 
colorf( int colorCode, Object Anything, Object...objects ){\n        this.printlnColorful( \"\\u001B[\" + colorCode + \"m\", Anything, objects );\n        return this;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/ArchCursorParser.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.util.CursorParser;\nimport com.pinecone.framework.util.GeneralStrings;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.io.Reader;\nimport java.io.StringReader;\n\n/**\n *  Pinecone For Java JSONCursorParser [ Bean Nuts Almond JSON For Pinecone Java ]\n *  Copyright © 2008 - 2028 Bean Nuts Foundation ( DR.Undefined ) All rights reserved. [Harald.E / WJH]\n *  Tip:\n *  *****************************************************************************************\n *  JSON util Version Signature: Ver. 3.4 [Build 20240531] [Pinecone Ver.3.4]\n *  Author: undefined\n *  Last Modified Date: 2024-05-31\n *  *****************************************************************************************\n *  Principle : Bottom-up parsing implementation with recursive descendent method.\n *  [ Due to Java doesn't had fucking pointer, using cursor method can be better ]\n *  Reference[1]: https://www.json.org/json-en.html\n *  Reference[2]: https://spec.json5.org/\n *  Syntax: T-> \"\", T->[0-9] T-> '', T->[a-zA-Z]..., etc\n *          T-> { T : T }, T-> [ T ], etc\n *  Support: JSON, JSON5\n *  *****************************************************************************************\n *  Notice: This is high tolerance JSON parser, It does not fully comply with the JSON standard for error situations\n *  *****************************************************************************************\n */\npublic abstract class ArchCursorParser implements CursorParser {\n    protected long     mnCharacter;\n    protected boolean  mbIsEOF;\n    protected int      mnParseAt ;\n    protected int      mnLineAt;\n    protected char     mcPrevious;\n    protected Reader   mReader;\n    protected boolean  mbUsePrevious;\n\n    public ArchCursorParser( Reader reader ) {\n        this.mReader = 
(Reader)(reader.markSupported() ? reader : new BufferedReader(reader));\n        this.mbIsEOF = false;\n        this.mbUsePrevious = false;\n        this.mcPrevious = 0;\n        this.mnParseAt = 0;\n        this.mnCharacter = 1L;\n        this.mnLineAt = 1;\n    }\n\n    public ArchCursorParser( InputStream inputStream ) throws JSONParseException {\n        this((Reader)(new InputStreamReader(inputStream)));\n    }\n\n    public ArchCursorParser( String s ) {\n        this((Reader)(new StringReader(s)));\n    }\n\n    public void lineBack() {\n        if ( !this.mbUsePrevious && this.mnParseAt > 0L ) {\n            --this.mnParseAt;\n            --this.mnLineAt;\n            if( this.mnCharacter != 0 ) {\n                --this.mnCharacter;\n            }\n            this.mbUsePrevious = true;\n            this.mbIsEOF = false;\n        }\n    }\n\n    @Override\n    public void back() throws JSONParseException {\n        if (!this.mbUsePrevious && this.mnParseAt > 0L) {\n            --this.mnParseAt;\n            --this.mnCharacter;\n            this.mbUsePrevious = true;\n            this.mbIsEOF = false;\n        }\n        else {\n            throw new JSONParseException(\"Stepping back two steps is not supported\");\n        }\n    }\n\n    public static int dehexchar(char c) {\n        if (c >= '0' && c <= '9') {\n            return c - 48;\n        } else if (c >= 'A' && c <= 'F') {\n            return c - 55;\n        } else {\n            return c >= 'a' && c <= 'f' ? 
c - 87 : -1;\n        }\n    }\n\n    public boolean isEndLine() {\n        return this.mbIsEOF && !this.mbUsePrevious;\n    }\n\n    public boolean more() throws JSONParseException {\n        this.next();\n        if ( this.isEndLine() ) {\n            return false;\n        }\n        else {\n            this.back();\n            return true;\n        }\n    }\n\n    @Override\n    public char next() throws JSONParseException {\n        int c;\n        if (this.mbUsePrevious) {\n            this.mbUsePrevious = false;\n            c = this.mcPrevious;\n        }\n        else {\n            try {\n                c = this.mReader.read();\n            }\n            catch ( IOException e ) {\n                throw new JSONParseException(e);\n            }\n\n            if (c <= 0) {\n                this.mbIsEOF = true;\n                c = 0;\n            }\n        }\n\n        ++this.mnParseAt;\n        if ( this.mcPrevious == '\\r' ) {\n            ++this.mnLineAt;\n            this.mnCharacter = (long)(c == 10 ? 
0 : 1);\n        }\n        else if ( c == '\\n' ) {\n            ++this.mnLineAt;\n            this.mnCharacter = 0L;\n        }\n        else {\n            ++this.mnCharacter;\n        }\n\n        this.mcPrevious = (char)c;\n        return this.mcPrevious;\n    }\n\n    public char next( char c ) throws JSONParseException {\n        char n = this.next();\n        if (n != c) {\n            throw this.syntaxError(\"Error parser json string with expected '\" + c + \"' and instead saw '\" + n + \"'\");\n        } else {\n            return n;\n        }\n    }\n\n    @Override\n    public String next( int n ) throws JSONParseException {\n        if (n == 0) {\n            return \"\";\n        }\n        else {\n            char[] chars = new char[n];\n\n            for( int pos = 0; pos < n; ++pos ) {\n                chars[pos] = this.next();\n                if ( this.isEndLine() ) {\n                    throw this.syntaxError(\"Error parser json string with substring bounds error.\");\n                }\n            }\n\n            return new String(chars);\n        }\n    }\n\n\n\n    public boolean skipComment( char cCurrentChar ){\n        if( cCurrentChar == '/' ){\n            char nextC = this.next();\n            if( nextC == '*' ){\n                while( true ) {\n                    char c = this.next();\n                    if( c == '*' ){\n                        c = this.next();\n\n                        while ( c == '*' ) {\n                            c = this.next();\n                        }\n\n                        if( c == '/' ){\n                            return true;\n                        }\n                    }\n                }\n            }\n            else if( nextC == '/' ){\n                while( true ) {\n                    char c = this.next();\n                    if( c == '\\n' ){\n                        this.lineBack();\n                        return true;\n                    }\n                    else if( c 
== '\\r' ){\n                        c = this.next();\n                        if( c == '\\n' ){\n                            this.lineBack();\n                            return true;\n                        }\n                        this.back();\n                        return true;\n                    }\n                }\n            }\n        }\n        return false;\n    }\n\n    public char nextClean() throws JSONParseException {\n        char c;\n        do {\n            c = this.next();\n            if ( this.skipComment( c ) ){\n                c = this.next();\n            }\n        }\n        while( c != 0 && c <= ' ' );\n\n        return c;\n    }\n\n    public StringBuilder nextString( char quote ) throws JSONParseException {\n        StringBuilder sb = new StringBuilder();\n\n        while( true ) {\n            char c = this.next();\n            if( this.isEndLine() ) {\n                return sb;\n            }\n\n            switch(c) {\n                case '\\u0000': {\n                    sb.append( '\\0' );\n                    continue;\n                }\n                case '\\n': {\n                    sb.append( '\\n' );\n                    continue;\n                }\n                case '\\r': {\n                    sb.append( '\\r' );\n                    continue;\n                    //throw this.syntaxError(\"Error parser json string with unterminated string.\"); //What fucking ever, who care.\n                }\n                case '\\\\': {\n                    c = this.next();\n                    if( GeneralStrings.transferCharParse( c, this, sb ) ){\n                        continue;\n                    }\n                }\n                default: {\n                    if ( c == quote ) {\n                        return sb;\n                    }\n\n                    sb.append(c);\n                }\n            }\n        }\n    }\n\n    public String nextTo( char delimiter ) throws JSONParseException {\n      
  StringBuffer sb = new StringBuffer();\n\n        while(true) {\n            char c = this.next();\n            if (c == delimiter || c == 0 || c == '\\n' || c == '\\r') {\n                if (c != 0) {\n                    this.back();\n                }\n\n                return sb.toString().trim();\n            }\n\n            sb.append(c);\n        }\n    }\n\n    public String nextTo( String delimiters ) throws JSONParseException {\n        StringBuffer sb = new StringBuffer();\n\n        while(true) {\n            char c = this.next();\n            if (delimiters.indexOf(c) >= 0 || c == 0 || c == '\\n' || c == '\\r') {\n                if (c != 0) {\n                    this.back();\n                }\n\n                return sb.toString().trim();\n            }\n\n            sb.append(c);\n        }\n    }\n\n    protected StringBuilder eval_next_string( char currentChat ) {\n        StringBuilder sb;\n        for ( sb = new StringBuilder(); currentChat >= ' ' && \",:]}/\\\\\\\"[{;=#&\".indexOf(currentChat) < 0; currentChat = this.next() ) {\n            sb.append(currentChat);\n        }\n        return sb;\n    }\n\n    protected Object eval_next_string_token( StringBuilder sb, char currentChat ) {\n        this.back();\n        String string = sb.toString().trim();\n        if ( string.isEmpty() ) {\n            throw this.syntaxError(\"Error parser json string missing value.\");\n        }\n        else {\n            return JSONUtils.stringToValue( string );\n        }\n    }\n\n    @Override\n    public Object nextValue( Object indexKey, Object parent, Object[] args ) throws JSONParseException {\n        char c = this.nextClean();\n        switch(c) {\n            case '\"':\n            case '\\'': {\n                return this.nextString(c).toString();\n            }\n            case '[': {\n                this.back();\n                return this.newJSONArray( indexKey, this, parent, args );\n            }\n            case '{': {\n          
      this.back();\n                return this.newJSONObject( indexKey, this, parent, args );\n            }\n            default: {\n                StringBuilder sb = this.eval_next_string( c );\n                return this.eval_next_string_token(sb, c);\n            }\n        }\n    }\n\n    @Override\n    public Object nextValue() throws JSONParseException {\n        return this.nextValue( null, null, null );\n    }\n\n    public char skipTo( char to ) throws JSONParseException {\n        char c;\n        try {\n            int startIndex      = this.mnParseAt;\n            int startLine       = this.mnLineAt;\n            long startCharacter = this.mnCharacter;\n            this.mReader.mark(1000000);\n\n            do {\n                c = this.next();\n                if (c == 0) {\n                    this.mReader.reset();\n                    this.mnParseAt = startIndex;\n                    this.mnCharacter = startCharacter;\n                    this.mnLineAt = startLine;\n                    return c;\n                }\n            } while(c != to);\n        }\n        catch ( IOException e ) {\n            throw new JSONParseException( e );\n        }\n\n        this.back();\n        return c;\n    }\n\n    public JSONParseException syntaxError( String message ) {\n        return new JSONParseException( message + this.toString(), (int)this.mnParseAt );\n    }\n\n    @Override\n    public String toString() {\n        return \" at \" + this.mnParseAt + \" [character \" + this.mnCharacter + \" line \" + this.mnLineAt + \"]\";\n    }\n\n    public void handleRedirectException( JSONParserRedirectException e ) {\n\n    }\n\n\n\n    protected abstract Object newJSONArray( Object indexKey, ArchCursorParser parser, Object parent, Object[] args );\n\n    protected abstract Object newJSONObject( Object indexKey, ArchCursorParser parser, Object parent, Object[] args );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/ArchJSONArray.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.system.prototype.TypeIndex;\nimport com.pinecone.framework.util.Debug;\n\nimport java.io.IOException;\nimport java.io.StringWriter;\nimport java.io.Writer;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.Iterator;\nimport java.util.ListIterator;\n\n\npublic abstract class ArchJSONArray implements JSONArray {\n    protected abstract void jsonDecode0( ArchCursorParser x ) throws JSONException ;\n\n    @Override\n    public abstract JSONArray jsonDecode( ArchCursorParser x ) throws JSONException ;\n\n    @Override\n    public abstract JSONArray jsonDecode( String source ) throws JSONException ;\n\n    @Override\n    public abstract void assimilate( List<Object > that );\n\n    @Override\n    public abstract List<Object > getArray();\n\n    @Override\n    public Object front() {\n        return this.opt( 0 );\n    }\n\n    @Override\n    public Object back() {\n        return this.opt( this.length() - 1 );\n    }\n\n    @Override\n    public int length() {\n        return this.size();\n    }\n\n    /** Basic List<Object> **/\n    @Override\n    public abstract int size();\n\n    @Override\n    public abstract boolean isEmpty();\n\n    @Override\n    public abstract boolean contains( Object o );\n\n    @Override\n    public abstract Iterator<Object> iterator();\n\n    @Override\n    public abstract Object[] toArray();\n\n    @Override\n    public abstract <T> T[] toArray( T[] a ) ;\n\n\n\n\n\n    protected abstract boolean innerListAdd( Object e );\n\n    @Override\n    public boolean add( Object e ) {\n        return this.innerListAdd(e);\n    }\n\n    @Override\n    public abstract void clear();\n\n\n\n    protected abstract boolean innerListRemove( Object index );\n\n    @Override\n    public Object remove( int index ) {\n        Object o = 
this.opt(index);\n        if ( index >= 0 && index < this.length() ) {\n            this.innerListRemove( index );\n        }\n\n        return o;\n    }\n\n    @Override\n    public JSONArray xRemove( int index ) {\n        this.remove(index);\n        return this;\n    }\n\n    @Override\n    public boolean remove( Object o ) {\n        return this.innerListRemove( o );\n    }\n\n    @Override\n    public JSONArray xRemove( Object o ) {\n        this.remove(o);\n        return this;\n    }\n\n    @Override\n    public Object erase( Object key ) {\n        return this.remove( JSONUtils.asInt32Key( key ) );\n    }\n\n    @Override\n    public abstract boolean containsAll( Collection<?> c );\n\n    @Override\n    public abstract boolean addAll( Collection<?> c );\n\n    @Override\n    public JSONArray xAddAll( Collection<?> c ) {\n        this.addAll(c);\n        return this;\n    }\n\n    @Override\n    public abstract boolean addAll( int index, Collection<?> c );\n\n    @Override\n    public JSONArray xAddAll( int index, Collection<?> c ) {\n        this.addAll( index, c );\n        return this;\n    }\n\n    @Override\n    public abstract boolean removeAll( Collection<?> c );\n\n    @Override\n    public JSONArray xRemoveAll( Collection<?> c ) {\n        this.removeAll(c);\n        return this;\n    }\n\n    @Override\n    public abstract boolean retainAll( Collection<?> c );\n\n    @Override\n    public JSONArray xRetainAll(Collection<?> c) {\n        this.retainAll(c);\n        return this;\n    }\n\n    protected void affirmCapacity( int cap ) {\n        for( int i = this.size(); i < cap; ++i ) {\n            this.innerListAdd( JSON.NULL );\n        }\n    }\n\n    @Override\n    public Object set( int index, Object element ) {\n        if ( index == -1 ) {\n            this.innerListAdd( element );\n            return null;\n        }\n        else if ( this.size() > index ) {\n            return this.innerListSet( index, element );\n        }\n        else 
{\n            this.affirmCapacity( index );\n            this.innerListAdd( element );\n            return null;\n        }\n    }\n\n    @Override\n    public JSONArray xSet( int index, Object element ) {\n        this.set( index, element );\n        return this;\n    }\n\n    @Override\n    public Object affirm( int index ) {\n        if ( index == -1 ) {\n            this.innerListAdd( JSON.NULL );\n            return JSON.NULL;\n        }\n        else if ( this.size() > index ) {\n            return this.innerListGet( index );\n        }\n        else {\n            this.affirmCapacity( index + 1 );\n            return this.innerListGet( index );\n        }\n    }\n\n    @Override\n    public JSONObject affirmObject( int index ) {\n        if ( index == -1 ) {\n            JSONObject obj = new JSONMaptron();\n            this.innerListAdd( obj );\n            return obj;\n        }\n        else if ( this.size() > index ) {\n            Object obj = this.innerListGet( index );\n            if( obj instanceof JSONObject ) {\n                return (JSONObject) obj;\n            }\n            obj = new JSONMaptron();\n            this.innerListSet( index, obj );\n            return (JSONObject)obj;\n        }\n        else {\n            this.affirmCapacity( index );\n            JSONObject obj = new JSONMaptron();\n            this.innerListAdd( obj );\n            return obj;\n        }\n    }\n\n    @Override\n    public JSONArray affirmArray(int index ) {\n        if ( index == -1 ) {\n            JSONArray obj = new JSONArraytron();\n            this.innerListAdd( obj );\n            return obj;\n        }\n        else if ( this.size() > index ) {\n            Object obj = this.innerListGet( index );\n            if( obj instanceof JSONArray ) {\n                return (JSONArray) obj;\n            }\n            obj = new JSONArraytron();\n            this.innerListSet( index, obj );\n            return (JSONArray) obj;\n        }\n        else {\n      
      this.affirmCapacity( index );\n            JSONArray obj = new JSONArraytron();\n            this.innerListAdd( obj );\n            return obj;\n        }\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.contains( value );\n    }\n\n\n\n    @Override\n    public abstract void add( int index, Object element ) ;\n\n    @Override\n    public JSONArray xAdd( int index, Object element ) {\n        this.add(index, element);\n        return this;\n    }\n\n    @Override\n    public abstract int indexOf( Object o );\n\n    @Override\n    public abstract int lastIndexOf( Object o );\n\n    @Override\n    public abstract ListIterator<Object > listIterator();\n\n    @Override\n    public abstract ListIterator<Object > listIterator( int index );\n\n    @Override\n    public abstract List<Object > subList( int fromIndex, int toIndex ) ;\n\n\n\n\n\n    protected abstract Object innerListGet( int key );\n\n    @Override\n    public Object get( int index ) throws JSONException {\n        Object object = this.opt( index );\n        if ( object == null ) {\n            throw new JSONException(\"JSONArray[\" + index + \"] not found.\");\n        }\n        else {\n            return object;\n        }\n    }\n\n    @Override\n    public Object get( Object key ) {\n        return this.get( JSONUtils.asInt32Key( key ) );\n    }\n\n    @Override\n    public boolean getBoolean( int index ) throws JSONException {\n        Object object = this.get(index);\n        if ( !object.equals(Boolean.FALSE) && (!(object instanceof String) || !((String)object).equalsIgnoreCase(\"false\")) ) {\n            if ( !object.equals(Boolean.TRUE) && (!(object instanceof String) || !((String)object).equalsIgnoreCase(\"true\")) ) {\n                throw new JSONException(\"JSONArray[\" + index + \"] is not a boolean.\");\n            }\n            else {\n                return true;\n            }\n        }\n        else {\n            return false;\n  
      }\n    }\n\n    @Override\n    public double getDouble( int index ) throws JSONException {\n        Object object = this.get(index);\n\n        try {\n            return object instanceof Number ? ( (Number)object ).doubleValue() : Double.parseDouble( (String)object );\n        }\n        catch ( Exception e ) {\n            throw new JSONException(\"JSONArray[\" + index + \"] is not a number.\");\n        }\n    }\n\n    @Override\n    public int getInt( int index ) throws JSONException {\n        Object object = this.get(index);\n\n        try {\n            return object instanceof Number ? ( (Number)object ).intValue() : Integer.parseInt( (String)object );\n        }\n        catch ( Exception e ) {\n            throw new JSONException(\"JSONArray[\" + index + \"] is not a number.\");\n        }\n    }\n\n    @Override\n    public JSONArray getJSONArray(int index ) throws JSONException {\n        Object object = this.get(index);\n        if ( object instanceof JSONArray ) {\n            return (JSONArray)object;\n        }\n        else {\n            throw new JSONException(\"JSONArray[\" + index + \"] is not a JSONArray.\");\n        }\n    }\n\n    @Override\n    public JSONObject getJSONObject( int index ) throws JSONException {\n        Object object = this.get(index);\n        if ( object instanceof JSONObject ) {\n            return (JSONObject)object;\n        }\n        else {\n            throw new JSONException(\"JSONArray[\" + index + \"] is not a JSONObject.\");\n        }\n    }\n\n    @Override\n    public long getLong( int index ) throws JSONException {\n        Object object = this.get(index);\n\n        try {\n            return object instanceof Number ? 
( (Number)object ).longValue() : Long.parseLong( (String)object );\n        }\n        catch ( Exception e ) {\n            throw new JSONException(\"JSONArray[\" + index + \"] is not a number.\");\n        }\n    }\n\n    @Override\n    public String getString( int index ) throws JSONException {\n        Object object = this.get(index);\n        if ( object instanceof String ) {\n            return (String)object;\n        }\n        else {\n            throw new JSONException(\"JSONArray[\" + index + \"] not a string.\");\n        }\n    }\n\n    @Override\n    public byte[] getBytes( int index ) throws JSONException {\n        Object object = this.get(index);\n        if ( object instanceof String ) {\n            return ((String) object).getBytes();\n        }\n        else if ( object instanceof byte[] ) {\n            return (byte[])( (byte[])object );\n        }\n        else {\n            throw new JSONException(\"JSONObject[\" + index + \"] not a string nor bytes.\");\n        }\n    }\n\n    @Override\n    public boolean isNull( int index ) {\n        return JSON.NULL.equals(this.opt(index));\n    }\n\n    @Override\n    public String join( String separator ) throws JSONException {\n        int len = this.length();\n        StringBuffer sb = new StringBuffer();\n\n        for( int i = 0; i < len; ++i ) {\n            if (i > 0) {\n                sb.append(separator);\n            }\n\n            sb.append( JSONUtils.valueToString( this.innerListGet(i)) );\n        }\n\n        return sb.toString();\n    }\n\n    @Override\n    public Object opt( int index ) {\n        return index >= 0 && index < this.length() ? 
this.innerListGet( index ) : null;\n    }\n\n    @Override\n    public boolean optBoolean( int index ) {\n        return this.optBoolean(index, false);\n    }\n\n    @Override\n    public boolean optBoolean( int index, boolean defaultValue ) {\n        try {\n            return this.getBoolean(index);\n        }\n        catch (Exception e) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public double optDouble( int index ) {\n        return this.optDouble( index, Double.NaN );\n    }\n\n    @Override\n    public double optDouble( int index, double defaultValue ) {\n        try {\n            return this.getDouble(index);\n        }\n        catch (Exception e) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public int optInt( int index ) {\n        return this.optInt(index, 0);\n    }\n\n    @Override\n    public int optInt( int index, int defaultValue ) {\n        try {\n            return this.getInt(index);\n        }\n        catch (Exception e) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public JSONArray optJSONArray( int index ) {\n        Object o = this.opt(index);\n        return o instanceof JSONArray ? (JSONArray)o : null;\n    }\n\n    @Override\n    public JSONObject optJSONObject( int index ) {\n        Object o = this.opt(index);\n        return o instanceof JSONObject ? 
(JSONObject)o : null;\n    }\n\n    @Override\n    public long optLong( int index ) {\n        return this.optLong(index, 0L);\n    }\n\n    @Override\n    public long optLong( int index, long defaultValue ) {\n        try {\n            return this.getLong(index);\n        }\n        catch (Exception e) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public String optString( int index ) {\n        return this.optString(index, \"\");\n    }\n\n    @Override\n    public String optString( int index, String defaultValue ) {\n        Object object = this.opt(index);\n        return JSON.NULL.equals(object) ? defaultValue : object.toString();\n    }\n\n    @Override\n    public byte[] optBytes( int index ) {\n        return this.optBytes( index, \"\".getBytes() );\n    }\n\n    @Override\n    public byte[] optBytes( int index, byte[] defaultValue ) {\n        try {\n            return this.getBytes( index );\n        }\n        catch ( Exception e ) {\n            return defaultValue;\n        }\n    }\n\n\n    @Override\n    public Object opt( Object key ) {\n        try {\n            return this.opt(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public boolean optBoolean( Object key ) {\n        try {\n            return this.optBoolean(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return false;\n        }\n    }\n\n    @Override\n    public double optDouble( Object key ) {\n        try {\n            return this.optDouble(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return Double.NaN;\n        }\n    }\n\n    @Override\n    public int optInt( Object key ) {\n        try {\n            return this.optInt(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return Integer.MAX_VALUE;\n        }\n    }\n\n    @Override\n    public JSONArray 
optJSONArray( Object key ) {\n        try {\n            return this.optJSONArray(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public JSONObject optJSONObject( Object key ) {\n        try {\n            return this.optJSONObject(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public long optLong( Object key ) {\n        try {\n            return this.optLong(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return Long.MAX_VALUE;\n        }\n    }\n\n    @Override\n    public String optString( Object key ) {\n        try {\n            return this.optString(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public byte[] optBytes( Object key ) {\n        try {\n            return this.optBytes(JSONUtils.asInt32Key(key));\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n\n\n\n\n    protected abstract Object innerListSet( int index, Object element );\n\n    @Override\n    public JSONArray insert( Object key, Object val ) {\n        return this.put( JSONUtils.asInt32Key( key ), val );\n    }\n\n    @Override\n    public Object insertIfAbsent( Object key, Object value ) {\n        if( !this.containsKey( JSONUtils.asInt32Key( key ) ) ){\n            return this.insert( key, value );\n        }\n        return null;\n    }\n\n    @Override\n    public JSONArray put( boolean value ) {\n        this.put((Object)(value ? 
Boolean.TRUE : Boolean.FALSE));\n        return this;\n    }\n\n    @Override\n    public abstract JSONArray put( Collection value );\n\n    @Override\n    public JSONArray put( double value ) throws JSONException {\n        Double d = value;\n        JSONUtils.prospectNumberQualify(d);\n        this.put( (Object)d );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( int value ) {\n        this.put( (Integer)value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( long value ) {\n        this.put( (Long)value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( Map value ) {\n        this.put((Object)(new JSONMaptron(value)));\n        return this;\n    }\n\n    @Override\n    public JSONArray put( Object value ) {\n        this.innerListAdd( value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( JSONObject value ) {\n        this.innerListAdd( value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( JSONArray value ) {\n        this.innerListAdd( value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( int index, boolean value ) throws JSONException {\n        this.put( index, (Object)(value ? 
Boolean.TRUE : Boolean.FALSE) );\n        return this;\n    }\n\n    @Override\n    public abstract JSONArray put( int index, Collection value ) throws JSONException ;\n\n    @Override\n    public JSONArray put( int index, double value ) throws JSONException {\n        this.put( index, (Double)value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( int index, int value ) throws JSONException {\n        this.put( index, (Integer)value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( int index, long value ) throws JSONException {\n        this.put( index, (Long)value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( int index, JSONArray value ) throws JSONException {\n        this.innerListSet( index, value );\n        return this;\n    }\n\n    @Override\n    public JSONArray put( int index, JSONObject value ) throws JSONException {\n        this.innerListSet( index, value );\n        return this;\n    }\n\n    @Override\n    public abstract JSONArray put( int index, Map value ) throws JSONException ;\n\n    @Override\n    public JSONArray put( int index, Object value ) throws JSONException {\n        JSONUtils.prospectNumberQualify(value);\n        if ( index < 0 ) {\n            throw new JSONException(\"JSONArray[\" + index + \"] not found.\");\n        }\n        else {\n            if ( index < this.length() ) {\n                this.innerListSet( index, value );\n            }\n            else {\n                while( index != this.length() ) {\n                    this.put( JSON.NULL );\n                }\n\n                this.put( value );\n            }\n\n            return this;\n        }\n    }\n\n    @Override\n    public JSONObject toJSONObject( JSONArray names ) throws JSONException {\n        if ( names != null && names.length() != 0 && this.length() != 0 ) {\n            JSONObject jo = new JSONMaptron();\n\n            for( int i = 0; i < names.length(); ++i ) {\n   
             jo.put(names.getString(i), this.opt(i));\n            }\n\n            return jo;\n        }\n\n        return null;\n    }\n\n    @Override\n    public JSONObject toJSONObject()  {\n        JSONObject jo = new JSONMaptron();\n\n        for( int i = 0; i < this.size(); ++i ) {\n            jo.put( String.valueOf(i), this.opt( i ) );\n        }\n\n        return jo;\n    }\n\n    @Override\n    public JSONArray toJSONArray()  {\n        return this;\n    }\n\n    @Override\n    public abstract Set<? > entrySet() ;\n\n    @Override\n    public Collection<Object > values() {\n        return this;\n    }\n\n    @Override\n    public Map<?, Object > toMap() {\n        return this.toJSONObject();\n    }\n\n    @Override\n    public List<Object > toList() {\n        return this;\n    }\n\n\n\n\n    @Override\n    public boolean hasOwnProperty( Object elm ) {\n        return this.containsKey( elm );\n    }\n\n    @Override\n    public boolean hasOwnProperty( int elm ) {\n        return this.containsKey( elm );\n    }\n\n    @Override\n    public boolean containsKey( Object elm ) {\n        try {\n            if( elm instanceof Number ) {\n                return this.hasOwnProperty( ( (Number)elm ).intValue() );\n            }\n            return this.hasOwnProperty( (int)Integer.valueOf(elm.toString()) );\n        }\n        catch ( NumberFormatException e ){\n            return false;\n        }\n    }\n\n    @Override\n    public boolean containsKey( int elm ) {\n        int nLength = this.length();\n        if( elm < 0 || nLength == 0 ){\n            return false;\n        }\n        return nLength > elm;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        try {\n            return this.toJSONString(0);\n        }\n        catch (Exception e) {\n            return null;\n        }\n    }\n\n    @Override\n    public String toJSONStringI( int 
nIndentFactor ) {\n        try {\n            return this.toJSONString(nIndentFactor);\n        }\n        catch (Exception e) {\n            return null;\n        }\n    }\n\n    @Override\n    public String toJSONString( int nIndentFactor ) throws IOException {\n        StringWriter sw = new StringWriter();\n        synchronized(sw.getBuffer()) {\n            return this.write( sw, nIndentFactor, 0 ).toString();\n        }\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this );\n    }\n\n    @Override\n    public String prototypeName() {\n        return Prototype.prototypeName( this );\n    }\n\n    @Override\n    public boolean isPrototypeOf( TypeIndex that ) {\n        return that.equals( this.prototype() );\n    }\n\n\n\n    @Override\n    public JSONArray clone() {\n        try {\n            return  (JSONArray) super.clone();\n        }\n        catch ( CloneNotSupportedException e ) {\n            // this shouldn't happen, since we are Cloneable\n            throw new InternalError(e);\n        }\n    }\n\n    @Override\n    public Writer write( Writer writer ) throws IOException {\n        return this.write( writer, 0, 0 );\n    }\n\n    @Override\n    public Writer write( Writer writer, int nIndentFactor ) throws IOException {\n        return this.write( writer, nIndentFactor, 0 );\n    }\n\n    @Override\n    public abstract Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/ArchJSONObject.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.system.prototype.TypeIndex;\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.io.StringWriter;\nimport java.io.Writer;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.Collection;\nimport java.util.UUID;\nimport java.util.Iterator;\nimport java.util.List;\n\npublic abstract class ArchJSONObject implements JSONObject, Serializable {\n    protected abstract void jsonDecode0( ArchCursorParser x ) throws JSONException ;\n\n    @Override\n    public abstract JSONObject jsonDecode( ArchCursorParser x ) throws JSONException ;\n\n    @Override\n    public abstract JSONObject jsonDecode( String source ) throws JSONException;\n\n\n    @Override\n    public abstract JSONObject assimilate( Map<String, Object> that );\n\n\n\n    @Override\n    public JSONObject shareFrom( JSONObject that, String szKey ) {\n        this.put( szKey, that.get( szKey ) );\n        return this;\n    }\n\n    @Override\n    public JSONObject shareFrom( JSONObject that, String[] szKeys ) {\n        for ( String szKey : szKeys ) {\n            this.putOnce( szKey, that.get( szKey ) );\n        }\n        return this;\n    }\n\n    @Override\n    public JSONObject subJson ( String szKey ) {\n        JSONObject that = new JSONMaptron();\n        that.shareFrom( this, szKey );\n        return that;\n    }\n\n    @Override\n    public JSONObject subJson ( String[] szKeys ) {\n        JSONObject that = new JSONMaptron();\n        that.shareFrom( this, szKeys );\n        return that;\n    }\n\n    @Override\n    public JSONObject detachSub ( String szKey ) {\n        JSONObject that = new JSONMaptron();\n        that.put( szKey, this.get( szKey ) );\n        this.remove( szKey );\n        return that;\n    }\n\n    @Override\n    public JSONObject detachSub ( String[] szKeys 
) {\n        JSONObject that = new JSONMaptron();\n        for ( String szKey : szKeys ) {\n            that.putOnce( szKey, this.get( szKey ) );\n            this.remove( szKey );\n        }\n        return that;\n    }\n\n    @Override\n    public JSONObject moveSubFrom ( JSONObject that, String szKey ) {\n        this.put( szKey, that.get( szKey ) );\n        that.remove( szKey );\n        return this;\n    }\n\n    @Override\n    public JSONObject moveSubFrom ( JSONObject that, String[] szKeys ) {\n        for ( String szKey : szKeys ) {\n            this.putOnce( szKey, that.get( szKey ) );\n            that.remove( szKey );\n        }\n        return this;\n    }\n\n\n\n    @Override\n    public abstract Map<String, Object > getMap();\n\n    /** Basic Map **/\n    @Override\n    public abstract int size();\n\n    @Override\n    public abstract boolean isEmpty();\n\n    protected abstract boolean innerMapContainsKey( Object key );\n\n    @Override\n    public boolean containsKey( Object key ) {\n        boolean result = this.innerMapContainsKey( key );\n        if ( !result && ( key instanceof Number || key instanceof Character || key instanceof Boolean || key instanceof UUID ) ) {\n            result = this.innerMapContainsKey( key.toString() );\n        }\n\n        return result;\n    }\n\n    @Override\n    public abstract boolean containsValue( Object value );\n\n    @Override\n    public abstract void putAll( Map<? extends String, ?> m );\n\n    public JSONObject xPutAll(Map<? 
extends String, ?> m ) {\n        this.putAll(m);\n        return this;\n    }\n\n    @Override\n    public abstract void clear();\n\n    @Override\n    public JSONObject xClear() {\n        this.clear();\n        return this;\n    }\n\n    @Override\n    public abstract Object remove( Object key );\n\n    @Override\n    public Object erase( Object key ) {\n        return this.remove( key );\n    }\n\n    @Override\n    public JSONObject xRemove(Object key) {\n        this.remove(key);\n        return this;\n    }\n\n    @Override\n    public abstract Set<String > keySet();\n\n    @Override\n    public abstract Collection<Object > values();\n\n    @Override\n    public abstract Set<Map.Entry<String, Object > > entrySet();\n\n\n    @Override\n    public JSONObject accumulate( String key, Object value ) throws JSONException {\n        JSONUtils.prospectNumberQualify(value);\n        Object object = this.opt(key);\n        if ( object == null ) {\n            this.put( key, value instanceof JSONArray ? 
(new JSONArraytron()).put(value) : value );\n        }\n        else if ( object instanceof JSONArray ) {\n            ((JSONArray)object).put(value);\n        }\n        else {\n            this.put(key, (Object)( new JSONArraytron()).put(object).put(value) );\n        }\n\n        return this;\n    }\n\n    @Override\n    public JSONObject append( String key, Object value ) throws JSONException {\n        JSONUtils.prospectNumberQualify(value);\n        Object object = this.opt(key);\n        if ( object == null ) {\n            this.put(key, (Object)( new JSONArraytron() ).put(value));\n        }\n        else {\n            if ( !(object instanceof JSONArray) ) {\n                throw new JSONException(\"JSONObject[\" + key + \"] is not a JSONArray.\");\n            }\n\n            this.put(key, (Object)((JSONArray)object).put(value));\n        }\n\n        return this;\n    }\n\n\n    protected abstract Object innerMapGet( Object key ) ;\n\n    @Override\n    public Object get( Object key ) {\n        Object val = this.innerMapGet(key);\n        if ( val == null && ( key instanceof Number || key instanceof Character || key instanceof Boolean || key instanceof UUID ) ) {\n            val = this.innerMapGet( key.toString() );\n        }\n\n        return val;\n    }\n\n    @Override\n    public Object get( String key ) throws JSONException {\n        if ( key == null ) {\n            throw new JSONException(\"Null key.\");\n        }\n        else {\n            Object object = this.opt(key);\n            if (object == null) {\n                throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] not found.\");\n            }\n            else {\n                return object;\n            }\n        }\n    }\n\n    @Override\n    public boolean getBoolean( String key ) throws JSONException {\n        Object object = this.get(key);\n        if (!object.equals(Boolean.FALSE) && (!(object instanceof String) || 
!((String)object).equalsIgnoreCase(\"false\"))) {\n            if (!object.equals(Boolean.TRUE) && (!(object instanceof String) || !((String)object).equalsIgnoreCase(\"true\"))) {\n                throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] is not a Boolean.\");\n            } else {\n                return true;\n            }\n        } else {\n            return false;\n        }\n    }\n\n    @Override\n    public double getDouble( String key ) throws JSONException {\n        Object object = this.get(key);\n\n        try {\n            return object instanceof Number ? ((Number)object).doubleValue() : Double.parseDouble((String)object);\n        }\n        catch (Exception e) {\n            throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] is not a number.\");\n        }\n    }\n\n    @Override\n    public int getInt( String key ) throws JSONException {\n        Object object = this.get(key);\n\n        try {\n            return object instanceof Number ? 
((Number)object).intValue() : Integer.parseInt((String)object);\n        }\n        catch (Exception e) {\n            throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] is not an int.\");\n        }\n    }\n\n    @Override\n    public JSONArray getJSONArray  ( String key ) throws JSONException {\n        Object object = this.get(key);\n        if ( object instanceof JSONArray ) {\n            return (JSONArray)object;\n        }\n        else {\n            throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] is not a JSONArray.\");\n        }\n    }\n\n    @Override\n    public JSONObject getJSONObject( String key ) throws JSONException {\n        Object object = this.get(key);\n        if ( object instanceof JSONObject ) {\n            return (JSONObject)object;\n        }\n        else {\n            throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] is not a JSONObject.\");\n        }\n    }\n\n    @Override\n    public long getLong( String key ) throws JSONException {\n        Object object = this.get(key);\n\n        try {\n            return object instanceof Number ? 
((Number)object).longValue() : Long.parseLong((String)object);\n        }\n        catch (Exception e) {\n            throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] is not a long.\");\n        }\n    }\n\n    @Override\n    public String getString( String key ) throws JSONException {\n        Object object = this.get(key);\n        if ( object instanceof String ) {\n            return (String)object;\n        }\n        else {\n            throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] not a string.\");\n        }\n    }\n\n    @Override\n    public byte[] getBytes( String key ) throws JSONException {\n        Object object = this.get(key);\n        if ( object instanceof String ) {\n            return ( (String)object ).getBytes();\n        }\n        else if ( object instanceof byte[] ) {\n            return (byte[])( (byte[])object );\n        }\n        else {\n            throw new JSONException(\"JSONObject[\" + StringUtils.jsonQuote(key) + \"] not a string nor bytes.\");\n        }\n    }\n\n    @Override\n    public JSONArray affirmArray( String key ) {\n        Object o = this.opt(key);\n        if( o instanceof JSONArray ){\n            return (JSONArray)o;\n        }\n        JSONArray jNew = new JSONArraytron();\n        this.put( key, jNew );\n        return jNew;\n    }\n\n    @Override\n    public JSONObject affirmObject(String key ) {\n        Object o = this.opt( key );\n        if( o instanceof JSONObject ){\n            return (JSONObject) o;\n        }\n        JSONObject jNew = new JSONMaptron();\n        this.put( key, jNew );\n        return jNew;\n    }\n\n    @Override\n    public Object affirm( String key ) {\n        if( this.containsKey( key ) ){\n            return this.opt(key);\n        }\n\n        Object o = JSON.NULL;\n        this.put( key, o );\n        return o;\n    }\n\n    @Override\n    public Object opt( String key ) {\n        return key == null ? 
null : this.innerMapGet( key );\n    }\n\n    @Override\n    public boolean optBoolean( String key ) {\n        return this.optBoolean(key, false);\n    }\n\n    @Override\n    public boolean optBoolean( String key, boolean defaultValue ) {\n        try {\n            return this.getBoolean(key);\n        }\n        catch (Exception e) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public double optDouble( String key ) {\n        return this.optDouble( key, Double.NaN );\n    }\n\n    @Override\n    public double optDouble( String key, double defaultValue ) {\n        try {\n            return this.getDouble(key);\n        }\n        catch ( Exception e ) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public int optInt( String key ) {\n        return this.optInt(key, 0);\n    }\n\n    @Override\n    public int optInt( String key, int defaultValue ) {\n        try {\n            return this.getInt(key);\n        }\n        catch (Exception e) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public JSONArray optJSONArray( String key) {\n        Object o = this.opt(key);\n        return o instanceof JSONArray ? (JSONArray)o : null;\n    }\n\n    @Override\n    public JSONObject optJSONObject( String key) {\n        Object object = this.opt(key);\n        return object instanceof JSONObject ? 
(JSONObject)object : null;\n    }\n\n    @Override\n    public long optLong( String key ) {\n        return this.optLong(key, 0L);\n    }\n\n    @Override\n    public long optLong( String key, long defaultValue ) {\n        try {\n            return this.getLong(key);\n        }\n        catch ( Exception e ) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public String optString( String key ) {\n        return this.optString(key, \"\");\n    }\n\n    @Override\n    public String optString( String key, String defaultValue ) {\n        Object object = this.opt(key);\n        return JSON.NULL.equals(object) ? defaultValue : object.toString();\n    }\n\n    @Override\n    public byte[] optBytes( String key ) {\n        return this.optBytes( key, \"\".getBytes() );\n    }\n\n    @Override\n    public byte[] optBytes( String key, byte[] defaultValue ) {\n        try {\n            return this.getBytes( key );\n        }\n        catch ( Exception e ) {\n            return defaultValue;\n        }\n    }\n\n    @Override\n    public Object opt( Object key ) {\n        try{\n            return this.opt( JSONUtils.asStringKey( key ) );\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public boolean optBoolean( Object key ) {\n        try {\n            return this.optBoolean(JSONUtils.asStringKey(key));\n        }\n        catch (Exception e) {\n            return false;\n        }\n    }\n\n    @Override\n    public double optDouble( Object key ) {\n        try {\n            return this.optDouble(JSONUtils.asStringKey(key));\n        }\n        catch ( Exception e ) {\n            return Double.NaN;\n        }\n    }\n\n    @Override\n    public int optInt( Object key ) {\n        try {\n            return this.optInt(JSONUtils.asStringKey(key));\n        }\n        catch ( Exception e ) {\n            return Integer.MAX_VALUE;\n        }\n    }\n\n    @Override\n    public JSONArray 
optJSONArray( Object key ) {\n        try {\n            return this.optJSONArray( JSONUtils.asStringKey(key) );\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public JSONObject optJSONObject( Object key ) {\n        try {\n            return this.optJSONObject( JSONUtils.asStringKey(key) );\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public long optLong( Object key ) {\n        try {\n            return this.optLong(JSONUtils.asStringKey(key));\n        }\n        catch ( Exception e ) {\n            return Long.MAX_VALUE;\n        }\n    }\n\n    @Override\n    public String optString( Object key ) {\n        try {\n            return this.optString(JSONUtils.asStringKey(key));\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public byte[] optBytes( Object key ) {\n        try {\n            return this.optBytes(JSONUtils.asStringKey(key));\n        }\n        catch (Exception e) {\n            return null;\n        }\n    }\n\n\n\n\n    @Override\n    public JSONObject increment( String key ) throws JSONException {\n        Object value = this.opt(key);\n        if (value == null) {\n            this.put(key, 1);\n        }\n        else if (value instanceof Integer) {\n            this.put(key, (Integer)value + 1);\n        }\n        else if (value instanceof Long) {\n            this.put(key, (Long)value + 1L);\n        }\n        else if (value instanceof Double) {\n            this.put(key, (Double)value + 1.0D);\n        }\n        else {\n            if (!(value instanceof Float)) {\n                throw new JSONException(\"Unable to increment [\" + StringUtils.jsonQuote(key) + \"].\");\n            }\n\n            this.put(key, (double)((Float)value + 1.0F));\n        }\n\n        return this;\n    }\n\n    @Override\n    public boolean isNull( String key ) {\n      
  return JSON.NULL.equals(this.opt(key));\n    }\n\n    @Override\n    public Iterator keys() {\n        return this.keySet().iterator();\n    }\n\n    @Override\n    public JSONArray names() {\n        JSONArray ja = new JSONArraytron();\n        Iterator keys = this.keys();\n\n        while( keys.hasNext() ) {\n            ja.put( keys.next() );\n        }\n\n        return ja.length() == 0 ? null : ja;\n    }\n\n    @Override\n    public String[] getOwnPropertyNames () {\n        return JSONUtils.getOwnPropertyNames( this );\n    }\n\n\n    protected abstract Object innerMapPut( String key, Object value );\n\n    @Override\n    public JSONObject insert( Object key, Object value ) {\n        return this.put( key.toString(), value );\n    }\n\n    @Override\n    public Object putIfAbsent( String key, Object value ) {\n        return this.getMap().putIfAbsent( key, value );\n    }\n\n    @Override\n    public Object insertIfAbsent( Object key, Object value ) {\n        return this.putIfAbsent( key.toString(), value );\n    }\n\n\n\n    @Override\n    public JSONObject put( String key, boolean value ) throws JSONException {\n        this.put(key, (Object)(value ? 
Boolean.TRUE : Boolean.FALSE));\n        return this;\n    }\n\n    @Override\n    public JSONObject put( String key, Collection value ) throws JSONException {\n        this.put(key, (Object)(new JSONArraytron(value)));\n        return this;\n    }\n\n    @Override\n    public JSONObject put( String key, double value ) throws JSONException {\n        this.put( key, (Double) value );\n        return this;\n    }\n\n    @Override\n    public JSONObject put( String key, int value ) throws JSONException {\n        this.put( key, (Integer)value );\n        return this;\n    }\n\n    @Override\n    public JSONObject put( String key, long value ) throws JSONException {\n        this.put(key, (Object) (Long) value );\n        return this;\n    }\n\n    @Override\n    public abstract JSONObject put( String key, Map value ) throws JSONException ;\n\n    @Override\n    public JSONObject put( String key, JSONArray value ) throws JSONException {\n        this.innerMapPut( key, value );\n        return this;\n    }\n\n    @Override\n    public JSONObject put( String key, JSONObject value ) throws JSONException {\n        this.innerMapPut( key, value );\n        return this;\n    }\n\n    @Override\n    public JSONObject put( String key, Object value ) throws JSONException {\n        if ( key == null ) {\n            throw new NullPointerException( \"Null key.\" );\n        }\n        else {\n            if ( value != null ) {\n                JSONUtils.prospectNumberQualify( value );\n                this.innerMapPut( key, value );\n            }\n            else {\n                this.remove( key );\n            }\n            return this;\n        }\n    }\n\n    @Override\n    public JSONObject embed( String key, Object value ) throws JSONException {\n        if ( key == null ) {\n            throw new NullPointerException(\"Null key.\");\n        }\n        else {\n            if ( value != null ) {\n                JSONUtils.prospectNumberQualify(value);\n                
this.innerMapPut( key, value );\n            }\n            else {\n                this.innerMapPut( key, JSON.NULL );\n            }\n            return this;\n        }\n    }\n\n    @Override\n    public JSONObject putOnce( String key, Object value ) throws JSONException {\n        if ( key != null && value != null ) {\n            if ( this.opt(key) != null ) {\n                throw new JSONException(\"Duplicate key \\\"\" + key + \"\\\"\");\n            }\n\n            this.put(key, value);\n        }\n\n        return this;\n    }\n\n    @Override\n    public JSONObject putOpt( String key, Object value ) throws JSONException {\n        if (key != null && value != null) {\n            this.put(key, value);\n        }\n\n        return this;\n    }\n\n\n    protected abstract Object innerMapRemove( String key );\n\n    @Override\n    public Object remove( String key ) {\n        return this.innerMapRemove(key);\n    }\n\n    @Override\n    public JSONObject removeAll( Collection<String > keys ) {\n        for( String key : keys ) {\n            this.remove( key );\n        }\n        return this;\n    }\n\n    @Override\n    public JSONObject removeAll( String[] keys ) {\n        for( String key : keys ) {\n            this.remove( key );\n        }\n        return this;\n    }\n\n    @Override\n    public JSONArray toJSONArray( JSONArray names ) throws JSONException {\n        if (names != null && names.length() != 0) {\n            JSONArray ja = new JSONArraytron();\n\n            for( int i = 0; i < names.length(); ++i ) {\n                ja.put(this.opt(names.getString(i)));\n            }\n\n            return ja;\n        } else {\n            return null;\n        }\n    }\n\n    @Override\n    public JSONArray toJSONArray() {\n        JSONArray jRegressed = new JSONArraytron();\n\n        for ( Object obj : this.entrySet() ) {\n            Map.Entry kv = ( Map.Entry ) obj;\n            jRegressed.put( kv.getValue() );\n        }\n\n        return 
jRegressed;\n    }\n\n    @Override\n    public JSONObject toJSONObject() {\n        return this;\n    }\n\n    @Override\n    public abstract Map.Entry<String, Object > front() ;\n\n    @Override\n    public abstract Map.Entry<String, Object > back() ;\n\n\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        try {\n            return this.toJSONString( 0 );\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public String toJSONStringI( int nIndentFactor ) {\n        try {\n            return this.toJSONString(nIndentFactor);\n        }\n        catch (Exception e) {\n            return null;\n        }\n    }\n\n    @Override\n    public String toJSONString( int nIndentFactor ) throws IOException {\n        StringWriter w = new StringWriter();\n        synchronized( w.getBuffer() ) {\n            return this.write( w, nIndentFactor,0 ).toString();\n        }\n    }\n\n    @Override\n    public TypeIndex prototype() {\n        return Prototype.typeid( this );\n    }\n\n    @Override\n    public String prototypeName() {\n        return Prototype.prototypeName( this );\n    }\n\n    @Override\n    public boolean isPrototypeOf  ( TypeIndex that ) {\n        return that.equals( this.prototype() );\n    }\n\n    @Override\n    public boolean hasOwnProperty ( Object key ) {\n        return this.containsKey( key );\n    }\n\n    @Override\n    public Map<?, Object > toMap(){\n        return this;\n    }\n\n    @Override\n    public List<Object > toList(){\n        return this.toJSONArray();\n    }\n\n\n    @Override\n    public JSONObject clone() {\n        try {\n            return (JSONObject) super.clone();\n        }\n        catch ( CloneNotSupportedException e ) {\n            // this shouldn't happen, since we are Cloneable\n            throw new InternalError(e);\n        }\n    }\n\n    @Override\n    
public Writer write( Writer writer ) throws IOException {\n        return this.write( writer, 0, 0 );\n    }\n\n    @Override\n    public Writer write( Writer writer, int nIndentFactor ) throws IOException {\n        return this.write( writer, nIndentFactor, 0 );\n    }\n\n    @Override\n    public abstract Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/CustomizableJSONCursorParser.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport java.io.InputStream;\nimport java.io.Reader;\nimport java.lang.reflect.InvocationTargetException;\n\npublic class CustomizableJSONCursorParser extends ArchCursorParser {\n    protected Class<? extends JSONObject > mJSONObjectClass;\n    protected Class<? extends JSONArray >  mJSONArrayClass;\n\n    public CustomizableJSONCursorParser( Reader reader, Class<? extends JSONObject > jObjectClass, Class<? extends JSONArray > jArrayClass ) {\n        super( reader );\n\n        this.mJSONObjectClass = jObjectClass;\n        this.mJSONArrayClass  = jArrayClass;\n    }\n\n    public CustomizableJSONCursorParser( InputStream inputStream, Class<? extends JSONObject> jObjectClass, Class<? extends JSONArray > jArrayClass ) throws JSONParseException {\n        super( inputStream );\n\n        this.mJSONObjectClass = jObjectClass;\n        this.mJSONArrayClass  = jArrayClass;\n    }\n\n    public CustomizableJSONCursorParser( String s, Class<? extends JSONObject > jObjectClass, Class<? 
extends JSONArray > jArrayClass ) {\n        super( s );\n\n        this.mJSONObjectClass = jObjectClass;\n        this.mJSONArrayClass  = jArrayClass;\n    }\n\n    @Override\n    protected JSONArray newJSONArray( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) {\n        try {\n            return this.mJSONArrayClass.getDeclaredConstructor().newInstance();\n        }\n        catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) {\n            return null;\n        }\n    }\n\n    @Override\n    protected JSONObject newJSONObject( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) {\n        try {\n            return this.mJSONObjectClass.getDeclaredConstructor().newInstance();\n        }\n        catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) {\n            return null;\n        }\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/Dictson.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.unit.Dictionary;\n\npublic interface Dictson extends Dictionary<Object >, JSONDictium {\n\n    default Object insert( Object key, Object value ) {\n        if( this.isList() ) {\n            int index = JSONUtils.asInt32Key( key );\n            if( index >= 0 ) {\n                if( index == this.getList().size() ){\n                    return this.getList().put( value );\n                }\n            }\n\n            this.convertToMap();\n        }\n\n        return this.getMap().put( JSONUtils.asStringKey(key), value );\n    }\n\n    JSONObject  affirmMap() ;\n\n    JSONArray   affirmList() ;\n\n    JSONObject  resetAsMap() ;\n\n    JSONArray   resetAsList() ;\n\n    JSONObject  getMap()  throws ClassCastException ;\n\n    JSONArray   getList() throws ClassCastException ;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/GenericJSONEncoder.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.system.prototype.PinenutTraits;\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.io.IOException;\nimport java.io.Writer;\nimport java.lang.reflect.Array;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.Map;\n\npublic class GenericJSONEncoder implements JSONEncoder {\n    public static void beforeJsonElementWrote( Writer writer, int nIndentFactor, int nIndentBlankNum, boolean bHasNextElement ) throws IOException {\n        if ( bHasNextElement ) {\n            writer.write(',');\n        }\n\n        if ( nIndentFactor > 0 ) {\n            writer.write('\\n');\n        }\n\n        GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n    }\n\n    public static void indentBlank( Writer writer, int nIndentBlankNum ) throws IOException {\n        for( int i = 0; i < nIndentBlankNum; ++i ) {\n            writer.write(' ' );\n        }\n    }\n\n    public GenericJSONEncoder() { }\n\n    protected Writer writeUnidentifiedObject ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        StringUtils.addSlashes( String.format(\n                PinenutTraits.OBJ_STRINGIFY_DEFAULT,\n                that.getClass().getName() + \"(0x\" + Integer.toHexString( that.hashCode() ) + \")\"\n        ), writer, true );\n\n        return writer;\n    }\n\n    protected Writer writeUnknownAnyObject   ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            String szJsonString = \"\";\n\n            try {\n                szJsonString = PinenutTraits.invokeToJSONString( that, nIndentFactor, nIndentBlankNum );\n            }\n            catch ( Exception e ){\n                try {\n                    szJsonString = PinenutTraits.invokeToJSONString( that );\n                
}\n                catch ( Exception e1 ){\n                    try{\n                        szJsonString = PinenutTraits.invokeCaseToString( that, null );\n                        StringUtils.addSlashes( szJsonString, writer, true );\n                        return writer;\n                    }\n                    catch ( IllegalArgumentException ea ) {\n                        return this.writeUnidentifiedObject( that, writer, nIndentFactor, nIndentBlankNum ) ;\n                    }\n                }\n            }\n\n            writer.write( szJsonString );\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n        return writer;\n    }\n\n    @Override\n    public Writer write                    ( Pinenut that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            writer.write( that.toJSONString() );\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n        return writer;\n    }\n\n    @Override\n    public Writer write                    ( JSONObject that, Writer writer ) throws IOException {\n        return this.write( that, writer,0,0  );\n    }\n\n    @Override\n    public Writer write                    ( JSONObject that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            that.write( writer, nIndentFactor, nIndentBlankNum  );\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n        return writer;\n    }\n\n    @Override\n    public Writer write                    ( JSONArray that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            that.write( writer, nIndentFactor, nIndentBlankNum  );\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n      
  return writer;\n    }\n\n    @Override\n    public void writeKeyValue              ( Writer writer, Object key, Object val, int nIndentFactor, int nIndentBlankNum ) throws JSONException, IOException {\n        writer.write( StringUtils.jsonQuote( key.toString() ) );\n        writer.write(':');\n        if ( nIndentFactor > 0 ) {\n            writer.write( ' ');\n        }\n\n        this.write( val, writer, nIndentFactor, nIndentBlankNum  );\n    }\n\n\n\n    @Override\n    public Writer writeMapFmtEntries       ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            boolean bHasNextElement = false;\n            int length = that.size();\n            Iterator iter = that.iterator();\n            writer.write('{');\n            if ( length == 1 ) {\n                Object o = iter.next();\n                Map.Entry kv = (Map.Entry) o;\n                this.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nIndentBlankNum  );\n            }\n            else if ( length != 0 ) {\n                for( int nNewIndent = nIndentBlankNum + nIndentFactor; iter.hasNext(); bHasNextElement = true ) {\n                    GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement );\n                    Object o = iter.next();\n                    Map.Entry kv = (Map.Entry) o;\n                    this.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nNewIndent  );\n                }\n\n                if ( nIndentFactor > 0 ) {\n                    writer.write( '\\n' );\n                }\n\n                GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n            }\n\n            writer.write( '}' );\n            return writer;\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n        return writer;\n    }\n\n    @Override\n    public Writer writeArray      
         ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            boolean bHasNextElement = false;\n\n            int length = Array.getLength( that );\n            writer.write('[');\n\n            if ( length == 1 ) {\n                this.write( Array.get( that, 0 ), writer, nIndentFactor, nIndentBlankNum  );\n            }\n            else if ( length != 0 ) {\n                int nNewIndent = nIndentBlankNum + nIndentFactor;\n\n                for( int i = 0; i < length; ++i ) {\n                    GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement );\n                    this.write( Array.get( that, i ),writer, nIndentFactor, nNewIndent  );\n                    bHasNextElement = true;\n                }\n\n                if ( nIndentFactor > 0 ) {\n                    writer.write( '\\n' );\n                }\n\n                GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n            }\n\n            writer.write(']');\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n        return writer;\n    }\n\n    @Override\n    public Writer write                    ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            boolean bHasNextElement = false;\n\n            int length = that.size();\n            writer.write('[');\n            Iterator iter = that.iterator();\n\n            if ( length == 1 ) {\n                this.write( iter.next(),writer , nIndentFactor, nIndentBlankNum  );\n            }\n            else if ( length != 0 ) {\n                int nNewIndent = nIndentBlankNum + nIndentFactor;\n\n                while( iter.hasNext() ) {\n                    GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement );\n                    
this.write( iter.next(), writer, nIndentFactor, nNewIndent  );\n                    bHasNextElement = true;\n                }\n\n                if ( nIndentFactor > 0 ) {\n                    writer.write( '\\n' );\n                }\n                GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n            }\n\n            writer.write(']');\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n        return writer;\n    }\n\n    public Writer write                    ( Map.Entry that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            writer.write('{');\n            this.writeKeyValue( writer, that.getKey(), that.getValue(), nIndentFactor, nIndentBlankNum  );\n            writer.write( '}' );\n            return writer;\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n        return writer;\n    }\n\n    @Override\n    public Writer write                    ( Object that, Writer writer ) throws IOException {\n        return this.write( that, writer, 0, 0 );\n    }\n\n    @Override\n    public Writer write                    ( Object that, Writer writer, int nIndentFactor ) throws IOException {\n        return this.write( that, writer, nIndentFactor, 0 );\n    }\n\n    @Override\n    public Writer write                    ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that != null ) {\n            if ( that instanceof JSONObject ) {\n                ((JSONObject)that).write(writer, nIndentFactor, nIndentBlankNum );\n            }\n            else if ( that instanceof JSONArray ) {\n                ((JSONArray)that).write(writer, nIndentFactor, nIndentBlankNum );\n            }\n            else if ( that == JSON.NULL ) {\n                writer.write( that.toString() );\n            }\n            else if ( that 
instanceof String ){\n                StringUtils.addSlashes( (String) that, writer, true );\n            }\n            else if ( that instanceof Map ) {\n                this.write( (Map) that, writer, nIndentFactor, nIndentBlankNum  );\n                //(new JSONMaptron((Map)jsonValue, true)).write(writer, nIndentFactor, nIndentBlankNum );\n            }\n            else if ( that instanceof Collection ) {\n                this.write( (Collection)that, writer, nIndentFactor, nIndentBlankNum  );\n                //(new JSONArraytron((Collection)jsonValue)).write(writer, nIndentFactor, nIndentBlankNum );\n            }\n            else if ( that.getClass().isArray() ) {\n                this.writeArray( that, writer, nIndentFactor, nIndentBlankNum  );\n                //(new JSONArraytron(jsonValue)).write(writer, nIndentFactor, nIndentBlankNum );\n            }\n            else if ( that instanceof Number ) {\n                writer.write( JSONUtils.numberToString((Number)that) );\n            }\n            else if ( that instanceof Boolean ) {\n                writer.write(that.toString());\n            }\n            else if ( that instanceof JSONString ) {\n                String o;\n                try {\n                    o = ((JSONString)that).toJSONString();\n                }\n                catch ( Exception e ) {\n                    throw new JSONException(e);\n                }\n\n                writer.write( o != null ? 
o.toString() : StringUtils.jsonQuote(that.toString()) );\n            }\n            else if ( that instanceof Map.Entry ) {\n                this.write( (Map.Entry) that, writer, nIndentFactor, nIndentBlankNum  );\n            }\n            else if ( that instanceof Pinenut ){\n                this.write( (Pinenut)that, writer, nIndentFactor, nIndentBlankNum  );\n            }\n            else {\n                this.writeUnknownAnyObject( that, writer, nIndentFactor, nIndentBlankNum  );\n            }\n        }\n        else {\n            writer.write( JSONEncoder.JSON_OBJ_NULL_DEFAULT );\n        }\n        return writer;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/GenericJSONMarshal.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.json.handler.EncodeHandlerRegistry;\nimport com.pinecone.framework.util.json.handler.GenericEncodeHandlerRegistry;\nimport com.pinecone.framework.util.json.handler.JSONObjectEncodeHandler;\nimport com.pinecone.framework.util.json.homotype.AnnotatedJSONInjector;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.framework.util.json.homotype.GenericBeanJSONEncoder;\n\nimport java.io.IOException;\nimport java.io.StringWriter;\nimport java.io.Writer;\nimport java.lang.reflect.Field;\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class GenericJSONMarshal extends GenericJSONEncoder implements JSONMarshal {\n\n    protected long                   mnMode;\n    protected BeanJSONEncoder        mBeanEncoder;\n    protected EncodeHandlerRegistry  mEncodeHandlerRegistry;\n\n\n    public GenericJSONMarshal( long mode, @Nullable BeanJSONEncoder beanEncoder, @Nullable EncodeHandlerRegistry registry ) {\n        super();\n        this.mnMode = mode;\n\n        if ( beanEncoder == null ) {\n            this.mBeanEncoder = new RecursiveBeanJSONEncoder( this );\n        }\n        else {\n            this.mBeanEncoder = beanEncoder;\n        }\n\n        if ( registry == null ) {\n            this.mEncodeHandlerRegistry = new GenericEncodeHandlerRegistry();\n        }\n        else {\n            this.mEncodeHandlerRegistry = registry;\n        }\n    }\n\n    public GenericJSONMarshal( long mode ) {\n        this( mode, null, null );\n    }\n\n    public GenericJSONMarshal() {\n        this( JSONMarshalMode.MODE_DEFAULT );\n    }\n\n\n    @Override\n    public void setMode( long mode ) {\n        this.mnMode = mode;\n    }\n\n    @Override\n    public long getMode() {\n        return this.mnMode;\n    }\n\n    @Override\n    public void 
setBeanEncoder( BeanJSONEncoder encoder ) {\n        this.mBeanEncoder = encoder;\n    }\n\n    @Override\n    public BeanJSONEncoder getBeanEncoder() {\n        return this.mBeanEncoder;\n    }\n\n    @Override\n    public void setEncodeHandlerRegistry( EncodeHandlerRegistry registry ) {\n        this.mEncodeHandlerRegistry = registry;\n    }\n\n    @Override\n    public EncodeHandlerRegistry getEncodeHandlerRegistry() {\n        return this.mEncodeHandlerRegistry;\n    }\n\n\n    public <T> void registerEncodeHandler( Class<T> type, JSONObjectEncodeHandler<? super T> handler ) {\n        this.mEncodeHandlerRegistry.register( type, handler );\n    }\n\n    protected boolean tryCustomEncodeHandler( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( that == null ) {\n            return false;\n        }\n\n        JSONObjectEncodeHandler<Object> handler = this.mEncodeHandlerRegistry.get( that.getClass() );\n        if ( handler == null ) {\n            return false;\n        }\n\n        handler.serialize( that, writer, nIndentFactor, nIndentBlankNum, this );\n\n        return true;\n    }\n\n    protected boolean tryBeanMode( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( ( this.mnMode & JSONMarshalMode.MODE_BEAN_GETTER ) == 0L ) {\n            return false;\n        }\n\n        this.mBeanEncoder.encode( that, writer, nIndentFactor, nIndentBlankNum );\n\n        return true;\n    }\n\n    protected void collectAnnotatedFields( Object that, List<Object[]> list ) {\n        Field[] fields = that.getClass().getDeclaredFields();\n\n        for ( Field field : fields ) {\n            ReflectionUtils.makeAccessible( field );\n            String szKey = AnnotatedJSONInjector.getAnnotatedKey( field );\n            if ( szKey == null ) {\n                continue;\n            }\n\n            if ( szKey.isEmpty() ) {\n                szKey = field.getName();\n     
       }\n\n            Object value;\n            try {\n                value = field.get( that );\n            }\n            catch ( IllegalAccessException e ) {\n                value = null;\n            }\n\n            list.add( new Object[]{ szKey, field, value } );\n        }\n    }\n\n    protected void collectAnyFields( Object that, List<Object[]> list ) {\n        Field[] fields = that.getClass().getDeclaredFields();\n        for ( Field field : fields ) {\n            ReflectionUtils.makeAccessible( field );\n\n            String szKey = field.getName();\n\n            Object value;\n            try {\n                value = field.get( that );\n            }\n            catch ( IllegalAccessException e ) {\n                value = null;\n            }\n\n            list.add( new Object[]{ szKey, field, value } );\n        }\n    }\n\n    protected void collectPublicFields( Object that, List<Object[]> list ) {\n        Field[] fields = that.getClass().getFields();\n\n        for ( Field field : fields ) {\n            String szKey = field.getName();\n\n            Object value;\n            try {\n                value = field.get( that );\n            }\n            catch ( IllegalAccessException e ) {\n                value = null;\n            }\n\n            list.add( new Object[]{ szKey, field, value } );\n        }\n    }\n\n\n    @Override\n    public Writer writeUnidentifiedObject( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        if ( this.tryCustomEncodeHandler( that, writer, nIndentFactor, nIndentBlankNum ) ) {\n            return writer;\n        }\n\n        if ( this.tryBeanMode( that, writer, nIndentFactor, nIndentBlankNum ) ) {\n            return writer;\n        }\n\n        List<Object[]> list = new ArrayList<>();\n        if ( ( this.mnMode & JSONMarshalMode.MODE_ANNOTATED_FIELD ) != 0L ) {\n            this.collectAnnotatedFields( that, list );\n        }\n        if ( ( 
this.mnMode & JSONMarshalMode.MODE_ANY_FIELD ) != 0L ) {\n            this.collectAnyFields( that, list );\n        }\n        if ( ( this.mnMode & JSONMarshalMode.MODE_PUBLIC_FIELD ) != 0L ) {\n            this.collectPublicFields( that, list );\n        }\n\n        if ( list.isEmpty() ) {\n            return super.writeUnidentifiedObject( that, writer, nIndentFactor, nIndentBlankNum);\n        }\n\n        writer.write( '{' );\n        boolean bHasNextElement = false;\n        int nNewIndent = nIndentBlankNum + nIndentFactor;\n\n        int i = 0;\n        for ( Object[] item : list ) {\n            GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement );\n            this.writeKeyValue( writer, item[0], item[2], nIndentFactor, nIndentBlankNum );\n            bHasNextElement = true;\n            ++i;\n        }\n\n        if ( nIndentFactor > 0 ) {\n            writer.write( '\\n' );\n        }\n\n        GenericJSONEncoder.indentBlank( writer, nIndentBlankNum);\n        writer.write( '}' );\n        return writer;\n    }\n\n\n    public static class RecursiveBeanJSONEncoder extends GenericBeanJSONEncoder {\n        protected GenericJSONMarshal mJSONMarshal;\n\n        public RecursiveBeanJSONEncoder( GenericJSONMarshal marshal ) {\n            this.mJSONMarshal = marshal;\n        }\n\n        @Override\n        public String valueJsonify( Object val ) {\n            StringWriter w = new StringWriter();\n            try {\n                synchronized( w.getBuffer() ) {\n                    this.valueJsonify( val, w, 0,0 );\n                    return w.toString();\n                }\n            }\n            catch ( IOException e ){\n                return null;\n            }\n        }\n\n        @Override\n        public void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n            this.mJSONMarshal.write( val, writer, nIndentFactor, nIndentBlankNum );\n     
   }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JPlus.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.FamilyContext;\n\npublic abstract class JPlus {\n    public static Object parse ( String szJsonString ) {\n        return ( new JPlusCursorParser( szJsonString, new JPlusContext() ) ).nextValue();\n    }\n\n    public static Object parse ( String szJsonString, FamilyContext context ) {\n        return ( new JPlusCursorParser( szJsonString, context ) ).nextValue();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JPlusContext.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.OverridableFamily;\n\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\n\npublic class JPlusContext implements OverridableFamily, Cloneable {\n    protected List<Object > mGlobalScopes;\n    protected Object        mParent;\n    protected Object        mThisScope;\n    protected Object        mRoot;\n    protected Path[]        mParentPaths;\n    protected boolean       mOverriddenAffinity;\n\n    public JPlusContext() {\n        this( null, null, null, new Path[0] );\n    }\n\n    public JPlusContext( Object parent, Object thisScope, Object root, Path[] parentPaths ) {\n        this( new ArrayList<>(), parent, thisScope, root, parentPaths );\n    }\n\n    public JPlusContext( List<Object > globalScopes, Object parent, Object thisScope, Object root, Path[] parentPaths ) {\n        this.mGlobalScopes  = globalScopes;\n        this.mParent        = parent;\n        this.mThisScope     = thisScope;\n        this.mRoot          = root;\n        this.mParentPaths   = parentPaths;\n    }\n\n    public JPlusContext( Object globalScope, Object parent, Object thisScope, Object root, Path[] parentPaths ) {\n        this( parent, thisScope, root, parentPaths );\n        this.addGlobalScope( globalScope );\n    }\n\n    @Override\n    public List<Object > getGlobalScopes() {\n        return this.mGlobalScopes;\n    }\n\n    @Override\n    public Object parent() {\n        return this.mParent;\n    }\n\n    @Override\n    public Object thisScope() {\n        return this.mThisScope;\n    }\n\n    @Override\n    public Object root() {\n        return this.mRoot;\n    }\n\n    @Override\n    public Path[] getParentPaths() {\n        return this.mParentPaths;\n    }\n\n    @Override\n    public JPlusContext setGlobalScopes(List<Object > globalScopes) {\n        this.mGlobalScopes = globalScopes;\n        return this;\n  
  }\n\n    @Override\n    public JPlusContext setParent(Object parent) {\n        this.mParent = parent;\n        return this;\n    }\n\n    @Override\n    public JPlusContext setThisScope(Object thisScope) {\n        this.mThisScope = thisScope;\n        return this;\n    }\n\n    @Override\n    public JPlusContext setRoot(Object root) {\n        this.mRoot = root;\n        return this;\n    }\n\n    @Override\n    public JPlusContext setParentPaths(Path[] parentPaths) {\n        this.mParentPaths = parentPaths;\n        return this;\n    }\n\n    @Override\n    public JPlusContext addParentPath( Path newPath ) {\n        int length = this.mParentPaths.length;\n        Path[] newParentPaths = Arrays.copyOf( this.mParentPaths, length + 1 );\n        newParentPaths[length] = newPath;\n        this.mParentPaths = newParentPaths;\n        return this;\n    }\n\n    @Override\n    public JPlusContext addGlobalScope( Object scope ) {\n        this.getGlobalScopes().add( scope );\n        return this;\n    }\n\n    /**\n     * isOverriddenAffinity\n     * @return if $this, $super and $root, are forced overridden by global scope.\n     */\n    @Override\n    public boolean isOverriddenAffinity() {\n        return this.mOverriddenAffinity;\n    }\n\n    @Override\n    public void setOverriddenAffinity( boolean overrideAffinity ) {\n        this.mOverriddenAffinity = overrideAffinity;\n    }\n\n    @Override\n    public JPlusContext clone() {\n        JPlusContext clone;\n        try {\n            clone = (JPlusContext) super.clone();\n        }\n        catch ( CloneNotSupportedException e ) {\n            throw new InternalError(e);\n        }\n\n        return clone;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JPlusCursorParser.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.ErrorStrings;\nimport com.pinecone.framework.system.prototype.FamilyContext;\nimport com.pinecone.framework.system.prototype.OverridableFamily;\nimport com.pinecone.framework.unit.MultiScopeMap;\nimport com.pinecone.framework.unit.MultiScopeMaptron;\nimport com.pinecone.framework.util.template.TemplateParser;\n\nimport java.io.*;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.TreeMap;\n\n/**\n *  Pinecone For Java JPlusCursorParser [ Bean Nuts Almond Dragon, JSON+ For Pinecone Java ]\n *  Copyright © 2008 - 2024 Bean Nuts Foundation ( Dragon King ) All rights reserved. [Harald.E / JH.W]\n *  *****************************************************************************************\n *  Author: undefined\n *  Last Modified Date: 2024-02-17\n *  *****************************************************************************************\n *  JSON Plus is an enhanced JSON5 Edition\n *  {\n *      \"parent\": { k1: 123 },\n *      \"next\": {\n *         key: #include \"path\",\n *        key2: #\"${this.key}\",\n *        key3: #\"${root.parent.k1}\", // key3: #\"${parent.k1}\",\n *        key4: \"normal-string\", // key4: normal-string, \"key4\": normal-string, 'key4': normal-string\n *        key5: #extends root.parent,\n *      }\n *  }\n *  Support: JSON, JSON5, JSON Plus\n *  *****************************************************************************************\n */\npublic class JPlusCursorParser extends JSONCursorParser {\n    protected FamilyContext mScopeContext;\n\n    public JPlusCursorParser( Reader reader, FamilyContext scopeContext ) {\n        super(reader);\n        this.mScopeContext      = scopeContext;\n    }\n\n    public JPlusCursorParser( InputStream inputStream, FamilyContext scopeContext ) throws JSONParseException {\n        this( (Reader)(new 
InputStreamReader(inputStream)), scopeContext );\n    }\n\n    public JPlusCursorParser( String s, FamilyContext scopeContext ) {\n        this( (Reader)(new StringReader(s)), scopeContext );\n    }\n\n    public FamilyContext getScopeContext() {\n        return this.mScopeContext;\n    }\n\n    public void setScopeContext( JPlusContext context ) {\n        this.mScopeContext = context;\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    protected Map<Object, Object > construct_reinterpret_scope_domain() {\n        FamilyContext context = this.getScopeContext();\n        List<MultiScopeMap<Object, Object >> dummy_root = new ArrayList<>();\n        dummy_root.add( new MultiScopeMaptron<>( (Map<Object, Object >)context.root() ) );\n        List<Object > globalScopes = context.getGlobalScopes();\n        if( globalScopes != null ) {\n            for ( Object scope : globalScopes ) {\n                dummy_root.add( new MultiScopeMaptron<>( (Map<Object, Object >)scope ) );\n            }\n        }\n\n        Object dyThisScope  = context.thisScope();\n        Object dySuperScope = context.parent();\n        Object dyRootScope  = context.root();\n\n        Map<Object, Object > thisScope;\n        if( dyThisScope instanceof Map<?, ? 
> ){\n            thisScope = (Map<Object, Object >) dyThisScope;\n        }\n        else if( dyThisScope instanceof JSONArray ){\n            thisScope = (Map)( (JSONArray) dyThisScope ).toJSONObject();\n        }\n        else {\n            thisScope = new TreeMap<>();\n        }\n\n        MultiScopeMaptron<Object, Object > scope = new MultiScopeMaptron<>( null, dummy_root );\n\n        if( context instanceof OverridableFamily && ( (OverridableFamily)context ).isOverriddenAffinity() ) {\n            Object $this = scope.get( \"this\" );\n            if( $this != null ) {\n                dyThisScope = $this;\n            }\n\n            Object $super = scope.get( \"super\" );\n            if( $super != null ) {\n                dySuperScope = $super;\n            }\n\n            Object $root = scope.get( \"__root__\" );\n            if( $root != null ) {\n                dyRootScope = $root;\n            }\n        }\n\n\n        scope.setThisScope( thisScope );\n        scope.elevate( new TreeMap<>() );\n        scope.put( \"this\"      , dyThisScope     );\n        scope.put( \"super\"     , dySuperScope    );\n        scope.put( \"__root__\"  , dyRootScope     );\n        scope.put( \"__scope__\" , (Object) scope  );\n\n        return scope;\n    }\n\n    protected Object reinterpret_eval_object( StringBuilder token ) {\n        Map<Object, Object > scope = this.construct_reinterpret_scope_domain();\n\n        TemplateParser tp = new TemplateParser( new StringReader(token.toString()), scope );\n        return tp.evalValue();\n    }\n\n    protected void reinterpret_eval_token( StringBuilder token ) {\n        Map<Object, Object > scope = this.construct_reinterpret_scope_domain();\n\n        TemplateParser tp = new TemplateParser( new StringReader(token.toString()), scope );\n        token.setLength(0);\n        token.append( tp.eval() );\n    }\n\n    /**\n     * override_object_with_parent\n     * @param dyThisScope\n     * @param parent\n     * @return 
a boolean, which indicates it is a qualified K-V-Based object.\n     */\n    @SuppressWarnings(\"unchecked\")\n    protected boolean reinterpret_override_object_with_parent( Object dyThisScope, Object parent ) {\n        if( parent instanceof Map ) {\n            if( dyThisScope instanceof Map ) {\n                ( (Map)dyThisScope ).putAll( (Map)parent );\n            }\n            else if( dyThisScope instanceof List ) {\n                ( (List)dyThisScope ).addAll( ( (Map)parent ).values() );\n            }\n        }\n        else if( parent instanceof List ) {\n            List l = (List)parent;\n            if( dyThisScope instanceof Map ) {\n                int i = 0;\n                for ( Object item : l ) {\n                    ( (Map)dyThisScope ).put( String.valueOf(i), item );\n                    ++i;\n                }\n            }\n            else if( dyThisScope instanceof List ) {\n                ( (List)dyThisScope ).addAll( l );\n            }\n        }\n        else {\n            return false;\n        }\n\n        return true;\n    }\n\n    protected Object reinterpret_include_path_from_context_paths( StringBuilder path ) {\n        try{\n            // Notice, currently context should be <b>Parent</b> NOT 'this'!\n            // Under object context, the parser is sequentially parse from sibling to sibling.\n            return ( new JPlusCursorParser( new FileReader(path.toString()), this.getScopeContext() ) ).nextValue( null, this.getScopeContext().parent(), null );\n        }\n        catch ( IOException e ){\n            Path[] parentPaths = this.getScopeContext().getParentPaths();\n            Object ret = null;\n            for ( int i = 0; i < parentPaths.length; ++i ) {\n                try{\n                    ret = ( new JPlusCursorParser( new FileReader( parentPaths[i].resolve(path.toString()).toFile() ), this.getScopeContext() ) ).nextValue( null, this.getScopeContext().parent(), null );\n                }\n               
 catch ( IOException e1 ) {\n                    ret = null;\n                }\n\n                if( ret != null ) {\n                    break;\n                }\n            }\n\n            if( ret == null ) {\n                throw this.syntaxError( ErrorStrings.E_IRREDEEMABLE_NO_PATH_CONTEXT_MATCHED + \"What-> '\" + path + \"'\" );\n            }\n\n            return ret;\n        }\n    }\n\n    protected boolean select_reinterpret_token( StringBuilder token, Object[] ret ) {\n        String szToken = token.toString();\n        if( szToken.isEmpty() ) {\n            return false;\n        }\n\n        char c = szToken.charAt(0);\n        switch ( c ) {\n            case 'i':{\n                if( szToken.equals( \"include\" ) ) {\n                    c = this.next();\n                    token = this.devour_follow_string( c );\n                    ret[0] = this.reinterpret_include_path_from_context_paths( token );\n                    return true;\n                }\n                else {\n                    return false;\n                }\n            }\n            case 'e':{\n                if( szToken.equals( \"extends\" ) ) {\n                    c = this.next();\n                    token = this.devour_follow_string( c );\n                    Object parent = this.reinterpret_eval_object( token );\n                    Object dyThisScope = this.getScopeContext().thisScope();\n\n                    if( !this.reinterpret_override_object_with_parent( dyThisScope, parent ) ) {\n                        throw this.syntaxError( \"Overridden object should be a K-V-Based object.\" );\n                    }\n\n                    throw new JSONParserRedirectException( 1 ); // Redirect key-parse sequence and skip.\n                }\n                else {\n                    return false;\n                }\n            }\n            case 'r':{\n                if( szToken.equals( \"ref\" ) ) {\n                    c = this.next();\n                    token = this.devour_follow_string( c );  // #ref T, T->STRING\n                    
ret[0] = this.reinterpret_eval_object( token );\n                    return true;\n                }\n                else {\n                    return false;\n                }\n            }\n            default: {\n                this.reinterpret_eval_token( token );\n                ret[0] = token;\n                return false;\n            }\n        }\n    }\n\n\n    protected StringBuilder devour_follow_string( char currentChar ) {\n        switch ( currentChar ) {\n            case '\\'':\n            case '\\\"':{\n                return this.nextString( currentChar );\n            }\n        }\n\n        StringBuilder sb;\n        for ( sb = new StringBuilder(); currentChar >= ' ' && \",:]}/\\\\\\\"\\'[{;=#& \".indexOf(currentChar) < 0; currentChar = this.next() ) {\n            sb.append(currentChar);\n        }\n\n        if( currentChar != ' ' ) {\n            this.back();\n        }\n        return sb;\n    }\n\n    @Override\n    protected Object eval_next_string_token( StringBuilder sb, char currentChar ) {\n        switch ( currentChar ) {\n            case '#':{\n                currentChar = this.next();\n                sb = this.devour_follow_string( currentChar );\n\n                Object[] ret = new Object[1];\n                boolean bIsReferObject = this.select_reinterpret_token( sb, ret );\n\n                currentChar = this.next(); // [xxx\\'] <- at '\\'', to get follow, before eval_next_string_token invoked this.back();\n                if( bIsReferObject ){\n                    return ret[0];\n                }\n                break;\n            }\n            case '&':{\n                currentChar = this.next();\n                sb = this.devour_follow_string( currentChar );\n                return this.reinterpret_eval_object( sb );\n            }\n            default:{\n                break;\n            }\n        }\n\n        return super.eval_next_string_token( sb, currentChar );\n    }\n\n    protected void 
apply_inner_patriarch( Object parent, Object neo ) {\n        if( this.getScopeContext().parent() == null && this.getScopeContext().root() == null ) {\n            this.getScopeContext().setRoot  ( neo    );\n        }\n\n        this.getScopeContext().setThisScope ( neo    );\n        this.getScopeContext().setParent    ( parent );\n    }\n\n    @Override\n    public Object nextValue( Object indexKey, Object parent, Object[] args ) throws JSONParseException {\n        char c = this.nextClean();\n        switch(c) {\n            case '\"':\n            case '\\'': {\n                return this.nextString(c).toString();\n            }\n            case '[': {\n                this.back();\n                Object lastThis = this.getScopeContext().thisScope();\n                JSONArray p     = new JSONArraytron( );\n\n                this.apply_inner_patriarch( parent, p );\n                p.jsonDecode( this );\n                this.getScopeContext().setThisScope( lastThis );\n                return p;\n            }\n            case '{': {\n                this.back();\n                JSONObject p    = new JSONMaptron( );\n                Object lastThis = this.getScopeContext().thisScope();\n\n                this.apply_inner_patriarch( parent, p );\n                p.jsonDecode( this );\n                this.getScopeContext().setThisScope( lastThis );\n                return p;\n            }\n            default: {\n                StringBuilder sb = this.eval_next_string( c );\n                return this.eval_next_string_token(sb, c);\n            }\n        }\n    }\n\n    @Override\n    public void handleRedirectException( JSONParserRedirectException e ) {\n        if( e.getContext() != null ) {\n            Object[] context = (Object[])e.getContext();\n            String key = (String) context[0];\n            Object val = context[1];\n\n            if( key != null ){\n                throw this.syntaxError( \"Macro function '#extends' can't be value.\" 
);\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSON.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport java.io.IOException;\nimport java.io.Reader;\nimport java.io.StringWriter;\n\npublic final class JSON {\n    public static final Object NULL      = new JSON.Null();\n\n\n    public static Object parse     ( String szJsonString ) {\n        return ( new JSONCursorParser( szJsonString ) ).nextValue();\n    }\n\n    public static String stringify ( Object that ) {\n        return JSON.encode( that, JSONEncoder.BASIC_JSON_ENCODER );\n    }\n\n    public static String stringify ( Object that, int nIndentFactor ) {\n        return JSON.encode( that, nIndentFactor, JSONEncoder.BASIC_JSON_ENCODER );\n    }\n\n    public static String marshal   ( Object that ) {\n        return JSON.encode( that, JSONEncoder.BASIC_JSON_MARSHAL );\n    }\n\n    public static String marshal   ( Object that, long mode ) {\n        return JSON.encode( that, new GenericJSONMarshal( mode ) );\n    }\n\n    public static String render    ( Object that ) {\n        return JSON.encode( that, JSONEncoder.COMMON_JSON_MARSHAL );\n    }\n\n    public static String unbean    ( Object that ) {\n        return JSON.encode( that, JSONEncoder.BEAN_JSON_MARSHAL );\n    }\n\n    public static <T> T unmarshal ( String szJsonString, Class<T > classType ) {\n        ObjectJSONCursorUnmarshal unmarshal = new ObjectJSONCursorUnmarshal( szJsonString, classType );\n        return classType.cast( unmarshal.nextValue() ) ;\n    }\n\n    public static <T> T unmarshal ( Reader reader, Class<T > classType ) {\n        ObjectJSONCursorUnmarshal unmarshal = new ObjectJSONCursorUnmarshal( reader, classType );\n        return classType.cast( unmarshal.nextValue() ) ;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public static <T> T unmarshal( String json, TypeReference<T> typeRef ) {\n        ObjectJSONCursorUnmarshal u = new ObjectJSONCursorUnmarshal( json, typeRef.getType() );\n        return (T) u.nextValue();\n    }\n\n    @SuppressWarnings( 
\"unchecked\" )\n    public static <T> T unmarshal ( Reader reader, TypeReference<T > typeRef ) {\n        ObjectJSONCursorUnmarshal u = new ObjectJSONCursorUnmarshal( reader, typeRef.getType() );\n        return (T) u.nextValue();\n    }\n\n\n\n    public static String encode    ( Object that, JSONEncoder encoder ) {\n        return JSON.encode( that, 0, encoder );\n    }\n\n    public static String encode    ( Object that, int nIndentFactor, JSONEncoder encoder ) {\n        StringWriter w = new StringWriter();\n        try {\n            synchronized( w.getBuffer() ) {\n                return encoder.write( that, w, nIndentFactor,0 ).toString();\n            }\n        }\n        catch ( IOException e ){\n            return null;\n        }\n    }\n\n\n    public static final class Null {\n        private Null() {\n        }\n\n        @Override\n        protected final Object clone() {\n            try{\n                super.clone();\n            }\n            catch ( CloneNotSupportedException e ) {\n                throw new InternalError(e);\n            }\n            return this;\n        }\n\n        @Override\n        public boolean equals( Object that ) {\n            if ( that == this || that instanceof Null ) {\n                return true;\n            }\n            return that == null;\n        }\n\n        @Override\n        public String toString() {\n            return this.toJSONString();\n        }\n\n        public String toJSONString() {\n            return \"null\";\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONArray.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.system.prototype.TypeIndex;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.io.Writer;\nimport java.util.*;\n\npublic interface JSONArray extends PineUnit, List<Object >, JSONDictium, Serializable, RandomAccess, Cloneable {\n    JSONArray jsonDecode( ArchCursorParser x ) throws JSONException ;\n\n    JSONArray jsonDecode( String source ) throws JSONException ;\n\n    void assimilate( List<Object > that );\n\n    List<Object > getArray();\n\n\n\n    Object front() ;\n\n    Object back() ;\n\n    int length() ;\n\n    /** Basic List<Object> **/\n    @Override\n    int size() ;\n\n    @Override\n    boolean isEmpty() ;\n\n    @Override\n    boolean contains( Object o ) ;\n\n    @Override\n    Iterator<Object> iterator() ;\n\n    @Override\n    Object[] toArray() ;\n\n    @Override\n    <T> T[] toArray( T[] a ) ;\n\n    @Override\n    boolean add( Object e ) ;\n\n    @Override\n    void clear() ;\n\n    @Override\n    Object remove( int index ) ;\n\n    JSONArray xRemove( int index ) ;\n\n    @Override\n    boolean remove( Object o ) ;\n\n    JSONArray xRemove(Object o) ;\n\n    @Override\n    Object erase( Object key ) ;\n\n    @Override\n    boolean containsAll( Collection<?> c ) ;\n\n    @Override\n    boolean addAll( Collection<?> c ) ;\n\n    JSONArray xAddAll( Collection<?> c ) ;\n\n    @Override\n    boolean addAll( int index, Collection<?> c ) ;\n\n    JSONArray xAddAll( int index, Collection<?> c ) ;\n\n    @Override\n    boolean removeAll( Collection<?> c ) ;\n\n    JSONArray xRemoveAll( Collection<?> c ) ;\n\n    @Override\n    boolean retainAll( Collection<?> c ) ;\n\n    JSONArray xRetainAll(Collection<?> c) ;\n\n    @Override\n    Object set( int index, Object element ) ;\n\n    JSONArray xSet( int index, Object element ) ;\n\n    Object affirm( int index ) ;\n\n    JSONObject 
affirmObject( int index ) ;\n\n    JSONArray affirmArray( int index ) ;\n\n    @Override\n    boolean containsValue( Object value ) ;\n\n\n\n    @Override\n    void add( int index, Object element ) ;\n\n    JSONArray xAdd( int index, Object element ) ;\n\n    @Override\n    int indexOf( Object o ) ;\n\n    @Override\n    int lastIndexOf( Object o ) ;\n\n    @Override\n    ListIterator<Object> listIterator() ;\n\n    @Override\n    ListIterator<Object> listIterator(int index) ;\n\n    @Override\n    List<Object> subList( int fromIndex, int toIndex ) ;\n\n\n\n    @Override\n    Object get( int index ) throws JSONException ;\n\n    @Override\n    Object get( Object key ) ;\n\n    boolean getBoolean( int index ) throws JSONException ;\n\n    double getDouble( int index ) throws JSONException ;\n\n    int getInt( int index ) throws JSONException ;\n\n    JSONArray getJSONArray( int index ) throws JSONException ;\n\n    JSONObject getJSONObject( int index ) throws JSONException ;\n\n    long getLong( int index ) throws JSONException ;\n\n    String getString( int index ) throws JSONException ;\n\n    byte[] getBytes( int index ) throws JSONException ;\n\n    boolean isNull( int index ) ;\n\n    String join( String separator ) throws JSONException ;\n\n    Object opt( int index ) ;\n\n    boolean optBoolean( int index ) ;\n\n    boolean optBoolean(int index, boolean defaultValue) ;\n\n    double optDouble( int index ) ;\n\n    double optDouble( int index, double defaultValue ) ;\n\n    int optInt( int index ) ;\n\n    int optInt( int index, int defaultValue ) ;\n\n    JSONArray optJSONArray( int index ) ;\n\n    JSONObject optJSONObject( int index ) ;\n\n    long optLong( int index ) ;\n\n    long optLong( int index, long defaultValue ) ;\n\n    String optString( int index ) ;\n\n    String optString( int index, String defaultValue ) ;\n\n    byte[] optBytes( int index ) ;\n\n    byte[] optBytes( int index, byte[] defaultValue ) ;\n\n\n    @Override\n    Object opt( 
Object key ) ;\n\n    @Override\n    boolean optBoolean( Object key ) ;\n\n    @Override\n    double optDouble( Object key ) ;\n\n    @Override\n    int optInt( Object key ) ;\n\n    @Override\n    JSONArray optJSONArray( Object key ) ;\n\n    @Override\n    JSONObject optJSONObject( Object key ) ;\n\n    @Override\n    long optLong( Object key ) ;\n\n    @Override\n    String optString( Object key ) ;\n\n    @Override\n    byte[] optBytes( Object key ) ;\n\n\n\n\n\n\n\n\n    @Override\n    JSONArray insert( Object key, Object val ) ;\n\n    JSONArray put( boolean value ) ;\n\n    JSONArray put( Collection value ) ;\n\n    JSONArray put( double value ) throws JSONException ;\n\n    JSONArray put( int value ) ;\n\n    JSONArray put( long value ) ;\n\n    JSONArray put( Map value ) ;\n\n    JSONArray put( Object value ) ;\n\n    JSONArray put( JSONObject value ) ;\n\n    JSONArray put( JSONArray value ) ;\n\n    JSONArray put( int index, boolean value ) throws JSONException ;\n\n    JSONArray put( int index, Collection value ) throws JSONException ;\n\n    JSONArray put( int index, double value ) throws JSONException ;\n\n    JSONArray put( int index, int value ) throws JSONException ;\n\n    JSONArray put( int index, long value ) throws JSONException ;\n\n    JSONArray put( int index, Map value ) throws JSONException ;\n\n    JSONArray put( int index, Object value ) throws JSONException ;\n\n    JSONArray put( int index, JSONObject value ) throws JSONException ;\n\n    JSONArray put( int index, JSONArray value ) throws JSONException ;\n\n    JSONObject toJSONObject( JSONArray names ) throws JSONException ;\n\n    @Override\n    JSONObject toJSONObject()  ;\n\n    @Override\n    JSONArray toJSONArray()  ;\n\n    @Override\n    Set<?> entrySet() ;\n\n    @Override\n    Collection<Object > values() ;\n\n    @Override\n    Map<?, Object > toMap() ;\n\n    @Override\n    List<Object > toList() ;\n\n\n\n\n\n\n\n    @Override\n    boolean hasOwnProperty( Object elm ) ;\n\n 
   boolean hasOwnProperty( int elm ) ;\n\n    @Override\n    boolean containsKey( Object elm ) ;\n\n    boolean containsKey( int elm ) ;\n\n\n\n    @Override\n    String toJSONString() ;\n\n    String toJSONStringI( int nIndentFactor ) ;\n\n    String toJSONString( int nIndentFactor ) throws IOException ;\n\n    @Override\n    TypeIndex prototype() ;\n\n    @Override\n    String prototypeName() ;\n\n    @Override\n    boolean isPrototypeOf( TypeIndex that ) ;\n\n\n    JSONArray clone() ;\n\n    Writer write(Writer writer) throws IOException ;\n\n    Writer write( Writer writer, int nIndentFactor ) throws IOException ;\n\n    Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONArrayDecoder.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport java.lang.reflect.Type;\n\npublic abstract class JSONArrayDecoder implements JSONDecoder {\n    protected abstract void add( Object self, Object parent, Object indexKey, Object val );\n\n    @Override\n    public void decode( Object self, Object parent, Object indexKey, ArchCursorParser x, Type genericTypes ) {\n        if ( x.nextClean() != '[' ) {\n            throw x.syntaxError(\"A JSONArray text must start with '['\");\n        }\n        else if ( x.nextClean() != ']' ) {\n            x.back();\n\n            int i = 0;\n            while( true ) {\n                if ( x.nextClean() == ',' ) {\n                    x.back();\n\n                    this.add( self, parent, indexKey, JSON.NULL );\n                }\n                else {\n                    x.back();\n                    try {\n                        Object[] args = null;\n                        if( genericTypes != null ) {\n                            args = new Object[]{ genericTypes };\n                        }\n                        this.add( self, parent, indexKey, x.nextValue( i, self, args ) );\n                        ++i;\n                    }\n                    catch ( JSONParserRedirectException e ) {\n                        x.handleRedirectException( e );\n                    }\n                }\n\n                switch( x.nextClean() ) {\n                    case ',': {\n                        if (x.nextClean() == ']') {\n                            return;\n                        }\n\n                        x.back();\n                        break;\n                    }\n                    case ']': {\n                        return;\n                    }\n                    default: {\n                        throw x.syntaxError(\"Expected a ',' or ']'\");\n                    }\n                }\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONArraytron.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.system.prototype.TypeIndex;\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.io.IOException;\nimport java.io.Writer;\nimport java.lang.reflect.Array;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.Iterator;\nimport java.util.ListIterator;\nimport java.util.AbstractSet;\nimport java.util.Spliterator;\nimport java.util.Spliterators;\nimport java.util.NoSuchElementException;\n\n\npublic class JSONArraytron extends ArchJSONArray implements JSONArray {\n    private List<Object > mList;\n    protected transient Set<Map.Entry<Integer, Object > > entrySet ;\n\n    public JSONArraytron() {\n        this.mList = new ArrayList<>();\n    }\n\n    public JSONArraytron( ArchCursorParser x ) throws JSONException {\n        this();\n        this.jsonDecode0( x );\n    }\n\n    public JSONArraytron( String source ) throws JSONException {\n        this(new JSONCursorParser(source));\n    }\n\n    public JSONArraytron( Collection collection ) {\n        this.mList = new ArrayList<>();\n        if ( collection != null ) {\n            Iterator iter = collection.iterator();\n\n            while( iter.hasNext() ) {\n                this.mList.add(JSONUtils.wrapValue( iter.next()) );\n            }\n        }\n\n    }\n\n    public JSONArraytron( Object array ) throws JSONException {\n        this();\n        if (!array.getClass().isArray()) {\n            throw new JSONException(\"JSONArray initial value should be a string or collection or array.\");\n        } else {\n            int length = Array.getLength(array);\n\n            for(int i = 0; i < length; ++i) {\n                this.put(JSONUtils.wrapValue(Array.get(array, i)));\n            }\n\n        }\n    }\n\n 
   public JSONArraytron( List<Object > array, boolean bAssimilateMode ) throws JSONException {\n        if( bAssimilateMode ){\n            this.mList = array;\n        }\n        else {\n            this.mList = new ArrayList<>();\n            if ( array != null ) {\n                for ( Object o : array ) {\n                    this.put( JSONUtils.wrapValue(o) );\n                }\n            }\n        }\n    }\n\n    public JSONArraytron( List<Object > array ) throws JSONException {\n        this( array, false );\n    }\n\n\n    @Override\n    protected void jsonDecode0( ArchCursorParser x ) throws JSONException {\n        JSONArrayDecoder.INNER_JSON_ARRAY_DECODER.decode( this, null, null, x, null );\n    }\n\n    @Override\n    public JSONArraytron jsonDecode( ArchCursorParser x ) throws JSONException {\n        this.clear();\n        this.jsonDecode0( x );\n        return this;\n    }\n\n    @Override\n    public JSONArraytron jsonDecode( String source ) throws JSONException {\n        return this.jsonDecode( new JSONCursorParser(source) );\n    }\n\n    @Override\n    public void assimilate( List<Object > that ){\n        this.mList = that;\n    }\n\n    @Override\n    public List<Object > getArray(){\n        return this.mList;\n    }\n\n\n\n    /** Basic List<Object> **/\n    @Override\n    public int size() {\n        return this.mList.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mList.isEmpty();\n    }\n\n    @Override\n    public boolean contains( Object o ) {\n        return this.mList.contains(o);\n    }\n\n    @Override\n    public Iterator<Object > iterator() {\n        return this.mList.iterator();\n    }\n\n    @Override\n    public Object[] toArray() {\n        return this.mList.toArray();\n    }\n\n    @Override\n    public <T> T[] toArray( T[] a ) {\n        return (T[])this.mList.toArray(a);\n    }\n\n\n    @Override\n    protected boolean innerListAdd( Object e) {\n        return this.mList.add( e 
);\n    }\n\n    @Override\n    public void clear() {\n        this.mList.clear();\n    }\n\n    @Override\n    protected boolean innerListRemove( Object index ) {\n        return this.mList.remove( index );\n    }\n\n    @Override\n    public boolean containsAll( Collection<?> c ) {\n        return this.mList.containsAll(c);\n    }\n\n    @Override\n    public boolean addAll( Collection<?> c ) {\n        return this.mList.addAll(c);\n    }\n\n    @Override\n    public boolean addAll( int index, Collection<?> c ) {\n        return this.mList.addAll(index, c);\n    }\n\n    @Override\n    public boolean removeAll( Collection<?> c ) {\n        return this.mList.removeAll(c);\n    }\n\n    @Override\n    public boolean retainAll( Collection<?> c ) {\n        return this.mList.retainAll(c);\n    }\n\n\n\n\n\n    @Override\n    public void add( int index, Object element ) {\n        this.mList.add( index, element );\n    }\n\n    @Override\n    public int indexOf( Object o ) {\n        return this.mList.indexOf(o);\n    }\n\n    @Override\n    public int lastIndexOf( Object o ) {\n        return this.mList.lastIndexOf(o);\n    }\n\n    @Override\n    public ListIterator<Object> listIterator() {\n        return this.mList.listIterator();\n    }\n\n    @Override\n    public ListIterator<Object> listIterator(int index) {\n        return this.mList.listIterator(index);\n    }\n\n    @Override\n    public List<Object> subList( int fromIndex, int toIndex ) {\n        return this.mList.subList(fromIndex, toIndex);\n    }\n\n\n    @Override\n    protected Object innerListGet( int key ) {\n        return this.mList.get( key );\n    }\n\n    @Override\n    protected Object innerListSet( int index, Object element ) {\n        return this.mList.set( index, element );\n    }\n\n    @Override\n    public JSONArraytron put( Collection value ) {\n        this.put((Object)(new JSONArraytron(value)));\n        return this;\n    }\n\n    @Override\n    public JSONArraytron put( int index, 
Collection value ) throws JSONException {\n        this.put(index, (Object)( new JSONArraytron(value)) );\n        return this;\n    }\n\n    @Override\n    public JSONArraytron put( int index, Map value ) throws JSONException {\n        this.put(index, (Object)(new JSONMaptron(value)));\n        return this;\n    }\n\n    @Override\n    public Set<?> entrySet() {\n        Set<Map.Entry<Integer,Object > > es;\n        return (es = this.entrySet) == null ? (this.entrySet = new ListEntrySet()) : es;\n    }\n\n\n\n    @Override\n    public JSONArraytron clone() {\n        JSONArraytron that = (JSONArraytron) super.clone();\n        that.mList = new ArrayList<>();\n        for ( Object row : this.mList ) {\n            that.put( JSONUtils.cloneElement( row ) );\n        }\n        return that;\n    }\n\n    @Override\n    public Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        return JSONEncoder.BASIC_JSON_ENCODER.write( this.mList, writer, nIndentFactor, nIndentBlankNum );\n    }\n\n\n    protected static boolean valEquals( Object o1, Object o2 ) {\n        return (o1==null ? 
o2==null : o1.equals(o2));\n    }\n\n    protected static class JSONArrayEntry implements Map.Entry<Integer, Object >, Pinenut {\n        Integer key;\n        Object  value;\n\n        JSONArrayEntry( Integer key, Object value ) {\n            this.key   = key;\n            this.value = value;\n        }\n\n        @Override\n        public Integer getKey() {\n            return this.key;\n        }\n\n        @Override\n        public Object getValue() {\n            return this.value;\n        }\n\n        @Override\n        public Object setValue( Object value ) {\n            Object oldValue = this.value;\n            this.value = value;\n            return oldValue;\n        }\n\n        public void setKey( Integer key ) {\n            this.key = key;\n        }\n\n        @Override\n        public boolean equals(Object o) {\n            if (!(o instanceof Map.Entry))\n                return false;\n            Map.Entry<?,?> e = (Map.Entry<?,?>)o;\n\n            return valEquals(key,e.getKey()) && valEquals(value,e.getValue());\n        }\n\n        @Override\n        public int hashCode() {\n            int keyHash = (key==null ? 0 : key.hashCode());\n            int valueHash = (value==null ? 
0 : value.hashCode());\n            return keyHash ^ valueHash;\n        }\n\n        @Override\n        public String toString() {\n            return this.toJSONString();\n        }\n\n        @Override\n        public String toJSONString() {\n            return \"{\" + StringUtils.jsonQuote( this.key.toString() ) + \":\" + JSON.stringify( this.value ) + \"}\";\n        }\n\n        @Override\n        public TypeIndex prototype() {\n            return Prototype.typeid( this );\n        }\n    }\n\n    protected class ListEntrySet extends AbstractSet<Map.Entry<Integer, Object > > {\n        public final int size()                 { return JSONArraytron.this.size(); }\n\n        public final void clear()               { JSONArraytron.this.clear(); }\n\n        public final Iterator<Map.Entry<Integer, Object > > iterator() {\n            return new ListEntryIterator();\n        }\n\n        public final boolean contains( Object o ) {\n            if ( !(o instanceof Map.Entry) ) {\n                return false;\n            }\n            Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n            Object key = e.getKey();\n\n            Object v = JSONArraytron.this.get(key);\n            return v != null && v.equals(e.getValue());\n        }\n\n        public final boolean remove( Object o ) {\n            if ( this.contains(o) ) {\n                Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n                Object key = e.getKey();\n\n                return JSONArraytron.this.remove(key) ;\n            }\n            return false;\n        }\n\n        public final Spliterator<Map.Entry<Integer, Object > > spliterator() {\n            return Spliterators.spliterator( this, Spliterator.SIZED | Spliterator.ORDERED | Spliterator.DISTINCT );\n        }\n    }\n\n\n    protected abstract class JAListEntryIterator {\n        protected Iterator<Object >        currentIterator;\n        protected int                      index;\n        protected JSONArrayEntry           
dummyEntry;\n\n        public JAListEntryIterator() {\n            this.index            = 0;\n            this.currentIterator  = JSONArraytron.this.iterator();\n            this.dummyEntry       = new JSONArrayEntry( this.index, null );\n        }\n\n        public boolean hasNext() {\n            return this.currentIterator.hasNext();\n        }\n\n        protected Map.Entry<Integer, Object > nextNode() {\n            if ( !this.hasNext() ) {\n                throw new NoSuchElementException();\n            }\n\n            this.dummyEntry.setKey( this.index++ );\n            this.dummyEntry.setValue( this.currentIterator.next() );\n            return this.dummyEntry;\n        }\n\n        public void remove() {\n            this.currentIterator.remove();\n        }\n    }\n\n    protected final class ListKeyIterator extends JAListEntryIterator implements Iterator<Integer > {\n        public final Integer next() { return nextNode().getKey(); }\n    }\n\n    protected final class ListValueIterator extends JAListEntryIterator implements Iterator<Object > {\n        public final Object next() { return nextNode().getValue(); }\n    }\n\n    protected final class ListEntryIterator extends JAListEntryIterator implements Iterator<Map.Entry<Integer,Object > > {\n        public final Map.Entry<Integer, Object > next() { return nextNode(); }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONCompiler.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\nimport java.util.Collection;\nimport java.util.Map;\n\n\npublic interface JSONCompiler extends Pinenut {\n    OutputStream compile( Map that, OutputStream outputStream ) throws IOException;\n\n    OutputStream compile( Collection that, OutputStream outputStream ) throws IOException ;\n\n    OutputStream compile( Object[] those, OutputStream outputStream ) throws IOException ;\n\n    OutputStream compile( Object that, OutputStream outputStream ) throws IOException ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONCompilerException.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.ParseException;\n\npublic class JSONCompilerException extends ParseException {\n    private static final long serialVersionUID = 0L;\n    private Throwable cause;\n\n    public JSONCompilerException    ( String what ) {\n        super( what );\n    }\n\n    public JSONCompilerException    ( String what, int errorOffset ) {\n        super( what, errorOffset );\n    }\n\n    public JSONCompilerException    ( String message, int errorOffset, Throwable cause ) {\n        super( message, errorOffset );\n        this.cause = cause;\n    }\n\n    public JSONCompilerException    ( Throwable cause, int errorOffset ) {\n        super( cause.getMessage(), errorOffset );\n        this.cause = cause;\n    }\n\n    public JSONCompilerException    ( Throwable cause ) {\n        super( cause.getMessage() );\n        this.cause = cause;\n    }\n\n    @Override\n    public Throwable getCause() {\n        return this.cause;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONCursorParser.java",
    "content": "package com.pinecone.framework.util.json;\n\n\nimport java.io.InputStream;\nimport java.io.Reader;\n\npublic class JSONCursorParser extends ArchCursorParser {\n    public JSONCursorParser( Reader reader ) {\n        super( reader );\n    }\n\n    public JSONCursorParser( InputStream inputStream ) throws JSONParseException {\n        super( inputStream );\n    }\n\n    public JSONCursorParser( String s ) {\n        super( s );\n    }\n\n    @Override\n    protected JSONArray newJSONArray( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) {\n        return new JSONArraytron( parser );\n    }\n\n    @Override\n    protected JSONObject newJSONObject( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) {\n        return new JSONMaptron( parser );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONDecoder.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.lang.reflect.Type;\nimport java.util.List;\nimport java.util.Map;\n\npublic interface JSONDecoder extends Pinenut {\n    JSONDecoder INNER_JSON_OBJECT_DECODER = new JSONObjectDecoder() {\n        @Override\n        protected void set( Object self, String key, Object val ) {\n            ( (JSONObject) self ).put( key, val );\n        }\n    };\n\n    JSONDecoder INNER_JSON_ARRAY_DECODER = new JSONArrayDecoder() {\n        @Override\n        protected void add( Object self, Object parent, Object indexKey, Object val ) {\n            ( (JSONArray) self ).add( val );\n        }\n    };\n\n    JSONDecoder INNER_MAP_DECODER = new JSONObjectDecoder() {\n        @Override\n        @SuppressWarnings( \"unchecked\" )\n        protected void set( Object self, String key, Object val ) {\n            ( (Map<String, Object >) self ).put( key, val );\n        }\n    };\n\n    JSONDecoder INNER_LIST_DECODER = new JSONArrayDecoder() {\n        @Override\n        @SuppressWarnings( \"unchecked\" )\n        protected void add( Object self, Object parent, Object indexKey, Object val ) {\n            ( (List<Object>) self ).add( val );\n        }\n    };\n\n\n    void decode( Object self, Object parent, Object indexKey, ArchCursorParser x, Type genericTypes ) ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONDecompiler.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface JSONDecompiler extends Pinenut {\n    Object nextValue( Object parent ) throws JSONCompilerException ;\n\n    Object nextValue() throws JSONCompilerException ;\n\n    Object decompile( Object parent ) ;\n\n    Object decompile() ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONDictium.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.unit.Dictium;\nimport com.pinecone.framework.unit.Units;\n\npublic interface JSONDictium extends Dictium<Object > {\n\n    Object opt( Object key );\n\n    boolean optBoolean( Object key );\n\n    double optDouble( Object key );\n\n    int optInt( Object key );\n\n    JSONArray optJSONArray( Object key );\n\n    JSONObject optJSONObject( Object key );\n\n    long optLong( Object key );\n\n    String optString( Object key );\n\n    byte[] optBytes( Object key );\n\n\n    JSONObject toJSONObject();\n\n    JSONArray toJSONArray();\n\n    default JSONArray affirmArray   ( Object key ) {\n        Object o = this.opt(key);\n        if (o instanceof JSONArray) {\n            return (JSONArray) o;\n        }\n        JSONArray jNew = new JSONArraytron();\n        this.insert( key, jNew );\n        return jNew;\n    }\n\n    default JSONObject affirmObject ( Object key ) {\n        Object o = this.opt(key);\n        if (o instanceof JSONObject) {\n            return (JSONObject) o;\n        }\n        JSONObject jNew = new JSONMaptron();\n        this.insert( key, jNew );\n        return jNew;\n    }\n\n    default Object affirm           ( Object key ) {\n        if ( this.containsKey(key) ) {\n            return this.opt(key);\n        }\n\n        Object o = JSON.NULL;\n        this.insert( key, o );\n        return o;\n    }\n\n\n    /**\n     * query\n     * 202406029\n     * @param evalKey Object simple-eval key, fmt: key1.key2.key3...keyN (T->.T)\n     * @return null for nothing, object for the value which just be queried.\n     */\n    default Object query( String evalKey ) {\n        return Units.getValueFromMapStructureRecursively( this, evalKey );\n    }\n\n    default String queryString( String evalKey, String defaultValue ) {\n        Object object = this.query( evalKey );\n        return JSON.NULL.equals(object) ? 
defaultValue : object.toString();\n    }\n\n    default String queryString( String evalKey ) {\n        return this.queryString( evalKey, \"\" );\n    }\n\n    default JSONObject queryJSONObject( String evalKey ) {\n        Object o = this.query( evalKey );\n        return o instanceof JSONObject ? (JSONObject)o : null;\n    }\n\n    default JSONArray queryJSONArray( String evalKey ) {\n        Object o = this.query( evalKey );\n        return o instanceof JSONArray ? (JSONArray)o : null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONEncoder.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.IOException;\nimport java.io.StringWriter;\nimport java.io.Writer;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.Map;\n\npublic interface JSONEncoder extends Pinenut {\n    String          JSON_OBJ_STRINGIFY_DEFAULT             = \"[object %s]\";\n    String          JSON_OBJ_NULL_DEFAULT                  = \"null\";\n    JSONEncoder     BASIC_JSON_ENCODER                     = new GenericJSONEncoder();\n    JSONMarshal     BASIC_JSON_MARSHAL                     = new GenericJSONMarshal();\n    JSONMarshal     STRUCT_JSON_MARSHAL                    = new GenericJSONMarshal( JSONMarshalMode.MODE_PUBLIC_FIELD );\n    JSONMarshal     COMMON_JSON_MARSHAL                    = new GenericJSONMarshal( JSONMarshalMode.MODE_COMMON );\n    JSONMarshal     BEAN_JSON_MARSHAL                      = new GenericJSONMarshal( JSONMarshalMode.MODE_BEAN_GETTER );\n\n\n\n    Writer write              ( Pinenut that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n    Writer write              ( JSONObject that, Writer writer ) throws IOException ;\n\n    Writer write              ( JSONObject that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n    Writer write              ( JSONArray that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n    void writeKeyValue        ( Writer writer, Object key, Object val, int nIndentFactor, int nIndentBlankNum ) throws JSONException, IOException ;\n\n    default Writer write      ( Map that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        return this.writeMapFmtEntries( that.entrySet(), writer, nIndentFactor, nIndentBlankNum );\n    }\n\n    default <T extends Map.Entry > Writer writeMapFmtEntriesT ( Collection<T> that, Writer writer, int nIndentFactor, 
int nIndentBlankNum ) throws IOException  {\n        return this.writeMapFmtEntries( that, writer, nIndentFactor, nIndentBlankNum );\n    }\n\n    Writer writeMapFmtEntries ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n    Writer writeArray         ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n    Writer write              ( Collection that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n    Writer write              ( Object that, Writer writer ) throws IOException ;\n\n    Writer write              ( Object that, Writer writer, int nIndentFactor ) throws IOException ;\n\n    Writer write              ( Object that, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n\n\n\n\n    static Writer writeMapFormat     ( Writer writer, Object[] those, int nIndentFactor, int nIndentBlankNum, JSONEncoder encoder ) throws IOException {\n        int length = those.length;\n\n        writer.write('{');\n\n        if ( length == 1 ) {\n            if( !(those[0] instanceof Map.Entry ) ){\n                throw new IllegalArgumentException( \"Serialized object should be [Map.Entry].\" );\n            }\n\n            Map.Entry kv = (Map.Entry)those[0];\n            encoder.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nIndentBlankNum  );\n        }\n        else if ( length != 0 ) {\n            int nNewIndent = nIndentBlankNum + nIndentFactor;\n            for ( int i = 0; i < length; ++i ) {\n                Map.Entry kv = (Map.Entry)those[i];\n                GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, i !=0 );\n                encoder.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nNewIndent  );\n            }\n            if ( nIndentFactor > 0 ) {\n                writer.write( '\\n' );\n            }\n\n            
GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n        }\n\n\n        writer.write( '}' );\n\n        return writer;\n    }\n\n    static Writer writeMapFormat     ( Writer writer, Object[] those, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        return JSONEncoder.writeMapFormat( writer, those, nIndentFactor, nIndentBlankNum, JSONEncoder.BASIC_JSON_ENCODER );\n    }\n\n    static String writeMapFormat     ( Object[] those, int nIndentFactor ) {\n        StringWriter w = new StringWriter();\n        try {\n            synchronized( w.getBuffer() ) {\n                JSONEncoder.writeMapFormat( w, (Object[])those, nIndentFactor,0 );\n                return w.toString();\n            }\n        }\n        catch ( IOException e ){\n            return null;\n        }\n    }\n\n    static String stringifyMapFormat ( Object[] those ) {\n        return JSONEncoder.writeMapFormat( those, 0 );\n    }\n\n\n\n\n    static Writer writeMapFormat     ( Writer writer, Collection those, int nIndentFactor, int nIndentBlankNum, JSONEncoder encoder ) throws IOException {\n        boolean bHasNextElement = false;\n\n        int length = those.size();\n        writer.write('{');\n        Iterator iter = those.iterator();\n\n        if ( length == 1 ) {\n            Map.Entry kv = (Map.Entry)iter.next();\n            encoder.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nIndentBlankNum  );\n        }\n        else if ( length != 0 ) {\n            int nNewIndent = nIndentBlankNum + nIndentFactor;\n\n            while( iter.hasNext() ) {\n                GenericJSONEncoder.beforeJsonElementWrote( writer, nIndentFactor, nNewIndent, bHasNextElement );\n                Map.Entry kv = (Map.Entry)iter.next();\n                encoder.writeKeyValue( writer, kv.getKey(), kv.getValue(), nIndentFactor, nNewIndent  );\n                bHasNextElement = true;\n            }\n\n            if ( nIndentFactor > 0 ) {\n                
writer.write( '\\n' );\n            }\n            GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n        }\n\n        writer.write('}');\n        return writer;\n    }\n\n    static Writer writeMapFormat     ( Writer writer, Collection those, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        return JSONEncoder.writeMapFormat( writer, those, nIndentFactor, nIndentBlankNum, JSONEncoder.BASIC_JSON_ENCODER );\n    }\n\n    static String writeMapFormat     ( Collection those, int nIndentFactor ) {\n        StringWriter w = new StringWriter();\n        try {\n            synchronized( w.getBuffer() ) {\n                JSONEncoder.writeMapFormat( w, (Collection)those, nIndentFactor,0 );\n                return w.toString();\n            }\n        }\n        catch ( IOException e ){\n            return null;\n        }\n    }\n\n    static String stringifyMapFormat ( Collection those ) {\n        return JSONEncoder.writeMapFormat( those, 0 );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONException.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class JSONException extends PineRuntimeException {\n    private static final long serialVersionUID = 0L;\n    private Throwable cause;\n\n    public JSONException    ( String what ) {\n        super( what );\n    }\n\n    public JSONException( Throwable cause ) {\n        super( cause.getMessage() );\n        this.cause = cause;\n    }\n\n    @Override\n    public Throwable getCause() {\n        return this.cause;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONMaptron.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.util.json.homotype.BeanColonist;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.io.Writer;\nimport java.lang.reflect.Field;\nimport java.util.Map;\nimport java.util.LinkedHashMap;\nimport java.util.HashMap;\nimport java.util.TreeMap;\nimport java.util.Locale;\nimport java.util.ResourceBundle;\nimport java.util.Enumeration;\nimport java.util.Set;\nimport java.util.Collection;\nimport java.util.Iterator;\n\npublic class JSONMaptron extends ArchJSONObject implements JSONObject, Serializable {\n    private Map<String, Object > mMap;\n\n    public JSONMaptron() {\n        this( true );\n    }\n\n    public JSONMaptron( boolean bLinked ){\n        this.mMap = bLinked ? new LinkedHashMap<>() : new HashMap<>();\n    }\n\n    public JSONMaptron( int nInitialCapacity, boolean bLinked ){\n        if ( bLinked ) {\n            this.mMap = new LinkedHashMap<>( nInitialCapacity );\n        }\n        else {\n            this.mMap = new HashMap<>( nInitialCapacity );\n        }\n    }\n\n    public JSONMaptron( ArchCursorParser x ) throws JSONException {\n        this();\n        this.jsonDecode0( x );\n    }\n\n    public JSONMaptron( Map<String, Object> map, String[] names ) {\n        this();\n\n        for( int i = 0; i < names.length; ++i ) {\n            try {\n                this.putOnce( names[i], map.get( names[i] ) );\n            }\n            catch ( Exception e ) {\n                this.putOnce( names[i], JSON.NULL );\n            }\n        }\n\n    }\n\n    public JSONMaptron( Map<String, Object> map ) {\n        this( map,false );\n    }\n\n    public JSONMaptron( Map<String, Object> map, boolean bAssimilateMode ) {\n        if( bAssimilateMode ){\n            this.mMap = map;\n        }\n        else {\n            this.mMap = new LinkedHashMap<>();\n            if (map != null) {\n              
  for ( Object o : map.entrySet() ) {\n                    Entry e = (Entry) o;\n                    Object value = e.getValue();\n                    if (value != null) {\n                        this.mMap.put( (String) e.getKey(), JSONUtils.wrapValue(value) );\n                    }\n                }\n            }\n        }\n    }\n\n    public JSONMaptron( Object bean ) {\n        this();\n        this.populateMap( bean );\n    }\n\n    public JSONMaptron( Object object, String[] names ) {\n        this();\n        Class c = object.getClass();\n\n        for( int i = 0; i < names.length; ++i ) {\n            String name = names[i];\n\n            try {\n                this.putOpt( name, c.getField(name).get(object) );\n            }\n            catch ( Exception ignore ) {\n                //Do nothing.\n            }\n        }\n\n    }\n\n    public JSONMaptron( String source ) throws JSONException {\n        this(new JSONCursorParser(source));\n    }\n\n    public JSONMaptron( String baseName, Locale locale ) throws JSONException {\n        this();\n        ResourceBundle bundle = ResourceBundle.getBundle(baseName, locale, Thread.currentThread().getContextClassLoader());\n        Enumeration keys = bundle.getKeys();\n\n        while( true ) {\n            Object key;\n            do {\n                if ( !keys.hasMoreElements() ) {\n                    return;\n                }\n\n                key = keys.nextElement();\n            }\n            while(!(key instanceof String));\n\n            String[] path = ((String)key).split(\"\\\\.\");\n            int last = path.length - 1;\n            JSONObject target = this;\n\n            for( int i = 0; i < last; ++i ) {\n                String segment = path[i];\n                JSONObject nextTarget = target.optJSONObject(segment);\n                if ( nextTarget == null ) {\n                    nextTarget = new JSONMaptron();\n                    target.put(segment, (Object)nextTarget);\n           
     }\n\n                target = nextTarget;\n            }\n\n            target.put(path[last], (Object)bundle.getString((String)key));\n        }\n    }\n\n    @Override\n    protected void jsonDecode0( ArchCursorParser x ) throws JSONException {\n        JSONObjectDecoder.INNER_JSON_OBJECT_DECODER.decode( this, null, null, x, null );\n    }\n\n    @Override\n    public JSONMaptron jsonDecode( ArchCursorParser x ) throws JSONException {\n        this.clear();\n        this.jsonDecode0( x );\n        return this;\n    }\n\n    @Override\n    public JSONMaptron jsonDecode( String source ) throws JSONException {\n        return this.jsonDecode( new JSONCursorParser(source) );\n    }\n\n\n    @Override\n    public JSONMaptron assimilate( Map<String, Object> that ){\n        this.mMap = that;\n        return this;\n    }\n\n\n\n    @Override\n    public Map<String, Object > getMap(){\n        return this.mMap;\n    }\n\n    /** Basic Map **/\n    @Override\n    public int size() {\n        return this.mMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mMap.isEmpty();\n    }\n\n    @Override\n    protected boolean innerMapContainsKey( Object key ) {\n        return this.mMap.containsKey( key );\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.mMap.containsValue(value);\n    }\n\n    @Override\n    public void putAll( Map<? 
extends String, ?> m ) {\n        this.mMap.putAll(m);\n    }\n\n    @Override\n    public void clear() {\n        this.mMap.clear();\n    }\n\n    @Override\n    public Object remove( Object key ) {\n        return this.mMap.remove(key);\n    }\n\n    @Override\n    public Set<String > keySet() {\n        return this.mMap.keySet();\n    }\n\n    @Override\n    public Collection<Object > values() {\n        return this.mMap.values();\n    }\n\n    @Override\n    public Set<Map.Entry<String, Object > > entrySet() {\n        return this.mMap.entrySet();\n    }\n\n\n    @Override\n    protected Object innerMapGet( Object key ) {\n        return this.mMap.get( key );\n    }\n\n\n\n\n    protected void populateMap( Object bean ) {\n        BeanColonist.WrappedColonist.populate( bean, this );\n    }\n\n    @Override\n    protected Object innerMapPut( String key, Object value ){\n        return this.mMap.put( key, value );\n    }\n\n    @Override\n    public JSONMaptron put( String key, Map value ) throws JSONException {\n        this.put( key, (Object)( new JSONMaptron(value) ) );\n        return this;\n    }\n\n    @Override\n    protected Object innerMapRemove( String key ) {\n        return this.mMap.remove( key );\n    }\n\n    @Override\n    public Map.Entry<String, Object > front() {\n        return this.mMap.entrySet().iterator().next();\n    }\n\n    @Override\n    public Map.Entry<String, Object > back() {\n        try{\n            if( this.mMap instanceof LinkedHashMap ){\n                Field tail = this.mMap.getClass().getDeclaredField(\"tail\" );\n                tail.setAccessible( true );\n                Map.Entry<?, ?> kv = (Map.Entry<?, ?> )tail.get( this.mMap );\n                return (Map.Entry<String, Object> ) kv;\n            }\n            else if( this.mMap instanceof LinkedTreeMap ){\n                return ( (LinkedTreeMap<String, Object>)this.mMap ).getLast();\n            }\n            else if( this.mMap instanceof TreeMap ){\n            
    return ( (TreeMap<String, Object>)this.mMap ).lastEntry();\n            }\n            else {\n                throw new IllegalStateException();\n            }\n        }\n        catch ( NoSuchFieldException | IllegalAccessException | IllegalStateException e ) {\n            // It seem there is the only way, fuck.\n            Iterator<Map.Entry<String, Object> > iterator = this.mMap.entrySet().iterator();\n            Map.Entry<String, Object> tail    = null;\n            while ( iterator.hasNext() ) {\n                tail = iterator.next();\n            }\n            return tail;\n        }\n    }\n\n\n\n\n\n    @Override\n    public JSONMaptron clone() {\n        JSONMaptron that = (JSONMaptron) super.clone();\n        that.mMap = new LinkedHashMap<>();\n        for ( Entry<String, Object> e : this.mMap.entrySet() ) {\n            Object value = e.getValue();\n            that.mMap.put( e.getKey(), JSONUtils.cloneElement( value ) );\n        }\n        return that;\n    }\n\n    @Override\n    public Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        return JSONEncoder.BASIC_JSON_ENCODER.write( this.mMap, writer, nIndentFactor, nIndentBlankNum );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONMarshal.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.util.json.handler.EncodeHandlerRegistry;\nimport com.pinecone.framework.util.json.handler.JSONObjectEncodeHandler;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic interface JSONMarshal extends JSONEncoder {\n\n    void setMode( long mode );\n\n    long getMode();\n\n    void setBeanEncoder( BeanJSONEncoder encoder );\n\n    BeanJSONEncoder getBeanEncoder();\n\n    void setEncodeHandlerRegistry( EncodeHandlerRegistry registry );\n\n    EncodeHandlerRegistry getEncodeHandlerRegistry();\n\n\n    <T> void registerEncodeHandler( Class<T> type, JSONObjectEncodeHandler<? super T> handler );\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONMarshalMode.java",
    "content": "package com.pinecone.framework.util.json;\n\npublic final class JSONMarshalMode {\n\n    private JSONMarshalMode() {}\n\n    public static final long MODE_ANNOTATED_FIELD = 1L;\n    public static final long MODE_ANY_FIELD       = 1L << 1;\n    public static final long MODE_PUBLIC_FIELD    = 1L << 2;\n    public static final long MODE_BEAN_GETTER     = 1L << 3;\n\n    public static final long MODE_DEFAULT = MODE_ANNOTATED_FIELD;\n    public static final long MODE_COMMON  = MODE_ANNOTATED_FIELD | MODE_PUBLIC_FIELD | MODE_BEAN_GETTER;\n    public static final long MODE_ALL     = MODE_ANNOTATED_FIELD | MODE_PUBLIC_FIELD | MODE_BEAN_GETTER | MODE_ANY_FIELD;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONObject.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\nimport com.pinecone.framework.system.prototype.TypeIndex;\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.io.Writer;\nimport java.util.*;\n\npublic interface JSONObject extends PineUnit, Map<String, Object>, JSONDictium, Cloneable, Serializable {\n    JSONObject jsonDecode( ArchCursorParser x ) throws JSONException ;\n\n    JSONObject jsonDecode( String source ) throws JSONException ;\n\n    JSONObject assimilate( Map<String, Object> that );\n\n\n\n\n    JSONObject shareFrom( JSONObject that, String szKey ) ;\n\n    JSONObject shareFrom( JSONObject that, String[] szKeys ) ;\n\n    JSONObject subJson ( String szKey ) ;\n\n    JSONObject subJson ( String[] szKeys ) ;\n\n    JSONObject detachSub ( String szKey ) ;\n\n    JSONObject detachSub ( String[] szKeys ) ;\n\n    JSONObject moveSubFrom ( JSONObject that, String szKey ) ;\n\n    JSONObject moveSubFrom ( JSONObject that, String[] szKeys ) ;\n\n\n\n    /**\n     * 20240625\n     * Eliminates all keys excepted the survivor key.\n     * @param szSurvivorKey The `key` the can surviving.\n     * @return this\n     */\n    default JSONObject eliminateExcepts( String szSurvivorKey ) {\n        Object sub = this.opt( szSurvivorKey );\n        this.clear();\n        this.embed( szSurvivorKey, sub );\n        return this;\n    }\n\n    /**\n     * 20240625\n     * Eliminates all keys excepted survivor keys.\n     * @param szSurvivorKeys The batch of `keys` that can surviving.\n     * @return this\n     */\n    default JSONObject eliminateExcepts( String[] szSurvivorKeys ) {\n        JSONObject sub = this.subJson( szSurvivorKeys );\n        this.clear();\n        this.assimilate( sub.getMap() );\n        return this;\n    }\n\n\n\n\n    Map<String, Object > getMap();\n\n    /** Basic Map **/\n    @Override\n    int size() ;\n\n    @Override\n    boolean isEmpty() ;\n\n    
@Override\n    boolean containsKey( Object key ) ;\n\n    @Override\n    boolean containsValue( Object value ) ;\n\n    @Override\n    void putAll( Map<? extends String, ?> m ) ;\n\n    JSONObject xPutAll( Map<? extends String, ?> m ) ;\n\n    @Override\n    void clear() ;\n\n    JSONObject xClear() ;\n\n    @Override\n    Object remove( Object key ) ;\n\n    @Override\n    Object erase( Object key ) ;\n\n    JSONObject xRemove(Object key) ;\n\n    @Override\n    Set<String > keySet() ;\n\n    @Override\n    Collection<Object > values() ;\n\n    @Override\n    Set<Map.Entry<String, Object > > entrySet() ;\n\n\n    JSONObject accumulate( String key, Object value ) throws JSONException ;\n\n    JSONObject append( String key, Object value ) throws JSONException ;\n\n\n\n\n\n\n\n    @Override\n    Object get( Object key ) ;\n\n    Object get( String key ) throws JSONException ;\n\n    boolean getBoolean( String key ) throws JSONException ;\n\n    double getDouble( String key ) throws JSONException ;\n\n    int getInt( String key ) throws JSONException ;\n\n    JSONArray getJSONArray  ( String key ) throws JSONException ;\n\n    JSONObject getJSONObject( String key ) throws JSONException ;\n\n    long getLong( String key ) throws JSONException ;\n\n    String getString( String key ) throws JSONException ;\n\n    byte[] getBytes( String key ) throws JSONException ;\n\n    JSONArray affirmArray( String key ) ;\n\n    JSONObject affirmObject( String key ) ;\n\n    Object affirm( String key ) ;\n\n    Object opt( String key ) ;\n\n    boolean optBoolean( String key ) ;\n\n    boolean optBoolean( String key, boolean defaultValue ) ;\n\n    double optDouble( String key ) ;\n\n    double optDouble( String key, double defaultValue ) ;\n\n    int optInt( String key ) ;\n\n    int optInt( String key, int defaultValue ) ;\n\n    JSONArray optJSONArray( String key) ;\n\n    JSONObject optJSONObject( String key) ;\n\n    long optLong( String key ) ;\n\n    long optLong( String key, 
long defaultValue ) ;\n\n    String optString( String key ) ;\n\n    String optString( String key, String defaultValue ) ;\n\n    byte[] optBytes( String key ) ;\n\n    byte[] optBytes( String key, byte[] defaultValue ) ;\n\n    @Override\n    Object opt( Object key ) ;\n\n    @Override\n    boolean optBoolean( Object key ) ;\n\n    @Override\n    double optDouble( Object key ) ;\n\n    @Override\n    int optInt( Object key ) ;\n\n    @Override\n    JSONArray optJSONArray( Object key ) ;\n\n    @Override\n    JSONObject optJSONObject( Object key ) ;\n\n    @Override\n    long optLong( Object key ) ;\n\n    @Override\n    String optString( Object key ) ;\n\n    @Override\n    byte[] optBytes( Object key ) ;\n\n\n\n\n    JSONObject increment( String key ) throws JSONException ;\n\n    boolean isNull( String key ) ;\n\n    Iterator keys() ;\n\n    JSONArray names() ;\n\n    String[] getOwnPropertyNames () ;\n\n\n\n\n\n\n\n    @Override\n    JSONObject insert( Object key, Object value ) ;\n\n    JSONObject put( String key, boolean value ) throws JSONException ;\n\n    JSONObject put( String key, Collection value ) throws JSONException ;\n\n    JSONObject put( String key, double value ) throws JSONException ;\n\n    JSONObject put( String key, int value ) throws JSONException ;\n\n    JSONObject put( String key, long value ) throws JSONException ;\n\n    JSONObject put( String key, Map value ) throws JSONException ;\n\n    JSONObject put( String key, JSONArray value ) throws JSONException ;\n\n    JSONObject put( String key, JSONObject value ) throws JSONException ;\n\n    @Override\n    JSONObject put( String key, Object value ) throws JSONException ;\n\n    JSONObject embed( String key, Object value ) throws JSONException ;\n\n    JSONObject putOnce( String key, Object value ) throws JSONException ;\n\n    JSONObject putOpt( String key, Object value ) throws JSONException ;\n\n\n\n\n\n\n\n    Object remove( String key ) ;\n\n    JSONObject removeAll( Collection<String 
> keys );\n\n    JSONObject removeAll( String[] keys );\n\n    JSONArray toJSONArray( JSONArray names ) throws JSONException ;\n\n    @Override\n    JSONArray toJSONArray() ;\n\n    @Override\n    JSONObject toJSONObject() ;\n\n    Map.Entry<String, Object > front() ;\n\n    Map.Entry<String, Object > back() ;\n\n\n\n    @Override\n    String toJSONString() ;\n\n    String toJSONStringI( int nIndentFactor ) ;\n\n    String toJSONString( int nIndentFactor ) throws IOException ;\n\n    @Override\n    TypeIndex prototype() ;\n\n    @Override\n    String prototypeName() ;\n\n    @Override\n    boolean isPrototypeOf  ( TypeIndex that ) ;\n\n    @Override\n    boolean hasOwnProperty ( Object key ) ;\n\n    @Override\n    Map<?, Object > toMap();\n\n    @Override\n    List<Object > toList();\n\n\n\n    JSONObject clone() ;\n\n    Writer write(Writer writer ) throws IOException ;\n\n    Writer write( Writer writer, int nIndentFactor ) throws IOException ;\n\n    Writer write( Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONObjectDecoder.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport java.lang.reflect.Type;\n\npublic abstract class JSONObjectDecoder implements JSONDecoder {\n    protected abstract void set( Object self, String key, Object val );\n\n    @Override\n    public void decode( Object self, Object parent, Object indexKey, ArchCursorParser x, Type genericTypes ) {\n        if ( x.nextClean() != '{' ) {\n            throw x.syntaxError(\"A JSONObject text must begin with '{'\");\n        }\n        else {\n            while( true ) {\n                char c = x.nextClean();\n                switch(c) {\n                    case '\\u0000': {\n                        throw x.syntaxError( \"A JSONObject text must end with '}'\" );\n                    }\n                    case '}': {\n                        return;\n                    }\n                    default: {\n                        x.back();\n\n                        String key = null;\n                        Object val = null;\n                        try {\n                            key = x.nextValue( null, self, null ).toString();\n                            c = x.nextClean();\n                            if ( c != ':' && c != '=' ) {\n                                throw x.syntaxError( \"Expected a ':', '=' after a key\" );\n                            }\n\n                            Object[] args = null;\n                            if( genericTypes != null ) {\n                                args = new Object[]{ genericTypes };\n                            }\n                            val = x.nextValue( key, self, args );\n                            this.set( self, key, val );\n                        }\n                        catch ( JSONParserRedirectException e ) {\n                            e.setContext( new Object[]{ key, val } );\n                            x.handleRedirectException( e );\n                        }\n\n                        switch ( x.nextClean() ) {\n              
              case ',':\n                            case ';': {\n                                if ( x.nextClean() == '}' ) {\n                                    return;\n                                }\n\n                                x.back();\n                                break;\n                            }\n                            case '}':{\n                                return;\n                            }\n                            default: {\n                                throw x.syntaxError( \"Expected a ',' or '}'\" );\n                            }\n                        }\n                    }\n                }\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONParseException.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.ParseException;\n\npublic class JSONParseException extends ParseException {\n    private static final long serialVersionUID = 0L;\n    private Throwable cause;\n\n    public JSONParseException    ( String what ) {\n        super( what );\n    }\n\n    public JSONParseException    ( String what, int errorOffset ) {\n        super( what, errorOffset );\n    }\n\n    public JSONParseException    ( String message, int errorOffset, Throwable cause ) {\n        super( message, errorOffset );\n        this.cause = cause;\n    }\n\n    public JSONParseException    ( Throwable cause, int errorOffset ) {\n        super( cause.getMessage(), errorOffset );\n        this.cause = cause;\n    }\n\n    public JSONParseException    ( Throwable cause ) {\n        super( cause.getMessage() );\n        this.cause = cause;\n    }\n\n    @Override\n    public Throwable getCause() {\n        return this.cause;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONParserRedirectException.java",
    "content": "package com.pinecone.framework.util.json;\n\npublic class JSONParserRedirectException extends JSONException {\n    Object context;\n    int type ;\n\n    public JSONParserRedirectException    ( int type ) {\n        super( \"\" );\n        this.type = type;\n    }\n\n    public int getType() {\n        return this.type;\n    }\n\n    public void setContext( Object context ) {\n        this.context = context;\n    }\n\n    public Object getContext() {\n        return this.context;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONString.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface JSONString extends Pinenut {\n    String toJSONString();\n\n    static JSONString wrapRaw( String that ) {\n        return new JSONString() {\n            @Override\n            public String toJSONString() {\n                return that;\n            }\n\n            @Override\n            public String toString() {\n                return that;\n            }\n        };\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/JSONUtils.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.Map;\n\npublic abstract class JSONUtils {\n    public static void prospectNumberQualify(Object o) throws JSONException {\n        if (o != null) {\n            if (o instanceof Double) {\n                if (((Double)o).isInfinite() || ((Double)o).isNaN()) {\n                    throw new JSONException(\"JSON does not allow non-finite numbers.\");\n                }\n            }\n            else if (o instanceof Float && (((Float)o).isInfinite() || ((Float)o).isNaN())) {\n                throw new JSONException(\"JSON does not allow non-finite numbers.\");\n            }\n        }\n    }\n\n    private static String numberLikeStringTransfer( String string ){\n        if ( string.indexOf(46) > 0 && string.indexOf(101) < 0 && string.indexOf(69) < 0 ) {\n            while(string.endsWith(\"0\")) {\n                string = string.substring(0, string.length() - 1);\n            }\n\n            if (string.endsWith(\".\")) {\n                string = string.substring(0, string.length() - 1);\n            }\n        }\n\n        return string;\n    }\n\n    public static String numberToString( Number number ) throws JSONException {\n        if (number == null) {\n            throw new JSONException(\"Null pointer\");\n        }\n        else {\n            JSONUtils.prospectNumberQualify(number);\n            String string = number.toString();\n            return JSONUtils.numberLikeStringTransfer( string );\n        }\n    }\n\n    public static String doubleToString( double d ) {\n        if (!Double.isInfinite(d) && !Double.isNaN(d)) {\n            String string = Double.toString(d);\n            return JSONUtils.numberLikeStringTransfer( string );\n        }\n        else {\n            return \"null\";\n        }\n    
}\n\n    public static String valueToString( Object value ) throws JSONException {\n        if ( value != null && !value.equals((Object)null) ) {\n            if ( value instanceof JSONString ) {\n                String object;\n                try {\n                    object = ((JSONString)value).toJSONString();\n                }\n                catch ( Exception e ) {\n                    throw new JSONException(e);\n                }\n\n                return object;\n            }\n            else if ( value instanceof Number ) {\n                return JSONUtils.numberToString((Number)value);\n            }\n            else if (!(value instanceof Boolean) && !(value instanceof JSONObject) && !(value instanceof JSONArray)) {\n                if (value instanceof Map) {\n                    return (new JSONMaptron((Map)value)).toString();\n                }\n                else if (value instanceof Collection) {\n                    return (new JSONArraytron((Collection)value)).toString();\n                }\n                else {\n                    return value.getClass().isArray() ? 
(new JSONArraytron(value)).toString() : StringUtils.jsonQuote(value.toString());\n                }\n            }\n            else {\n                return value.toString();\n            }\n        }\n        else {\n            return \"null\";\n        }\n    }\n\n    public static String noneStartZeroInteger( String szNum ) {\n        if( szNum.startsWith( \"0\" ) ) {\n            int i;\n            for ( i = 0; i < szNum.length(); i++ ) {\n                if( i == szNum.length() - 1 && szNum.charAt(i) == '0' ){ // 0000001, 0nX\n                    break;\n                }\n                if( szNum.charAt(i) != '0' ) {\n                    break;\n                }\n            }\n            return szNum.substring( i );\n        }\n        return szNum;\n    }\n\n    public static Object stringToValue( String string ) {\n        if ( string.equals(\"\") ) {\n            return string;\n        }\n        else if (string.equalsIgnoreCase(\"true\")) {\n            return Boolean.TRUE;\n        }\n        else if (string.equalsIgnoreCase(\"false\")) {\n            return Boolean.FALSE;\n        }\n        else if ( string.equalsIgnoreCase(\"null\") || string.equalsIgnoreCase(\"undefined\") ) {\n            return JSON.NULL;\n        }\n        else {\n            char b = string.charAt(0);\n            if ( b >= '0' && b <= '9' || b == '-' ) {\n                try {\n                    if ( string.indexOf( '.' 
) <= -1 && string.indexOf( 'e' ) <= -1 && string.indexOf( 'E' ) <= -1 ) {\n                        String szToken = JSONUtils.noneStartZeroInteger( string );\n                        if( szToken.length() < 18 ) {\n                            Long n = Long.parseLong( szToken );\n                            if ( szToken.equals( n.toString() ) ) {\n                                if ( n == (long)n.intValue() ) {\n                                    return n.intValue();\n                                }\n\n                                return n;\n                            }\n                        }\n                        else {\n                            return new BigInteger( szToken );\n                        }\n                    }\n                    else {\n                        if( string.length() < 18 ) {\n                            Double d = Double.valueOf( string );\n                            if ( !d.isInfinite() && !d.isNaN() ) {\n                                return d;\n                            }\n                        }\n                        else {\n                            return new BigDecimal( string );\n                        }\n                    }\n                }\n                catch ( Exception e ) {\n                    //e.printStackTrace();\n                }\n            }\n\n            return string;\n        }\n    }\n\n    public static Object wrapValue( Object value, boolean bWrapBean ) {\n        try {\n            if ( value == null ) {\n                return JSON.NULL;\n            }\n            else if (!(value instanceof JSONObject) && !(value instanceof JSONArray) && !JSON.NULL.equals(value) && !(value instanceof JSONString) && !(value instanceof Byte) && !(value instanceof Character) && !(value instanceof Short) && !(value instanceof Integer) && !(value instanceof Long) && !(value instanceof Boolean) && !(value instanceof Float) && !(value instanceof Double) && !(value instanceof String)) {\n  
              if (value instanceof Collection) {\n                    return new JSONArraytron((Collection)value);\n                }\n                else if ( value.getClass().isArray() ) {\n                    return new JSONArraytron(value);\n                }\n                else if ( value instanceof Map ) {\n                    return new JSONMaptron((Map)value);\n                }\n                else {\n                    if( bWrapBean ) {\n                        Package objectPackage = value.getClass().getPackage();\n                        String objectPackageName = objectPackage != null ? objectPackage.getName() : \"\";\n                        return !objectPackageName.startsWith(\"java.\") && !objectPackageName.startsWith(\"javax.\") && value.getClass().getClassLoader() != null ? new JSONMaptron(value) : value.toString();\n                    }\n                    else {\n                        return null;\n                    }\n                }\n            }\n            else {\n                return value;\n            }\n        }\n        catch ( Exception e ) {\n            return null;\n        }\n    }\n\n    public static Object wrapValue( Object value ) {\n        return JSONUtils.wrapValue( value, true );\n    }\n\n    public static String[] getOwnPropertyNames ( JSONObject that ) {\n        int nSize = that.size();\n        if ( nSize == 0 ) {\n            return null;\n        }\n        else {\n            Iterator iterator = that.keys();\n            String[] names = new String[nSize];\n\n            for( int i = 0; iterator.hasNext(); ++i ) {\n                names[i] = (String)iterator.next();\n            }\n\n            return names;\n        }\n    }\n\n    public static Object cloneElement ( Object that ) {\n        if( that instanceof JSONArray ) {\n            return   ( ( JSONArray ) that ).clone();\n        }\n        else if( that instanceof JSONObject ) {\n            return   ( ( JSONObject ) that ).clone();\n    
    }\n        return that;\n    }\n\n    public static int asInt32Key( Object key ) {\n        if( key instanceof Integer ) {\n            return (int) key;\n        }\n        else if( key instanceof Float || key instanceof Double || key instanceof BigDecimal ) {\n            throw new JSONException( \"Array does not allow float as key.\" );\n        }\n        else if( key instanceof Number ) {\n            return ((Number) key).intValue();\n        }\n        else if( key instanceof String ) {\n            return Integer.parseInt( (String) key );\n        }\n\n        throw new JSONException( \"Key of Array should be integer or integer-fmt-string.\" );\n    }\n\n    public static int toInt32Key( Object key ) {\n        if( key instanceof Integer ) {\n            return (int) key;\n        }\n        else if( key instanceof Number ) {\n            return ((Number) key).intValue();\n        }\n        else if( key instanceof String ) {\n            return Integer.parseInt( (String) key );\n        }\n\n        return Integer.parseInt( key.toString() );\n    }\n\n    public static String asStringKey( Object key ) {\n        if( key instanceof String ) {\n            return (String) key;\n        }\n\n        return key.toString();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/ObjectJSONCursorUnmarshal.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport java.io.InputStream;\nimport java.io.InputStreamReader;\nimport java.io.Reader;\nimport java.io.StringReader;\nimport java.lang.reflect.Array;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Type;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\nimport java.util.LinkedHashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.ObjectiveEvaluator;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.UnitHelper;\n\npublic class ObjectJSONCursorUnmarshal extends ArchCursorParser {\n    public static JSONDecoder INNER_OBJECT_DECODER = new JSONObjectDecoder() {\n        @Override\n        protected void set( Object self, String key, Object val ) {\n            ObjectiveEvaluator.MapStructures.set( self, key, val );\n        }\n    };\n\n    public static JSONDecoder INNER_ARRAY_DECODER = new JSONArrayDecoder() {\n        @Override\n        @SuppressWarnings( \"unchecked\" )\n        protected void add( Object self, Object parent, Object indexKey, Object val ) {\n            if( self.getClass().isArray() ) {\n                Object[] ref = (Object[])self; // Fuck java, no pointer.\n                ref[ 0 ] = UnitHelper.append( (Object[])ref[ 0 ], val );\n            }\n            else if( self instanceof Collection ) {\n                ((Collection) self).add( val );\n            }\n        }\n    };\n\n\n    protected Class<? >         mClassType   ;\n    protected Type              mRootType    ;\n    protected final TypeContext mTypeContext = new TypeContext();\n\n    public ObjectJSONCursorUnmarshal( Reader reader, Class<? 
> classType, Type rootType ) throws JSONParseException {\n        super(reader);\n        this.mRootType   = rootType;\n        this.mClassType  = classType;\n    }\n\n    public ObjectJSONCursorUnmarshal( Reader reader, Type rootType ) throws JSONParseException {\n        this( reader, ObjectiveEvaluator.resolveRawClass( rootType ), rootType );\n    }\n\n    public ObjectJSONCursorUnmarshal( Reader reader, Class<? > classType ) throws JSONParseException {\n        this( reader, (Type) classType );\n    }\n\n    public ObjectJSONCursorUnmarshal( InputStream inputStream, Class<? > classType ) throws JSONParseException {\n        this((Reader)( new InputStreamReader(inputStream)), classType );\n    }\n\n    public ObjectJSONCursorUnmarshal( InputStream inputStream, Type rootType ) throws JSONParseException {\n        this((Reader)( new InputStreamReader(inputStream)), ObjectiveEvaluator.resolveRawClass( rootType ), rootType );\n    }\n\n    public ObjectJSONCursorUnmarshal( String s, Class<? > classType ) throws JSONParseException {\n        this((Reader)( new StringReader(s)), classType );\n    }\n\n    public ObjectJSONCursorUnmarshal( String s, Type rootType ) throws JSONParseException {\n        this((Reader)( new StringReader(s)), ObjectiveEvaluator.resolveRawClass( rootType ), rootType );\n    }\n\n\n    protected GenericTypeContext nextGenericTypeContext( Object parent, Object indexKey ) throws JSONParseException {\n        Class<? 
> thisType    ;\n        Type elemGenericType  ;\n\n        if ( parent == null ) {\n            if ( this.mTypeContext.isEmpty() ) {\n                this.mTypeContext.push(\n                        this.mRootType,\n                        ObjectiveEvaluator.extractGenericElementType( this.mRootType )\n                );\n            }\n\n            TypeContext.Frame frame = this.mTypeContext.peek();\n\n            thisType        = ObjectiveEvaluator.resolveRawClass( frame.mContainerType );\n            elemGenericType = frame.mElementType;\n        }\n        else {\n            TypeContext.Frame frame = this.mTypeContext.peek();\n\n            if ( frame != null && frame.mElementType != null ) {\n                elemGenericType = frame.mElementType;\n                thisType        = ObjectiveEvaluator.resolveRawClass( frame.mElementType );\n            }\n            else {\n                elemGenericType = ObjectiveEvaluator.MapStructures.getElementGenericType( parent, indexKey.toString() );\n                thisType        = ObjectiveEvaluator.MapStructures.getType( parent, indexKey );\n            }\n        }\n\n        GenericTypeContext context = new GenericTypeContext();\n        context.thisType = thisType;\n        context.elemGenericType = elemGenericType;\n\n        return context;\n    }\n\n    @Override\n    protected Object newJSONArray( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) {\n        try {\n            GenericTypeContext context = this.nextGenericTypeContext( parent, indexKey );\n            Class<? 
> thisType    = context.thisType;\n            Type elemGenericType  = context.elemGenericType;\n\n//            if ( parent != null ){\n//                elemGenericType = ObjectiveEvaluator.MapStructures.getElementGenericType( parent, indexKey.toString() );\n//                thisType = ObjectiveEvaluator.MapStructures.getType( parent, indexKey );\n//            }\n//            else {\n//                thisType = ObjectiveEvaluator.resolveRawClass( this.mRootType );\n//                elemGenericType = ObjectiveEvaluator.extractGenericElementType( this.mRootType );\n//            }\n\n            Object    self;\n\n            if ( thisType == null ) {\n                self = new Object(); // Dummy\n                ObjectJSONCursorUnmarshal.INNER_ARRAY_DECODER.decode( self, parent, indexKey,this, elemGenericType );\n                return self;\n            }\n\n            if ( thisType.equals( List.class ) || thisType.equals( Void.class ) || thisType.equals( Object.class ) ) {\n                thisType = JSONArraytron.class;\n                if( elemGenericType != null ) {\n                    String genericTypeName = elemGenericType.getTypeName();\n                    if( !genericTypeName.equals( \"?\" ) && !genericTypeName.equals( Object.class.getSimpleName() ) ) {\n                        thisType = ArrayList.class;\n                    }\n                }\n            }\n            else if ( thisType.equals( Set.class ) ) {\n                thisType = LinkedHashSet.class;\n            }\n\n            if ( thisType.isArray() ) {\n                Class<?> innerType = thisType.getComponentType();\n                if ( innerType.equals( Object.class ) ) {\n                    self = new Object[]{ new Object[ 0 ] };  // Object[]*, ptr -> Object[]\n                }\n                else {\n                    elemGenericType   = innerType;\n                    self = new Object[]{ Array.newInstance( innerType, 0 ) };\n                    // Object[]*, ptr 
-> Object[]\n                }\n            }\n            else {\n                self = thisType.getConstructor().newInstance();\n            }\n\n            ObjectJSONCursorUnmarshal.INNER_ARRAY_DECODER.decode( self, parent, indexKey,this, elemGenericType );\n            if ( self.getClass().isArray() ) {\n                return Array.get( self, 0 );\n            }\n            return self;\n        }\n        catch ( NoSuchMethodException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            return null;\n        }\n        catch ( InstantiationException e1 ) {\n            throw new JSONParseException( e1 );\n        }\n    }\n\n    protected Class<?> findDirectJSONObjectAssignableType( Class<? > thisType ) {\n        if( thisType == null || thisType.equals( Map.class ) || thisType.equals( Void.class ) || thisType.equals( Object.class ) ) {\n            thisType = JSONMaptron.class;\n        }\n        else if( thisType.isInterface() &&  Map.class.isAssignableFrom( thisType ) ) {\n            thisType = JSONMaptron.class;\n        }\n        else if( thisType.isInterface() &&  JSONObject.class.isAssignableFrom( thisType ) ) {\n            thisType = JSONMaptron.class;\n        }\n\n        return thisType;\n    }\n\n    @Override\n    protected Object newJSONObject( Object indexKey, ArchCursorParser parser, Object parent, Object[] args ) {\n        try {\n            GenericTypeContext context = this.nextGenericTypeContext( parent, indexKey );\n            Class<? 
> thisType    = context.thisType;\n            Type elemGenericType  = context.elemGenericType;\n//            if( parent == null ){\n//                thisType = ObjectiveEvaluator.resolveRawClass( this.mRootType );\n//                elemGenericType = ObjectiveEvaluator.extractGenericElementType( this.mRootType );\n//            }\n//            else {\n//                thisType = ObjectiveEvaluator.MapStructures.getType( parent, indexKey );\n//                elemGenericType = ObjectiveEvaluator.MapStructures.getFieldGenericType( parent, indexKey.toString() );\n//            }\n\n            thisType = this.findDirectJSONObjectAssignableType( thisType );\n            if( elemGenericType != null ) {\n                String genericTypeName = elemGenericType.getTypeName();\n                if( genericTypeName.contains( \"<\" )  && genericTypeName.contains( \">\" ) ) {\n                    thisType = LinkedHashMap.class;\n                }\n            }\n\n            Object    self;\n\n            if( thisType == null ) {\n                self = new Object(); // Dummy\n                ObjectJSONCursorUnmarshal.INNER_OBJECT_DECODER.decode( self, parent, indexKey,this, elemGenericType );\n                return self;\n            }\n\n            if( args != null && args.length > 0 ) {\n                Object dyType = args[ 0 ];\n                Type eleType  = (Type) dyType;\n                if( eleType != null ) {\n                    if( parent != null && parent.getClass().isArray() ) {\n                        if( !dyType.equals( Object[].class ) && !dyType.equals( Object.class ) && !dyType.equals( Map.class ) ) {\n                            thisType = (Class<?>) eleType;\n                        }\n                    }\n                    else {\n                        String[] genericTypeNames = ReflectionUtils.extractGenericClassNames( eleType.getTypeName() );\n                        if( genericTypeNames != null && genericTypeNames.length > 0 ) {\n      
                      String genericTypeName;\n                            if( genericTypeNames.length > 1 ) {\n                                genericTypeName = genericTypeNames[ 1 ]; // Map value.\n                            }\n                            else {\n                                genericTypeName = genericTypeNames[ 0 ]; // Collection value.\n                            }\n\n                            if( !genericTypeName.equals( \"?\" ) && !genericTypeName.equals( Object.class.getSimpleName() ) ) {\n                                try{\n                                    thisType = this.getClass().getClassLoader().loadClass( genericTypeName );\n                                    thisType = this.findDirectJSONObjectAssignableType( thisType );\n                                }\n                                catch ( ClassNotFoundException e ) {\n                                    thisType = JSONMaptron.class;\n                                }\n                            }\n                        }\n                    }\n                }\n            }\n            self     = thisType.getConstructor().newInstance();\n\n            ObjectJSONCursorUnmarshal.INNER_OBJECT_DECODER.decode( self, parent, indexKey, this, elemGenericType );\n            return self;\n        }\n        catch ( NoSuchMethodException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            return null;\n        }\n        catch ( InstantiationException e1 ) {\n            throw new JSONParseException( e1 );\n        }\n    }\n\n\n    public static class GenericTypeContext {\n        protected Class<? > thisType    ;\n        protected Type elemGenericType  ;\n    }\n}\n\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/TypeContext.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport java.lang.reflect.Type;\nimport java.util.ArrayDeque;\nimport java.util.Deque;\n\npublic final class TypeContext {\n\n    static final class Frame {\n        final Type mContainerType;\n        final Type mElementType;\n\n        Frame( Type containerType, Type elementType ) {\n            this.mContainerType = containerType;\n            this.mElementType   = elementType;\n        }\n    }\n\n    private final Deque<Frame> mStack = new ArrayDeque<>();\n\n    public void push( Type containerType, Type elementType ) {\n        this.mStack.push( new Frame( containerType, elementType ) );\n    }\n\n    public void pop() {\n        if ( !this.mStack.isEmpty() ) {\n            this.mStack.pop();\n        }\n    }\n\n    public Frame peek() {\n        return this.mStack.peek();\n    }\n\n    public boolean isEmpty() {\n        return this.mStack.isEmpty();\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/TypeReference.java",
    "content": "package com.pinecone.framework.util.json;\n\nimport java.lang.reflect.ParameterizedType;\nimport java.lang.reflect.Type;\n\npublic abstract class TypeReference<T> implements Comparable<TypeReference<T>> {\n    protected final Type _type;\n\n    protected TypeReference() {\n        Type superClass = this.getClass().getGenericSuperclass();\n        if (superClass instanceof Class) {\n            throw new IllegalArgumentException(\"Internal error: TypeReference constructed without actual type information\");\n        }\n        else {\n            this._type = ((ParameterizedType)superClass).getActualTypeArguments()[0];\n        }\n    }\n\n    public Type getType() {\n        return this._type;\n    }\n\n    @Override\n    public int compareTo(TypeReference<T> o) {\n        return 0;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/binary/BsonTraits.java",
    "content": "package com.pinecone.framework.util.json.binary;\n\nimport com.pinecone.framework.util.ReflectionUtils;\n\nimport java.io.OutputStream;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\n\npublic final class BsonTraits {\n    public static String FUN_TO_BSON_BYTES_NAME          = \"toBsonBytes\";\n    public static String FUN_BSON_SERIALIZE_NAME         = \"bsonSerialize\";\n\n    public static byte[] invokeToBsonBytes    ( Object that ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException {\n        Method fnToBsonBytes = that.getClass().getMethod( BsonTraits.FUN_TO_BSON_BYTES_NAME );\n        ReflectionUtils.makeAccessible( fnToBsonBytes );\n        return (byte[]) fnToBsonBytes.invoke( that );\n    }\n\n    public static void invokeBsonSerialize    ( Object that, OutputStream os ) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException {\n        Method fnBsonSerialize = that.getClass().getMethod( BsonTraits.FUN_BSON_SERIALIZE_NAME, OutputStream.class );\n        ReflectionUtils.makeAccessible( fnBsonSerialize );\n        fnBsonSerialize.invoke( that, os );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/binary/Bsonut.java",
    "content": "package com.pinecone.framework.util.json.binary;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\nimport java.io.OutputStream;\n\npublic interface Bsonut extends Pinenut {\n    default byte[] toBsonBytes() {\n        ByteArrayOutputStream os = new ByteArrayOutputStream();\n        try{\n            this.bsonSerialize( os );\n            os.flush();\n            return os.toByteArray();\n        }\n        catch ( IOException e ) {\n            return null;\n        }\n    }\n\n    void bsonSerialize( OutputStream os ) throws IOException;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/handler/EncodeHandlerRegistry.java",
    "content": "package com.pinecone.framework.util.json.handler;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface EncodeHandlerRegistry extends Pinenut {\n\n    <T> void register( Class<T> type, JSONObjectEncodeHandler<? super T> serializer );\n\n    <T> JSONObjectEncodeHandler<T> get( Class<?> type );\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/handler/GenericEncodeHandlerRegistry.java",
    "content": "package com.pinecone.framework.util.json.handler;\n\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\npublic class GenericEncodeHandlerRegistry implements EncodeHandlerRegistry {\n\n    protected final Map<Class<?>, JSONObjectEncodeHandler<?>> mSerializers;\n\n    public GenericEncodeHandlerRegistry() {\n        this.mSerializers = new ConcurrentHashMap<>();\n    }\n\n    public <T> void register( Class<T> type, JSONObjectEncodeHandler<? super T> serializer ) {\n        this.mSerializers.put( type, serializer );\n    }\n\n    @SuppressWarnings(\"unchecked\")\n    public <T> JSONObjectEncodeHandler<T> get( Class<?> type ) {\n        JSONObjectEncodeHandler<?> exact = this.mSerializers.get( type );\n        if ( exact != null ) {\n            return (JSONObjectEncodeHandler<T>) exact;\n        }\n\n        for ( Map.Entry<Class<?>, JSONObjectEncodeHandler<?>> e : this.mSerializers.entrySet() ) {\n            if ( e.getKey().isAssignableFrom( type ) ) {\n                return (JSONObjectEncodeHandler<T>) e.getValue();\n            }\n        }\n\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/handler/JSONObjectEncodeHandler.java",
    "content": "package com.pinecone.framework.util.json.handler;\n\nimport java.io.IOException;\nimport java.io.Writer;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.GenericJSONEncoder;\n\npublic interface JSONObjectEncodeHandler<T> extends Pinenut {\n\n    void serialize( T object, Writer writer, int nIndentFactor, int nIndentBlankNum, GenericJSONEncoder encoder ) throws IOException;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/AnnotatedJSONInjector.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Field;\n\npublic class AnnotatedJSONInjector extends JSONInjector {\n    public AnnotatedJSONInjector() {\n        super();\n    }\n\n    @Override\n    protected String getFieldName( Field field ){\n        String szKey = AnnotatedJSONInjector.getAnnotatedKey( field );\n        if( szKey == null ) {\n            return null;\n        }\n        else if( szKey.isEmpty() ) {\n            return field.getName();\n        }\n\n        return szKey;\n    }\n\n    public static String getAnnotatedKey( Field field ) {\n        String szKey = null;\n\n        Annotation[] annotations = field.getAnnotations();\n        for ( Annotation a : annotations ) {\n            if( a instanceof JSONGet ) {\n                szKey = ( (JSONGet) a ).value();\n                break;\n            }\n            else if( a instanceof MapStructure ) {\n                szKey = ( (MapStructure) a ).value();\n                break;\n            }\n        }\n        return szKey;\n    }\n\n    public static AnnotatedJSONInjector instance() {\n        return new AnnotatedJSONInjector();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/AnnotatedObjectInjector.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport com.pinecone.framework.util.ReflectionUtils;\n\nimport java.lang.reflect.Field;\nimport java.util.List;\nimport java.util.Map;\n\npublic class AnnotatedObjectInjector extends ObjectInjector {\n    public AnnotatedObjectInjector( Class type ) {\n        super( type );\n    }\n\n    protected String getAnnotatedKey( Field field ) {\n        return AnnotatedJSONInjector.getAnnotatedKey( field );\n    }\n\n    @Override\n    public    Object inject      ( Map that, Class<?> type, Object instance ) {\n        Field[] fields = type.getDeclaredFields();\n        for ( Field field : fields ) {\n            ReflectionUtils.makeAccessible( field );\n            try {\n                String szKey = this.getAnnotatedKey( field );\n                if( szKey == null ) {\n                    continue;\n                }\n                else if( szKey.isEmpty() ) {\n                    szKey = field.getName();\n                }\n\n                Object val = that.get( this.getFieldName( szKey ) );\n                if( val == null ){\n                    val = that.get( szKey );\n                }\n                if( val == null && szKey.contains( \".\" ) ){\n                    val = this.getValueFromMapRecursively( that, szKey );\n                }\n\n                try {\n                    Object j = this.inject( val , field.getType() );\n                    field.set( instance, j );\n                }\n                catch ( IllegalArgumentException e ){\n                    //e.printStackTrace();\n                    field = null;\n                }\n            }\n            catch ( IllegalAccessException e ){\n                throw new IllegalStateException(e); // This should never be happened.\n            }\n        }\n\n        return instance;\n    }\n\n    protected Object getValueFromMapRecursively( Map map, String key ) {\n        String[] keys = key.split(\"\\\\.|\\\\/\");\n        
Object value = map;\n        for ( String k : keys ) {\n            if ( value instanceof Map ) {\n                value = ((Map) value).get(k);\n            }\n            else if ( value instanceof List ) {\n                try{\n                    value = ((List) value).get( Integer.parseInt( k ) );\n                }\n                catch ( NumberFormatException e ) {\n                    return null;\n                }\n            }\n            else {\n                return null;\n            }\n        }\n        return value;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/ArchBeanColonist.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.stereotype.JavaBeans;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.JSONUtils;\n\npublic abstract class ArchBeanColonist implements BeanColonist {\n    public ArchBeanColonist() {\n\n    }\n\n    @Override\n    public void populate( Object bean, JSONObject target, boolean bRecursive ) {\n        Class klass = bean.getClass();\n        boolean includeSuperClass = klass.getClassLoader() != null;\n        Method[] methods = includeSuperClass ? klass.getMethods() : klass.getDeclaredMethods();\n        Map<String, Object > targetMap = target.getMap();\n\n        for( int i = 0; i < methods.length; ++i ) {\n            try {\n                Method method = methods[i];\n                if ( Modifier.isPublic( method.getModifiers() ) ) {\n                    String key = JavaBeans.getGetterMethodKeyName( method );\n                    if( key == null ) {\n                        continue;\n                    }\n\n                    if ( key.length() > 0 && Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 0 ) {\n                        key = JavaBeans.methodKeyNameLowerCaseNormalize( key );\n\n                        method.setAccessible( true );\n                        Object result = method.invoke( bean, (Object[])null );\n\n                        this.putValue( targetMap, key, result, bRecursive );\n                    }\n                }\n            }\n            catch ( InvocationTargetException | IllegalAccessException e ) {\n                e.printStackTrace();\n                // Do nothing.\n            }\n        }\n    }\n\n    protected void putValue( Map<String, Object > targetMap, String key, Object result, boolean bRecursive 
) {\n        if ( result != null ) {\n            Object v = JSONUtils.wrapValue( result, bRecursive );\n            if( v == null ) {\n                v = result;\n            }\n            targetMap.put( key, v );\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/BeanColonist.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\n\npublic interface BeanColonist extends Pinenut {\n    BeanColonist WrappedColonist = new WrappedBeanColonist();\n    BeanColonist DirectColonist  = new DirectBeanColonist();\n\n    void populate( Object bean, JSONObject target, boolean bRecursive ) ;\n\n    default void populate( Object bean, JSONObject target ) {\n        this.populate( bean, target, true );\n    }\n\n    default JSONObject populate( Object bean, boolean bRecursive ) {\n        JSONObject jo = new JSONMaptron();\n        this.populate( bean, jo, bRecursive );\n        return jo;\n    }\n\n    default JSONObject populate( Object bean ) {\n        return this.populate( bean, true );\n    }\n\n\n    default void populate( Object bean, JSONObject target, boolean bRecursive, Set<String > exceptedKeys ) {\n        this.populate( bean, target, bRecursive );\n\n        target.removeAll( exceptedKeys );\n    }\n\n    default void populate( Object bean, JSONObject target, Set<String > exceptedKeys ) {\n        this.populate( bean, target, true, exceptedKeys );\n    }\n\n    default JSONObject populate( Object bean, boolean bRecursive, Set<String > exceptedKeys ) {\n        JSONObject jo = new JSONMaptron();\n        this.populate( bean, jo, bRecursive, exceptedKeys );\n        return jo;\n    }\n\n    default JSONObject populate( Object bean, Set<String > exceptedKeys ) {\n        return this.populate( bean, true, exceptedKeys );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/BeanJSONEncoder.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.io.IOException;\nimport java.io.Writer;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface BeanJSONEncoder extends Pinenut {\n    BeanJSONEncoder BasicEncoder = new GenericBeanJSONEncoder();\n\n    String encode( Object bean );\n\n    String encode( Object bean, Set<String > exceptedKeys );\n\n    void encode( Object bean, Writer writer, int nIndentFactor ) throws IOException;\n\n    void encode( Object bean, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException;\n\n    default void encode( Object bean, Writer writer ) throws IOException {\n        this.encode( bean, writer, 0 );\n    }\n\n    void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException;\n\n    String valueJsonify( Object val );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/BeanMapDecoder.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\n\npublic interface BeanMapDecoder extends Pinenut {\n    BeanMapDecoder BasicDecoder = new GenericBeanMapDecoder();\n\n    static boolean    trialHomogeneity( Object that ) {\n        return  JSONInjector.trialHomogeneity( that ) || that instanceof Map;\n    }\n\n    Object decode( Object bean, Map<String, Object > jo, Set<String > exceptedKeys, boolean bRecursive );\n\n    Object decode( Object bean, Map<String, Object > jo, boolean bRecursive ) ;\n\n    Object decode( Object bean, Map<String, Object > jo, Set<String > exceptedKeys );\n\n    Object decode( Object bean, Map<String, Object > jo ) ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/DirectBeanColonist.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.util.Map;\n\npublic class DirectBeanColonist extends ArchBeanColonist {\n    public DirectBeanColonist() {\n        super();\n    }\n\n    @Override\n    protected void putValue( Map<String, Object> targetMap, String key, Object result, boolean bRecursive ) {\n        targetMap.put( key, result );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/DirectJSONInjector.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\npublic class DirectJSONInjector extends JSONInjector {\n    public DirectJSONInjector() {\n        super();\n    }\n\n    public static JSONInjector instance() {\n        return new DirectJSONInjector();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/DirectObjectInjector.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport com.pinecone.framework.system.functions.Function;\n\npublic class DirectObjectInjector extends ObjectInjector {\n    protected String      mszFieldNS        = \"\"    ;\n    protected boolean     mbUsingHungary    = false ;\n    protected Function    mfnGetFieldName   = null  ;\n\n\n    public DirectObjectInjector( String szFieldNS, Class stereotype ){\n        super( stereotype );\n        this.mszFieldNS  = szFieldNS;\n    }\n\n    public DirectObjectInjector( boolean bUsingHungary, Class stereotype ) {\n        super( stereotype );\n        this.mbUsingHungary = bUsingHungary;\n        if( this.mbUsingHungary ){\n            this.mszFieldNS = \"m\";\n        }\n    }\n\n    public DirectObjectInjector( Function fnGetFieldName, Class stereotype ) {\n        super( stereotype );\n        this.mfnGetFieldName = fnGetFieldName;\n    }\n\n    public DirectObjectInjector( Class stereotype ) {\n        this( \"\", stereotype );\n    }\n\n    @Override\n    protected String getFieldName( String szKey ){\n        if( this.mfnGetFieldName != null ) {\n            try {\n                szKey = (String) this.mfnGetFieldName.invoke( szKey );\n            }\n            catch ( Exception e ) {\n                return szKey;\n            }\n        }\n        else {\n            if( !this.mszFieldNS.isEmpty() ){\n                StringBuilder sb = new StringBuilder();\n                sb.append( szKey );\n\n                if( this.mbUsingHungary ){\n                    sb.setCharAt( 0, Character.toUpperCase( sb.charAt(0) ) );\n                }\n                szKey = this.mszFieldNS + sb.toString();\n            }\n        }\n\n        return szKey;\n    }\n\n    public String getFieldNamespace() {\n        return this.mszFieldNS;\n    }\n\n    public void setFieldNamespace( String ns ) {\n        this.mszFieldNS = ns;\n    }\n\n\n    public static DirectObjectInjector instance( boolean 
bUsingHungary, Class stereotype ) {\n        return new DirectObjectInjector( bUsingHungary, stereotype );\n    }\n\n    public static DirectObjectInjector instance( String szFieldNS, Class stereotype ) {\n        return new DirectObjectInjector( szFieldNS, stereotype );\n    }\n\n    public static DirectObjectInjector instance( Function fnGetFieldName, Class stereotype ) {\n        return new DirectObjectInjector( fnGetFieldName, stereotype );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/GenericBeanJSONEncoder.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.io.IOException;\nimport java.io.Writer;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.stereotype.JavaBeans;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.GenericJSONEncoder;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic class GenericBeanJSONEncoder implements BeanJSONEncoder {\n    public GenericBeanJSONEncoder() {\n\n    }\n\n    @Override\n    public String valueJsonify( Object val ) {\n        return JSON.stringify( val );\n    }\n\n    @Override\n    public void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        JSONEncoder.BASIC_JSON_ENCODER.write( val, writer, nIndentFactor, nIndentBlankNum );\n    }\n\n    @Override\n    public String encode( Object bean, Set<String > exceptedKeys ) {\n        Class klass = bean.getClass();\n        boolean includeSuperClass = klass.getClassLoader() != null;\n        Method[] methods = includeSuperClass ? 
klass.getMethods() : klass.getDeclaredMethods();\n\n        StringBuilder sb = new StringBuilder( \"{\" );\n        for ( int i = 0; i < methods.length; ++i ) {\n            try {\n                Method method = methods[i];\n                if ( Modifier.isPublic( method.getModifiers() ) ) {\n                    String key = JavaBeans.getGetterMethodKeyName( method );\n                    if ( !StringUtils.isEmpty( key ) ) {\n                        if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 0 ) {\n                            key = JavaBeans.methodKeyNameLowerCaseNormalize( key );\n\n                            if( exceptedKeys != null && exceptedKeys.contains( key ) ) {\n                                continue;\n                            }\n\n                            Object val;\n                            try {\n                                val = method.invoke( bean );\n                                sb.append( '\\\"' ).append( key ).append( \"\\\":\" );\n                            }\n                            catch ( IllegalAccessException | InvocationTargetException e ) {\n                                continue;\n                            }\n\n                            sb.append( this.valueJsonify( val ) );\n                            sb.append( ',' );\n                        }\n                    }\n                }\n            }\n            catch ( Exception e ) {\n                e.printStackTrace();\n                // Do nothing.\n            }\n        }\n\n        if( sb.charAt( sb.length() - 1 ) == ',' ) {\n            sb.deleteCharAt( sb.length() - 1 );\n        }\n        sb.append( '}' );\n\n        return sb.toString();\n\n//        StringWriter w = new StringWriter();\n//        try {\n//            synchronized( w.getBuffer() ) {\n//                this.encode( bean, w );\n//                return w.toString();\n//            }\n//        }\n//        catch ( IOException e ){\n//  
          return null;\n//        }\n    }\n\n    @Override\n    public String encode( Object bean ) {\n        return this.encode( bean, (Set<String >) null );\n    }\n\n    @Override\n    public void encode( Object bean, Writer writer, int nIndentFactor ) throws IOException {\n        this.encode( bean, writer, nIndentFactor, 0 );\n    }\n\n    @Override\n    public void encode( Object bean, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        Class<?> klass = bean.getClass();\n        boolean includeSuperClass = klass.getClassLoader() != null;\n        Method[] methods = includeSuperClass ? klass.getMethods() : klass.getDeclaredMethods();\n\n        writer.write( \"{\" );\n        boolean isFirst = true;\n\n        for ( int i = 0; i < methods.length; ++i ) {\n            try {\n                Method method = methods[i];\n                if ( Modifier.isPublic( method.getModifiers() ) ) {\n                    String key = JavaBeans.getGetterMethodKeyName( method );\n                    if ( !StringUtils.isEmpty( key ) ) {\n                        if ( Character.isUpperCase( key.charAt( 0 ) ) && method.getParameterTypes().length == 0 ) {\n                            key = JavaBeans.methodKeyNameLowerCaseNormalize( key );\n\n                            if ( !isFirst ) {\n                                writer.write( \",\" );\n                            }\n\n                            int nNewIndent = nIndentBlankNum + nIndentFactor;\n                            if ( nNewIndent > 0 ) {\n                                writer.write('\\n');\n                            }\n                            GenericJSONEncoder.indentBlank( writer, nNewIndent );\n\n\n                            Object val;\n                            try {\n                                val = method.invoke( bean );\n                                writer.write( \"\\\"\" + key + \"\\\":\" );\n                            }\n                            catch 
( IllegalAccessException | InvocationTargetException e ) {\n                                continue;\n                            }\n\n                            this.valueJsonify( val, writer, nIndentFactor, nNewIndent );\n                            isFirst = false;\n\n                            GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n                        }\n                    }\n                }\n            }\n            catch ( Exception e ) {\n                e.printStackTrace();\n                // Do nothing.\n            }\n        }\n\n        if ( nIndentFactor > 0 ) {\n            writer.write( '\\n' );\n        }\n        writer.write( \"}\" );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/GenericBeanMapDecoder.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.stereotype.JavaBeans;\nimport com.pinecone.framework.util.StringUtils;\n\npublic class GenericBeanMapDecoder implements BeanMapDecoder {\n    @SuppressWarnings( \"unchecked\" )\n    protected Object decode0( Object bean, Map jo, Set<String > exceptedKeys, boolean bRecursive ) {\n        return this.decode( bean, (Map<String, Object >)jo, exceptedKeys, bRecursive );\n    }\n\n    @Override\n    public Object decode( Object bean, Map<String, Object > jo, Set<String > exceptedKeys, boolean bRecursive ) {\n        if( jo == null ) {\n            return bean;\n        }\n\n        Class klass = bean.getClass();\n        boolean includeSuperClass = klass.getClassLoader() != null;\n        Method[] methods = includeSuperClass ? 
klass.getMethods() : klass.getDeclaredMethods();\n\n        for( int i = 0; i < methods.length; ++i ) {\n            try {\n                Method method = methods[i];\n                if ( Modifier.isPublic( method.getModifiers() ) ) {\n                    String legKey = JavaBeans.getSetterMethodKeyName( method );\n                    String key = legKey;\n                    if( !StringUtils.isEmpty( key ) ) {\n                        if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 1 ) {\n                            key = JavaBeans.methodKeyNameLowerCaseNormalize( key );\n\n                            Object desiredVal = jo.get( key );\n                            if( desiredVal == null ) {\n                                continue;\n                            }\n                            else if( exceptedKeys != null && exceptedKeys.contains( key ) ) {\n                                continue;\n                            }\n\n                            try {\n                                Object recursiveBean = null;\n                                if( bRecursive ) {\n                                    String szGetterMethod = JavaBeans.MethodMajorKeyGet + legKey;\n                                    Method      curGetter = bean.getClass().getMethod( szGetterMethod );\n                                    if( curGetter != null ) {\n                                        recursiveBean = curGetter.invoke( bean );\n                                        if( !BeanMapDecoder.trialHomogeneity( recursiveBean ) ) {\n                                            recursiveBean = null; // Not a bean.\n                                        }\n                                    }\n                                }\n\n                                if( recursiveBean == null ) {\n                                    method.invoke( bean, desiredVal );\n                                }\n                                else {\n     
                               if( desiredVal instanceof Map ) {\n                                        this.decode0( recursiveBean, (Map)desiredVal, exceptedKeys, bRecursive );\n                                    }\n                                }\n                            }\n                            catch ( IllegalAccessException | InvocationTargetException ignore ) {\n                                ignore.printStackTrace();\n                                // Do nothing.\n                            }\n                        }\n                    }\n                }\n            }\n            catch ( Exception ignore ) {\n                ignore.printStackTrace();\n                // Do nothing.\n            }\n        }\n\n        return bean;\n    }\n\n    @Override\n    public Object decode( Object bean, Map<String, Object > jo, boolean bRecursive ) {\n        return this.decode( bean, jo, (Set<String >) null, bRecursive );\n    }\n\n    @Override\n    public Object decode( Object bean, Map<String, Object> jo, Set<String> exceptedKeys ) {\n        return this.decode( bean, jo, exceptedKeys, false );\n    }\n\n    @Override\n    public Object decode( Object bean, Map<String, Object > jo ) {\n        return this.decode( bean, jo, false );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/GenericStructJSONDecoder.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.lang.reflect.Field;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.util.StringUtils;\n\npublic class GenericStructJSONDecoder implements StructJSONDecoder {\n    @SuppressWarnings( \"unchecked\" )\n    protected Object decode0( Object struct, Map jo, Set<String > exceptedKeys, boolean bRecursive ) {\n        return this.decode( struct, (Map<String, Object >)jo, exceptedKeys, bRecursive );\n    }\n\n    @Override\n    public Object decode( Object struct, Map<String, Object > jo, Set<String > exceptedKeys, boolean bRecursive ) {\n        if( jo == null ) {\n            return struct;\n        }\n\n        Class klass = struct.getClass();\n        boolean includeSuperClass = klass.getClassLoader() != null;\n        Field[] fields = includeSuperClass ? klass.getFields() : klass.getDeclaredFields();\n\n        for( int i = 0; i < fields.length; ++i ) {\n            try {\n                Field field = fields[i];\n                field.setAccessible( true );\n\n                String key = field.getName();\n                if( !StringUtils.isEmpty( key ) ) {\n                    Object desiredVal = jo.get( key );\n                    if( desiredVal == null ) {\n                        continue;\n                    }\n                    else if( exceptedKeys != null && exceptedKeys.contains( key ) ) {\n                        continue;\n                    }\n\n                    try {\n                        Object recursiveBean = null;\n                        if( bRecursive ) {\n                            Field      curField = struct.getClass().getField( key );\n                            if( curField != null ) {\n                                recursiveBean = curField.get( struct );\n                                if( !BeanMapDecoder.trialHomogeneity( recursiveBean ) ) {\n                                    recursiveBean = null; // Not a struct.\n  
                              }\n                            }\n                        }\n\n                        if( recursiveBean == null ) {\n                            field.set( struct, desiredVal );\n                        }\n                        else {\n                            if( desiredVal instanceof Map ) {\n                                this.decode0( recursiveBean, (Map)desiredVal, exceptedKeys, bRecursive );\n                            }\n                        }\n                    }\n                    catch ( IllegalAccessException | IllegalArgumentException ignore ) {\n                        ignore.printStackTrace();\n                        // Do nothing.\n                    }\n                }\n            }\n            catch ( Exception ignore ) {\n                ignore.printStackTrace();\n                // Do nothing.\n            }\n        }\n\n        return struct;\n    }\n\n    @Override\n    public Object decode( Object struct, Map<String, Object > jo, boolean bRecursive ) {\n        return this.decode( struct, jo, (Set<String >) null, bRecursive );\n    }\n\n    @Override\n    public Object decode( Object struct, Map<String, Object> jo, Set<String> exceptedKeys ) {\n        return this.decode( struct, jo, exceptedKeys, false );\n    }\n\n    @Override\n    public Object decode( Object struct, Map<String, Object > jo ) {\n        return this.decode( struct, jo, false );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/GenericStructJSONEncoder.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.io.IOException;\nimport java.io.Writer;\nimport java.lang.reflect.Field;\nimport java.util.Set;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.GenericJSONEncoder;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic class GenericStructJSONEncoder implements StructJSONEncoder {\n    public GenericStructJSONEncoder() {\n\n    }\n\n    @Override\n    public String valueJsonify( Object val ) {\n        return JSON.stringify( val );\n    }\n\n    @Override\n    public void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        JSONEncoder.BASIC_JSON_ENCODER.write( val, writer, nIndentFactor, nIndentBlankNum );\n    }\n\n    @Override\n    public String encode( Object struct, Set<String > exceptedKeys, boolean bAllFields ) {\n        Class klass = struct.getClass();\n        boolean includeSuperClass = klass.getClassLoader() != null;\n        Field[] fields;\n        if( bAllFields ) {\n            fields = klass.getDeclaredFields();\n        }\n        else {\n            fields = includeSuperClass ? 
klass.getFields() : klass.getDeclaredFields();\n        }\n\n        StringBuilder sb = new StringBuilder( \"{\" );\n        for( int i = 0; i < fields.length; ++i ) {\n            try {\n                Field field = fields[i];\n                field.setAccessible( true );\n                String key = field.getName();\n                if( !StringUtils.isEmpty( key ) ) {\n                    if( exceptedKeys != null && exceptedKeys.contains( key ) ) {\n                        continue;\n                    }\n\n                    Object val;\n                    try {\n                        val = field.get( struct );\n                        sb.append( '\\\"' ).append( key ).append( \"\\\":\" );\n                    }\n                    catch ( IllegalAccessException | IllegalArgumentException e ) {\n                        continue;\n                    }\n\n                    sb.append( this.valueJsonify( val ) );\n                    sb.append( ',' );\n                }\n            }\n            catch ( Exception e ) {\n                e.printStackTrace();\n                // Do nothing.\n            }\n        }\n\n        if( sb.charAt( sb.length() - 1 ) == ',' ) {\n            sb.deleteCharAt( sb.length() - 1 );\n        }\n\n        return sb.append( '}' ).toString();\n    }\n\n    @Override\n    public String encode( Object struct ) {\n        return this.encode( struct, (Set<String >) null );\n    }\n\n    @Override\n    public void encode( Object struct, Writer writer, int nIndentFactor ) throws IOException {\n        this.encode0( struct, writer, nIndentFactor, 0 );\n    }\n\n    protected void encode0( Object struct, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException {\n        Class klass = struct.getClass();\n        boolean includeSuperClass = klass.getClassLoader() != null;\n        Field[] fields = includeSuperClass ? 
klass.getFields() : klass.getDeclaredFields();\n\n        writer.write( \"{\" );\n        boolean isFirst = true;\n\n        for( int i = 0; i < fields.length; ++i ) {\n            try {\n                Field field = fields[i];\n                field.setAccessible( true );\n                String key = field.getName();\n                if( !StringUtils.isEmpty( key ) ) {\n                    int nNewIndent = nIndentBlankNum + nIndentFactor;\n                    if ( !isFirst ) {\n                        writer.write( \",\" );\n                    }\n\n                    if ( nNewIndent > 0 ) {\n                        writer.write('\\n');\n                    }\n                    GenericJSONEncoder.indentBlank( writer, nNewIndent );\n\n\n                    Object val;\n                    try {\n                        val = field.get( struct );\n                        writer.write( \"\\\"\" + key + \"\\\":\" );\n                    }\n                    catch ( IllegalAccessException | IllegalArgumentException e ) {\n                        continue;\n                    }\n\n                    this.valueJsonify( val, writer, nIndentFactor, nNewIndent );\n                    isFirst = false;\n\n                    GenericJSONEncoder.indentBlank( writer, nIndentBlankNum );\n                }\n            }\n            catch ( Exception e ) {\n                e.printStackTrace();\n                // Do nothing.\n            }\n        }\n\n        if ( nIndentFactor > 0 ) {\n            writer.write( '\\n' );\n        }\n        writer.write( \"}\" );\n    }\n\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/JSONGet.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.FIELD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface JSONGet {\n    String value() default \"\";\n}\n\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/JSONInjector.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport com.pinecone.framework.system.functions.Executable;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.framework.system.functions.Function;\nimport com.pinecone.framework.system.homotype.HomoInjector;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.json.*;\n\nimport java.lang.reflect.Array;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Method;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.Callable;\n\npublic abstract class JSONInjector implements HomoInjector {\n    public JSONInjector(){\n\n    }\n\n    @Override\n    public boolean isHomogeneity( Object that ) {\n        return JSONInjector.trialHomogeneity( that );\n    }\n\n    protected String getFieldName( Field field ){\n        return field.getName();\n    }\n\n    @Override\n    public Object  inject( Object data ) throws IllegalArgumentException {\n        return this.inject( data, true );\n    }\n\n    @Override\n    public Object inject              ( Object that, Object instance ) throws Exception {\n        return this.inject( that, that.getClass(), instance );\n    }\n\n    @Override\n    public Object inject              ( Object that, Class<?> stereotype, Object instance ) throws Exception {\n        return this.inject( true, true, instance );\n    }\n\n    public Object  inject             ( Object data, boolean bRecursive ) throws IllegalArgumentException  {\n        return this.inject( data, bRecursive, null );\n    }\n\n    public Object  inject             ( Object data, boolean bRecursive, Object instance ) throws IllegalArgumentException  {\n        if ( data != null ) {\n            if ( JSONInjector.trialHomogeneity( data ) ) {\n                return data;\n            }\n            else if ( data instanceof Map) {\n                return new JSONMaptron((Map)data);\n      
      }\n            else if ( data instanceof List) {\n                return new JSONArraytron((List) data);\n            }\n            else if ( data instanceof Collection ) {\n                return new JSONArraytron((Collection)data);\n            }\n            else if ( data instanceof JSONString ) {\n                return ( (JSONString) data ).toJSONString();\n            }\n            else {\n                return this.javaObjectInject( data, bRecursive, instance );\n            }\n        }\n\n        return JSON.NULL;\n    }\n\n    public Object  injectArray        ( Object data, boolean bRecursive, JSONArray instance ) throws IllegalArgumentException {\n        if( data.getClass().getComponentType().isPrimitive() ){\n            for ( int i = 0; i < Array.getLength(data); i++ ) {\n                instance.put( this.inject( Array.get( data, i ), bRecursive ) );\n            }\n        }\n        else {\n            for ( Object row : (Object[]) data ) {\n                instance.put( this.inject( row, bRecursive ) );\n            }\n        }\n        return instance;\n    }\n\n    public Object  injectObject       ( Object data, boolean bRecursive, JSONObject instance ) throws IllegalArgumentException {\n        Field[] fields = data.getClass().getDeclaredFields();\n        for ( Field field : fields ) {\n            ReflectionUtils.makeAccessible( field );\n            String szKey = this.getFieldName( field );\n            if( szKey == null ) {\n                continue;\n            }\n\n            Object value;\n            try{\n                value = field.get( data );\n            }\n            catch ( IllegalAccessException e ){\n                value = null;\n            }\n            instance.embed( this.getFieldName( field ), this.inject( value, bRecursive ) );\n        }\n        return instance;\n    }\n\n    public Object  javaObjectInject   ( Object data, boolean bRecursive, Object instance ) throws IllegalArgumentException {\n    
    if( data != null ){\n            if( data.getClass().isArray() ){\n                JSONArray array = (JSONArray) instance;\n                if( instance == null ) {\n                    array = new JSONArraytron();\n                }\n                return this.injectArray( data, bRecursive, array );\n            }\n            else if( data.getClass().isEnum() ) {\n                return data.toString();\n            }\n            else if( data instanceof Method ){\n                throw new IllegalArgumentException( \"Method cannot survive without its mother.\" );\n            }\n            else if( data instanceof Runnable ){\n                return new Executor() {\n                    Runnable proto = (Runnable) data;\n\n                    public Runnable reveal(){\n                        return this.proto;\n                    }\n\n                    @Override\n                    public void execute() throws Exception {\n                        this.proto.run();\n                    }\n                };\n            }\n            else if( data instanceof Callable ){\n                return new Function() {\n                    Callable proto = (Callable) data;\n\n                    public Callable reveal(){\n                        return this.proto;\n                    }\n\n                    @Override\n                    public Object invoke( Object... 
obj ) throws Exception {\n                        return this.proto.call();\n                    }\n                };\n            }\n            else {\n                JSONObject object = (JSONObject) instance;\n                if( instance == null ) {\n                    object = new JSONMaptron();\n                }\n                return this.injectObject( data, bRecursive, object );\n            }\n\n        }\n        return JSON.NULL;\n    }\n\n\n\n\n    public static Number     inject( Number data ){\n        return data;\n    }\n\n    public static Boolean    inject( Boolean data ){\n        return data;\n    }\n\n    public static String     inject( String data ){\n        return data;\n    }\n\n    public static Executable inject( Executable data ){\n        return data;\n    }\n\n    public static JSONObject inject( JSONObject data ){\n        return data;\n    }\n\n    public static JSONArray  inject( JSONArray data ){\n        return data;\n    }\n\n    public static boolean    trialHomogeneity( Object that ) {\n        return  that instanceof Number     || that instanceof Boolean   || that instanceof String || that == JSON.NULL ||\n                that instanceof JSONObject || that instanceof JSONArray ||\n                that instanceof Executable;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/MapStructure.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.FIELD})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface MapStructure {\n    String value() default \"\";\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/ObjectInjector.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport com.pinecone.framework.system.functions.Executable;\nimport com.pinecone.framework.system.homotype.HomoInjector;\nimport com.pinecone.framework.system.homotype.StereotypicInjector;\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONArray;\n\n\nimport java.lang.reflect.Array;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Method;\nimport java.util.Set;\nimport java.util.Map;\nimport java.util.List;\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\nimport java.util.HashSet;\nimport java.util.Queue;\nimport java.util.LinkedList;\nimport java.util.ArrayList;\nimport java.util.concurrent.Callable;\n\npublic abstract class ObjectInjector implements HomoInjector, StereotypicInjector {\n    protected Class<?> mType;\n\n    protected String getFieldName( String szKey ){\n        return szKey;\n    }\n\n    public ObjectInjector( Class type ) {\n        this.mType = type;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected Object inject           ( Object that, Class<?> type ) {\n        if ( ObjectInjector.trialHomogeneity( that ) && !type.isEnum() ){\n            return that;\n        }\n        else if( type == Object.class ){\n            return that;\n        }\n        else if( that instanceof Executable ){\n            return this.inject( (Executable) that );\n        }\n        else if ( that instanceof Collection ){\n            return this.inject( (Collection) that, type );\n        }\n        else if ( that instanceof Map ){\n            return this.inject( (Map) that, type );\n        }\n        else if ( type.isEnum() ){\n            try{\n                return Enum.valueOf( (Class<Enum>) type, 
that.toString() );\n            }\n            catch ( RuntimeException e ) {\n                return that;\n            }\n        }\n        return that;\n    }\n\n    public static Collection newDefaultCollection( Class<?> type ) {\n        if( type == null || List.class.isAssignableFrom( type ) ){\n            return new ArrayList();\n        }\n        else if( Set.class.isAssignableFrom( type ) ){\n            return new HashSet();\n        }\n        else if( Queue.class.isAssignableFrom( type ) ){\n            return new LinkedList();\n        }\n        else {\n            return new ArrayList();\n        }\n    }\n\n\n    public    Object injectArray      ( Collection that, Class<?> type, Object instance ) {\n        Class innerType = type.getComponentType();\n        if( innerType.isPrimitive() ){\n            int i = 0;\n            for ( Object o : that ) {\n                Array.set( instance, i, this.inject( o, innerType ) );\n                ++i;\n            }\n            return instance;\n        }\n        else {\n            Object[] objects = (Object[]) instance;\n            int i = 0;\n            for ( Object o : that ) {\n                objects[ i ] = this.inject( o, innerType );\n                ++i;\n            }\n            return objects;\n        }\n    }\n\n    public    Object injectCollection ( Collection that, Class<?> type, Collection instance ) {\n        for( Object row : that ){\n            instance.add( this.inject( row, row.getClass() ) );\n        }\n\n        return instance;\n    }\n\n    protected Object inject           ( Collection that, Class<?> type ) {\n        if( type != null && type.isAssignableFrom( that.getClass() ) ){\n            return that;\n        }\n        else if( type == null || Collection.class.isAssignableFrom( type )  ){\n            Collection instance;\n            if( type == null || type.isInterface() || Prototype.isAbstract( type ) ){ // Motherfucker condition...\n                instance 
= ObjectInjector.newDefaultCollection( type );\n            }\n            else {\n                try{\n                    instance = (Collection)type.getDeclaredConstructor().newInstance();\n                }\n                catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) {\n                    instance = ObjectInjector.newDefaultCollection( type );\n                }\n            }\n\n            return this.injectCollection( that, type, instance );\n        }\n        else if( type.isArray() ){\n            Class innerType = type.getComponentType();\n            Object instance = Array.newInstance( innerType, that.size() );\n            return this.injectArray( that, type, instance );\n        }\n\n        return null; // WHat fuck could be ??? asking jesus...\n    }\n\n    public    Object inject           ( Collection that, Class<?> type, Object instance ) {\n        if( type == JSONArray.class || type == Object.class ){\n            return that;\n        }\n        else if( type == null || Collection.class.isAssignableFrom( type )  ){\n            return this.injectCollection( that, type, (Collection) instance );\n        }\n        else if( type.isArray() ){\n            return this.injectArray( that, type, instance );\n        }\n\n        return null; // WHat fuck could be ??? 
asking jesus...\n    }\n\n    protected Object inject           ( Map that, Class<?> type ) {\n        if( type != null && type.isAssignableFrom( that.getClass() ) ){\n            return that;\n        }\n        else if( type == null || Map.class.isAssignableFrom( type )  ){\n            Map map;\n            if( type == null || type.isInterface() || Prototype.isAbstract( type ) ){ // Motherfucker condition...\n                map = new LinkedHashMap();\n            }\n            else {\n                try{\n                    map = (Map)type.getDeclaredConstructor().newInstance();\n                }\n                catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) {\n                    map = new LinkedHashMap();\n                }\n            }\n\n            for( Object row : that.entrySet() ){\n                Map.Entry kv = (Map.Entry) row;\n                map.put( kv.getKey(), this.inject( kv.getValue(), type ) );\n            }\n            return map;\n        }\n        else  {\n            Object obj;\n            try{\n                Constructor constructor = type.getConstructor();\n                ReflectionUtils.makeAccessible( constructor );\n                obj = constructor.newInstance();\n            }\n            catch ( NoSuchMethodException | InvocationTargetException | InstantiationException | IllegalAccessException e ) {\n                return null;\n            }\n\n            return this.inject( that, obj );\n        }\n    }\n\n    public    Object inject           ( Map that, Class<?> type, Object instance ) {\n        for( Object row : that.entrySet() ){\n            Map.Entry kv = (Map.Entry) row;\n            Field field;\n            try{\n                field = type.getDeclaredField( this.getFieldName( kv.getKey().toString() ) );\n            }\n            catch ( NoSuchFieldException e ){\n                try {\n                    field = 
type.getDeclaredField( kv.getKey().toString() );\n                }\n                catch ( NoSuchFieldException e1 ){\n                    field = null;\n                }\n            }\n            if( field != null ){\n                ReflectionUtils.makeAccessible( field );\n                try {\n                    try {\n                        Object j = this.inject( kv.getValue() , field.getType() );\n                        field.set( instance, j );\n                    }\n                    catch ( IllegalArgumentException e ){\n                        field = null;\n                    }\n                }\n                catch ( IllegalAccessException e ){\n                    throw new IllegalStateException(e); // This should never be happened.\n                }\n            }\n        }\n\n        /*    Field[] fields = type.getClass().getDeclaredFields();\n            for ( Field field : fields ) {\n                ReflectionUtils.makeAccessible( field );\n                try {\n                    Object val = that.opt( this.getFieldName( field.getName() ) );\n                    if( val == null ){\n                        val = that.opt( field.getName() );\n                    }\n                    try {\n                        Object j = this.inject( val , field.getType() );\n                        field.set( type, j );\n                    }\n                    catch ( IllegalArgumentException e ){\n                        e.printStackTrace();\n                    }\n                }\n                catch ( IllegalAccessException e ){\n                    throw new IllegalStateException(e); // This should never be happened.\n                }\n            }*/\n\n        return instance;\n    }\n\n    public    Object inject           ( Map that, Object instance ) {\n        if( this.mType != null ) {\n            return this.typeInject( that, instance );\n        }\n        return this.inject( that, instance.getClass(), instance );\n  
  }\n\n    public    Object typeInject       ( Map that, Object instance ) {\n        return this.inject( that, this.mType, instance );\n    }\n\n\n\n    public    Object inject           ( Collection that ){\n        return this.inject( that, this.mType );\n    }\n\n    public    Object inject           ( Map that ){\n        return this.inject( that, this.mType );\n    }\n\n    public    Object inject           ( Executable data ) {\n        Method fn;\n        try {\n            fn = data.getClass().getMethod( \"reveal\" );\n            try {\n                return fn.invoke( data );\n            }\n            catch ( Exception e ){\n                throw new IllegalArgumentException( \"Executable `reveal` function should never be modified.\", e ); // What fuck was that, did you modified it ?\n            }\n        }\n        catch ( NoSuchMethodException e ){\n            return data;\n        }\n    }\n\n    public    Number inject           ( Number data ){\n        return data;\n    }\n\n    public    Boolean inject          ( Boolean data ){\n        return data;\n    }\n\n    public    String inject           ( String data ){\n        return data;\n    }\n\n    public    Runnable inject         ( Runnable data ){\n        return data;\n    }\n\n    public    Callable inject         ( Callable data ){\n        return data;\n    }\n\n    public    Method inject           ( Method data ){\n        return data;\n    }\n\n    @Override\n    public Object inject              ( Object that ){\n        return this.inject( that, this.mType );\n    }\n\n    @Override\n    public Object inject              ( Object that, Object instance ) throws Exception {\n        if( this.mType != null ) {\n            return this.inject( that, this.mType, instance );\n        }\n        return this.inject( that, instance.getClass(), instance );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Object inject              ( Object that, Class<?> type, 
Object instance ) throws Exception {\n        if ( ObjectInjector.trialHomogeneity( that ) && !type.isEnum() ){\n            return that;\n        }\n        else if( type == Object.class ){\n            return that;\n        }\n        else if( that instanceof Executable){\n            return this.inject( (Executable) that );\n        }\n        else if ( that instanceof Collection ){\n            return this.inject( (Collection) that, type, instance );\n        }\n        else if ( that instanceof Map ){\n            return this.inject( (Map) that, type, instance );\n        }\n        else if ( type.isEnum() ){\n            try{\n                return Enum.valueOf( (Class<Enum>) type, that.toString() );\n            }\n            catch ( RuntimeException e ) {\n                return that;\n            }\n        }\n        return that;\n    }\n\n\n    @Override\n    public boolean isHomogeneity     ( Object that ){\n        return ObjectInjector.trialHomogeneity( that );\n    }\n\n    @Override\n    public Class<?> getStereotype() {\n        return this.mType;\n    }\n\n    @Override\n    public void setStereotype( Class<?> type ) {\n        this.mType = type;\n    }\n\n    public static boolean trialHomogeneity( Object that ) {\n        return  that instanceof Number     || that instanceof Boolean   || that instanceof String || that == JSON.NULL ||\n                that instanceof Callable   || that instanceof Runnable  || that instanceof Method;\n    }\n\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/StructJSONDecoder.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface StructJSONDecoder extends Pinenut {\n    StructJSONDecoder BasicDecoder = new GenericStructJSONDecoder();\n\n    static boolean    trialHomogeneity( Object that ) {\n        return  JSONInjector.trialHomogeneity( that ) || that instanceof Map;\n    }\n\n    Object decode( Object struct, Map<String, Object > jo, Set<String > exceptedKeys, boolean bRecursive );\n\n    Object decode( Object struct, Map<String, Object > jo, boolean bRecursive ) ;\n\n    Object decode( Object struct, Map<String, Object > jo, Set<String > exceptedKeys );\n\n    Object decode( Object struct, Map<String, Object > jo ) ;\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/StructJSONEncoder.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\nimport java.io.IOException;\nimport java.io.Writer;\nimport java.util.Set;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface StructJSONEncoder extends Pinenut {\n    StructJSONEncoder BasicEncoder = new GenericStructJSONEncoder();\n\n    String encode( Object struct );\n\n    String encode( Object struct, Set<String > exceptedKeys, boolean bAllFields );\n\n    default String encode( Object struct, Set<String > exceptedKeys ) {\n        return this.encode( struct, exceptedKeys, false );\n    }\n\n    default String encode( Object struct, boolean bAllFields ) {\n        return this.encode( struct, null, bAllFields );\n    }\n\n    void encode( Object struct, Writer writer, int nIndentFactor ) throws IOException;\n\n    default void encode( Object struct, Writer writer ) throws IOException {\n        this.encode( struct, writer, 0 );\n    }\n\n    void valueJsonify( Object val, Writer writer, int nIndentFactor, int nIndentBlankNum ) throws IOException;\n\n    String valueJsonify( Object val );\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/json/homotype/WrappedBeanColonist.java",
    "content": "package com.pinecone.framework.util.json.homotype;\n\npublic class WrappedBeanColonist extends ArchBeanColonist {\n    public WrappedBeanColonist() {\n        super();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ArchClassScopeLoader.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.util.name.MultiNamespace;\nimport com.pinecone.framework.util.name.Name;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic abstract class ArchClassScopeLoader implements MultiClassScopeLoader {\n    protected ClassScope           mClassScope;\n    protected ClassLoader          mClassLoader;\n    protected List<ClassFilter >   mIncludeFilters;\n    protected List<ClassFilter >   mExcludeFilters;\n\n    protected ArchClassScopeLoader( ClassScope classScope, ClassLoader classLoader ) {\n        this.mClassScope      = classScope;\n        this.mClassLoader     = classLoader;\n        this.mIncludeFilters  = new ArrayList<>();\n        this.mExcludeFilters  = new ArrayList<>();\n    }\n\n    @Override\n    public void addIncludeFilter( ClassFilter includeFilter ) {\n        this.mIncludeFilters.add(includeFilter);\n    }\n\n    @Override\n    public void addExcludeFilter( ClassFilter excludeFilter ) {\n        this.mExcludeFilters.add(0, excludeFilter);\n    }\n\n    @Override\n    public void resetFilters( boolean useDefaultFilters ) {\n        this.mIncludeFilters.clear();\n        this.mExcludeFilters.clear();\n        if ( useDefaultFilters ) {\n            this.registerDefaultFilters();\n        }\n    }\n\n    protected void registerDefaultFilters() {\n\n    }\n\n    protected boolean filter( Class<?> clazz ) {\n        for( ClassFilter filter : this.mIncludeFilters ) {\n            if( !filter.match( clazz, this ) ){\n                return true;\n            }\n        }\n\n        for( ClassFilter filter : this.mExcludeFilters ) {\n            if( filter.match( clazz, this ) ){\n                return true;\n            }\n        }\n\n        return false;\n    }\n\n    @Override\n    public Class<? 
> load( Name name ) throws ClassNotFoundException {\n        return (Class<?>) this.loads0( name, true );\n    }\n\n    @Override\n    public List loads( Name name ) {\n        try{\n            return (List) this.loads0( name, false );\n        }\n        catch ( ClassNotFoundException e ) {\n            return null; // This should never be happened.\n        }\n    }\n\n    protected List<String > expandNamespace( Name name ) {\n        if( name instanceof MultiNamespace) {\n            return ((MultiNamespace) name).getFullNames();\n        }\n\n        return List.of( name.getFullName() ) ;\n    }\n\n    protected abstract Class<? > loadSingleByFullClassName( String szFullClassName );\n\n    protected Object loads0( Name name, boolean bOnlyFirst ) throws ClassNotFoundException {\n        List<Class<? > > batch = null;\n        if( !bOnlyFirst ) {\n            batch = new ArrayList<>();\n        }\n\n        List<String > ns = this.expandNamespace( name );\n        for ( ScopedPackage scope : this.mClassScope.getAllScopes() ) {\n            String className = scope.packageName() + NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR;\n\n            for( String szNS : ns ) {\n                String szCN = className + szNS;\n                Class<? > ste = this.loadSingleByFullClassName( szCN );\n                if( bOnlyFirst ) {\n                    if( ste == null ) {\n                        throw new ClassNotFoundException( \"Servgram class not found: \" + szCN );\n                    }\n                    return ste;\n                }\n                else {\n                    if( ste != null ) {\n                        batch.add( ste );\n                    }\n                }\n            }\n        }\n\n        return batch;\n    }\n\n    protected void handleIgnoreException( Exception e ) throws ProvokeHandleException {\n        // Just ignore them.\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ArchClassScopeSet.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Set;\n\npublic abstract class ArchClassScopeSet implements ClassScope {\n    protected Set<ScopedPackage > mScopes;\n    protected ClassLoader         mClassLoader;\n\n    protected ArchClassScopeSet( Set<ScopedPackage > scope, ClassLoader classLoader ) {\n        this.mScopes        = scope;\n        this.mClassLoader   = classLoader;\n    }\n\n    @Override\n    public void addScope( String szPackageName ) {\n        ScopedPackage pkg = ScopedPackage.defaultInstance( szPackageName, this.mClassLoader );\n        this.mScopes.add( pkg );\n    }\n\n    @Override\n    public void addScope( ScopedPackage scope ) {\n        this.mScopes.add( scope );\n    }\n\n    @Override\n    public void removeScope( String szPackageName ) {\n        ScopedPackage that = this.getPackageByName( szPackageName );\n\n        if( that != null ) {\n            this.mScopes.remove( that );\n        }\n    }\n\n    @Override\n    public void removeScope( ScopedPackage scope ) {\n        this.mScopes.remove( scope );\n    }\n\n    @Override\n    public boolean containsScope( String szPackageName ) {\n        return this.getPackageByName( szPackageName ) != null;\n    }\n\n    @Override\n    public boolean containsScope( ScopedPackage scope ) {\n        return this.mScopes.contains( scope );\n    }\n\n    @Override\n    public ScopedPackage getPackageByName( String szPackageName ) {\n        for( ScopedPackage pkg : this.mScopes ) {\n            if( pkg.packageName().equals( szPackageName ) ){\n                return pkg;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public List<ScopedPackage > getAllScopes() {\n        return new ArrayList<>( this.mScopes );\n    }\n\n    @Override\n    public List<String > getAllNameScopes() {\n        List<String> list = new ArrayList<>();\n\n        for( ScopedPackage pkg : this.mScopes ) {\n     
       list.add( pkg.packageName() );\n        }\n        return list;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ArchDynamicFactory.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic abstract class ArchDynamicFactory implements DynamicFactory {\n    protected static final Map<Class<? >, Class<? > > PrimitiveToWrapper = new HashMap<>();\n\n    static {\n        ArchDynamicFactory.PrimitiveToWrapper.put( boolean.class,  Boolean.class   );\n        ArchDynamicFactory.PrimitiveToWrapper.put( byte.class,     Byte.class      );\n        ArchDynamicFactory.PrimitiveToWrapper.put( char.class,     Character.class );\n        ArchDynamicFactory.PrimitiveToWrapper.put( double.class,   Double.class    );\n        ArchDynamicFactory.PrimitiveToWrapper.put( float.class,    Float.class     );\n        ArchDynamicFactory.PrimitiveToWrapper.put( int.class,      Integer.class   );\n        ArchDynamicFactory.PrimitiveToWrapper.put( long.class,     Long.class      );\n        ArchDynamicFactory.PrimitiveToWrapper.put( short.class,    Short.class     );\n        ArchDynamicFactory.PrimitiveToWrapper.put( void.class,     Void.class      );\n    }\n\n    protected ClassLoader             mClassLoader      ;\n    protected ClassScope              mClassScope       ;\n\n    protected ArchDynamicFactory( ClassLoader classLoader, ClassScope classScope ) {\n        this.mClassLoader       = classLoader       ;\n        this.mClassScope        = classScope        ;\n    }\n\n    @Override\n    public ClassLoader getClassLoader() {\n        return this.mClassLoader;\n    }\n\n    @Override\n    public ClassScope getClassScope() {\n        return this.mClassScope;\n    }\n\n\n    protected Object beforeInstantiate( Class<? > that, Class<?>[] stereotypes, Object[] args ) {\n        return null;\n    }\n\n    @Override\n    public Object newInstance ( Class<? 
> that, Class<?>[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {\n        Object s = this.beforeInstantiate( that, stereotypes, args );\n        if ( s != null ) {\n            return s;\n        }\n\n        Constructor<?>[] constructors = that.getConstructors();\n        boolean bUsingSetAccess = false;\n        if( constructors.length == 0 ) {\n            constructors = that.getDeclaredConstructors();\n            bUsingSetAccess = true;\n        }\n        for ( Constructor<?> constructor : constructors ) {\n            Class<?>[] paramTypes = constructor.getParameterTypes();\n            int nArgsLength = 0;\n            if( args != null ) {\n                nArgsLength = args.length;\n            }\n\n            if ( paramTypes.length == nArgsLength ) {\n                boolean matches = true;\n                for ( int i = 0; i < paramTypes.length; ++i ) {\n                    if( stereotypes != null ) {\n                        if ( !paramTypes[i].isAssignableFrom( stereotypes[i] ) ) {\n                            matches = false;\n                            break;\n                        }\n                    }\n                    else {\n                        Class<?> paramType = paramTypes[i];\n                        if ( !paramType.isInstance( args[i] ) ) {\n                            if( paramType.isPrimitive() ) {\n                                Class<?> wrapperType = ArchDynamicFactory.PrimitiveToWrapper.get( paramType );\n                                if ( wrapperType != null && wrapperType.isInstance( args[i] ) ) {\n                                    continue;\n                                }\n                            }\n                            matches = false;\n                            break;\n                        }\n                    }\n                }\n\n                if ( matches ) {\n                    if( 
bUsingSetAccess ) {\n                        try{\n                            return constructor.newInstance( args );\n                        }\n                        catch ( IllegalAccessException e ) {\n                            constructor.setAccessible( true );\n                            Object ins = constructor.newInstance( args );\n                            constructor.setAccessible( false );\n                            return ins;\n                        }\n                    }\n                    else {\n                        return constructor.newInstance( args );\n                    }\n                }\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public Object loadInstance( String szClassFullName, Class<?>[] stereotypes, Object[] args ) throws ClassNotFoundException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {\n        Class<?> clazz = this.mClassLoader.loadClass( szClassFullName );\n        return this.newInstance( clazz, stereotypes, args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ArchMultiProtocolNamespaceFetcher.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.net.URL;\nimport java.net.URLClassLoader;\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic abstract class ArchMultiProtocolNamespaceFetcher implements NamespaceCollector {\n    protected List<PathNamespaceCollectum > mCollectors;\n    protected ClassLoader                   mClassLoader;\n    protected PathNamespaceCollectum        mFileAdapter;\n    protected PathNamespaceCollectum        mJarAdapter;\n\n\n    public ArchMultiProtocolNamespaceFetcher ( List<PathNamespaceCollectum > collectors, ClassLoader classLoader ) {\n        this.mCollectors  = collectors;\n        this.mClassLoader = classLoader;\n\n        for( PathNamespaceCollectum collectum : collectors ) {\n            if( collectum.matched( NamespaceCollector.KEY_FILE_PROTOCOL ) ) {\n                this.mFileAdapter = collectum;\n            }\n            else if( collectum.matched( NamespaceCollector.KEY_JAR_PROTOCOL ) ) {\n                this.mJarAdapter = collectum;\n            }\n        }\n    }\n\n    public ArchMultiProtocolNamespaceFetcher ( List<PathNamespaceCollectum > collectors ) {\n        this( collectors, Thread.currentThread().getContextClassLoader() );\n    }\n\n    public ArchMultiProtocolNamespaceFetcher ( PathNamespaceCollectum fileAdapter, PathNamespaceCollectum jarAdapter, ClassLoader classLoader ) {\n        this( new ArrayList<>(), classLoader );\n\n        this.mFileAdapter = fileAdapter;\n        this.mJarAdapter  = jarAdapter;\n    }\n\n    @Override\n    public ClassLoader getClassLoader() {\n        return this.mClassLoader;\n    }\n\n    @Override\n    public void fetch ( String szNSName, List<String > collections, boolean bCollectChildPackage ) {\n        this.fetch0( szNSName, collections, bCollectChildPackage );\n    }\n\n    @Override\n    public String fetchFirst( String szNSName ) {\n        return this.fetch0( szNSName, null, false );\n    }\n\n    public String fetch0 ( 
String szNSName, List<String > collections, boolean bCollectChildPackage ) {\n        String packagePath = szNSName.replace ( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, NamespaceCollector.RESOURCE_NAME_SEPARATOR );\n        URL url = this.mClassLoader.getResource ( packagePath );\n        if ( url == null ) {\n            if( this.mClassLoader instanceof URLClassLoader ) {\n                String first = this.fetchByURLs( ((URLClassLoader) this.mClassLoader).getURLs(), szNSName, packagePath, collections, bCollectChildPackage );\n                if( collections == null ) {\n                    return first;\n                }\n            }\n            else {\n                return null;\n            }\n        }\n\n        if( collections != null ) {\n            this.fetch( url, szNSName, collections, bCollectChildPackage );\n        }\n        else {\n            return this.fetchFirst( url, szNSName );\n        }\n\n        return null;\n    }\n\n    public String fetchByURLs( URL[] urls, String szNSName, String szPackagePath, List<String> collections, boolean bCollectChildPackage ) {\n        if ( urls != null ) {\n            for ( int i = 0; i < urls.length; i++ ) {\n                URL url = urls[i];\n                String urlPath = url.getPath();\n                if ( urlPath.endsWith( \"classes/\" ) ) {\n                    continue;\n                }\n\n                String jarPath = urlPath + \"!/\" + szPackagePath;\n                //List<String > subList = UnitUtils.spawnExtendParent( collections );\n                if( collections != null ) {\n                    this.mJarAdapter.collect( jarPath, szNSName, collections, bCollectChildPackage );\n                }\n                else {\n                    return this.mJarAdapter.collectFirst( jarPath, szNSName );\n                }\n                //classNames.addAll( subList );\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public void fetch( URL url, String 
szNSName, List<String> collections, boolean bCollectChildPackage ) {\n        String protocol = url.getProtocol ();\n        if ( protocol.equals ( NamespaceCollector.KEY_FILE_PROTOCOL ) ) {\n            this.mFileAdapter.collect ( url.getPath (), szNSName, collections, bCollectChildPackage );\n        }\n        else if ( protocol.equals ( NamespaceCollector.KEY_JAR_PROTOCOL ) ) {\n            this.mJarAdapter.collect ( url.getPath (), szNSName, collections, bCollectChildPackage );\n        }\n        else {\n            for( PathNamespaceCollectum collectum : this.mCollectors ) {\n                if( collectum.matched( protocol ) ) {\n                    collectum.collect( url.getPath (), szNSName, collections, bCollectChildPackage );\n                }\n            }\n        }\n    }\n\n    @Override\n    public String fetchFirst( URL url, String szNSName ) {\n        String protocol = url.getProtocol ();\n        if ( protocol.equals ( NamespaceCollector.KEY_FILE_PROTOCOL ) ) {\n            return this.mFileAdapter.collectFirst ( url.getPath (), szNSName );\n        }\n        else if ( protocol.equals ( NamespaceCollector.KEY_JAR_PROTOCOL ) ) {\n            return this.mJarAdapter.collectFirst ( url.getPath (), szNSName );\n        }\n        else {\n            for( PathNamespaceCollectum collectum : this.mCollectors ) {\n                if( collectum.matched( protocol ) ) {\n                    return collectum.collectFirst( url.getPath (), szNSName );\n                }\n            }\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassCandidateScanner.java",
    "content": "package com.pinecone.framework.util.lang;\n\npublic class ClassCandidateScanner extends ObjectCandidateScanner {\n\n    public ClassCandidateScanner     ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory ) {\n        super( searchScope, classLoader, iteratorsFactory );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassFilter.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ClassFilter extends Pinenut {\n    boolean match( Class<? > clazz, ClassScopeLoader loader ) ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassNameFetcher.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.util.List;\n\npublic class ClassNameFetcher extends ArchMultiProtocolNamespaceFetcher implements NamespaceCollector {\n    protected PathNamespaceCollectum   mFileAdapter;\n    protected PathNamespaceCollectum   mJarAdapter;\n\n    public ClassNameFetcher ( List<PathNamespaceCollectum > collectors, ClassLoader classLoader ) {\n        super( collectors, classLoader );\n    }\n\n    public ClassNameFetcher ( List<PathNamespaceCollectum > collectors ) {\n        super( collectors );\n    }\n\n    public ClassNameFetcher ( PathNamespaceCollectum fileAdapter, PathNamespaceCollectum jarAdapter, ClassLoader classLoader ) {\n        super( fileAdapter, jarAdapter, classLoader );\n    }\n\n    public ClassNameFetcher ( ClassLoader classLoader ) {\n        this( new FileClassCollectorAdapter(), new JarClassCollectorAdapter(), classLoader );\n    }\n\n    public ClassNameFetcher () {\n        this( Thread.currentThread().getContextClassLoader() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassScanner.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.util.lang.iterator.NamespaceIterator;\n\nimport java.io.IOException;\nimport java.util.List;\n\npublic interface ClassScanner extends ObjectScanner {\n    void addIncludeFilter     ( TypeFilter filter          );\n\n    void addExcludeFilter     ( TypeFilter filter          );\n\n    void addIterator          ( NamespaceIterator classIter, NamespaceIterator packageIter ) ;\n\n    void scan( String szNSName, boolean bCollectChildPackage, List<String > candidates ) throws IOException ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassScope.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.List;\n\npublic interface ClassScope extends Pinenut {\n    void           addScope           ( String szPackageName );\n\n    void           addScope           ( ScopedPackage scope );\n\n    void           removeScope        ( String szPackageName );\n\n    void           removeScope        ( ScopedPackage scope );\n\n    boolean        containsScope      ( String szPackageName );\n\n    boolean        containsScope      ( ScopedPackage scope );\n\n    ScopedPackage  getPackageByName   ( String szPackageName );\n\n    List<ScopedPackage > getAllScopes ();\n\n    List<String >  getAllNameScopes   ();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassScopeLoader.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.name.Name;\n\npublic interface ClassScopeLoader extends Pinenut {\n    Class<? > load ( Name simpleName ) throws ClassNotFoundException ;\n\n    void clearCache();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ClassScopeNSProtocolIteratorsFactory.java",
    "content": "package com.pinecone.framework.util.lang;\n\npublic class ClassScopeNSProtocolIteratorsFactory extends GenericScopeNSProtocolIteratorsFactory {\n\n    public ClassScopeNSProtocolIteratorsFactory( ClassLoader classLoader, ClassScope searchScope ) {\n        super( classLoader, searchScope, \".class\" );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/DynamicFactory.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Factory;\nimport com.pinecone.framework.util.name.Name;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.List;\n\npublic interface DynamicFactory extends Factory {\n    DynamicFactory DefaultFactory = new GenericDynamicFactory();\n\n    @Override\n    ClassLoader           getClassLoader();\n\n    ClassScope            getClassScope();\n\n    Object loadInstance ( String szClassFullName, Class<?>[] stereotypes, Object[] args ) throws ClassNotFoundException, InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException;\n\n    Object newInstance  ( Class<? > that, Class<?>[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException;\n\n    // No exception, but with null.\n    default Object optNewInstance  ( Class<? > that, Class<?>[] stereotypes, Object[] args ) {\n        try{\n            return this.newInstance( that, stereotypes, args );\n        }\n        catch ( InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            return null;\n        }\n    }\n\n    // No exception, but with null.\n    default Object optLoadInstance ( String szClassFullName, Class<?>[] stereotypes, Object[] args ) {\n        try{\n            return this.loadInstance( szClassFullName, stereotypes, args );\n        }\n        catch ( ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n            return null;\n        }\n    }\n\n    // No exception, but with null.\n    default Object optLoadInstance ( Name classFullName, Class<?>[] stereotypes, Object[] args ) {\n        return this.optLoadInstance( classFullName.getFullName(), stereotypes, args );\n    }\n\n\n    // No exception, but with null.\n 
   default Object optNewInstance  ( Class<? > that, Object[] args ) {\n        return this.optNewInstance( that, null, args );\n    }\n\n    // No exception, but with null.\n    default Object optLoadInstance ( String szClassFullName, Object[] args ) {\n        return this.optLoadInstance( szClassFullName, null, args );\n    }\n\n    // No exception, but with null.\n    default Object optLoadInstance ( Name classFullName, Object[] args ) {\n        return this.optLoadInstance( classFullName.getFullName(), args );\n    }\n\n    // No exception, but with null.\n    default Object optLoadInstanceFromScope ( String szClassSimpleName, Class<?>[] stereotypes, Object[] args ) {\n        ClassScope scope  = this.getClassScope();\n        List<String > nss = scope.getAllNameScopes();\n\n        for( String ns : nss ) {\n            if( !ns.endsWith( \".\" ) ) {\n                ns = ns + \".\";\n            }\n\n            Object neo = this.optLoadInstance( ns + szClassSimpleName, stereotypes, args );\n            if( neo != null ){\n                return neo;\n            }\n        }\n\n        return null;\n    }\n\n    default Object optLoadInstanceFromScope ( String szClassSimpleName, Object[] args ) {\n        return this.optLoadInstance( szClassSimpleName, null, args );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/FileClassCollectorAdapter.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.io.File;\nimport java.util.List;\n\npublic class FileClassCollectorAdapter implements PathNamespaceCollectum {\n    @Override\n    public boolean matched      ( String szProtocol ) {\n        return szProtocol.toLowerCase().equals( NamespaceCollector.KEY_FILE_PROTOCOL );\n    }\n\n    @Override\n    public void collect         ( String szResourcePath, String szNSName, List<String > classNames, boolean bCollectChildren ) {\n        this.collect0( szResourcePath, szNSName, classNames, bCollectChildren );\n    }\n\n    @Override\n    public String collectFirst  ( String szResourcePath, String szNSName ) {\n        return this.collect0( szResourcePath, szNSName, null, false );\n    }\n\n\n    protected String collect0        ( String szResourcePath, String szNSName, List<String > classNames, boolean bCollectChildren ) {\n        File file         = new File( szResourcePath );\n        File[] childFiles = file.listFiles ();\n        if( childFiles != null ) {\n            String szPackageNamePathFmt = szNSName.replace( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, File.separator );\n            for ( File childFile : childFiles ) {\n                if ( childFile.isDirectory () ) {\n                    if ( bCollectChildren && classNames != null ) {\n                        this.collect ( childFile.getPath (), szNSName, classNames, bCollectChildren );\n                    }\n                }\n                else {\n                    String childFilePath = childFile.getPath ();\n                    if ( childFilePath.endsWith ( \".class\" ) ) {\n                        String szPackageSegment = childFilePath.substring( childFilePath.indexOf( szPackageNamePathFmt ) );\n                        String szChildPackage   = szPackageSegment.replace( File.separator, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ).replace( \".class\", \"\" );\n\n                        if( classNames == null ) {\n           
                 return szChildPackage;\n                        }\n                        else {\n                            classNames.add ( szChildPackage );\n                        }\n                    }\n                }\n            }\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/FilePackageCollectorAdapter.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.io.File;\nimport java.util.List;\n\npublic class FilePackageCollectorAdapter implements PathNamespaceCollectum {\n    @Override\n    public boolean matched( String szProtocol ) {\n        return szProtocol.toLowerCase().equals( NamespaceCollector.KEY_FILE_PROTOCOL );\n    }\n\n    @Override\n    public void collect         ( String szResourcePath, String szNSName, List<String > packageNames, boolean bCollectChildren ) {\n        this.collect0( szResourcePath, szNSName, packageNames, bCollectChildren );\n    }\n\n    @Override\n    public String collectFirst  ( String szResourcePath, String szNSName ) {\n        return this.collect0( szResourcePath, szNSName, null, false );\n    }\n\n    protected String collect0   ( String szResourcePath, String szNSName, List<String > packageNames, boolean bCollectChildren ) {\n        File file         = new File ( szResourcePath );\n        File[] childFiles = file.listFiles ();\n        if( childFiles != null ) {\n            String szPackageNamePathFmt = szNSName.replace( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, File.separator );\n            for ( File childFile : childFiles ) {\n                if ( childFile.isDirectory () ) {\n                    String szPackageSegment = childFile.getPath ().substring( childFile.getPath ().indexOf( szPackageNamePathFmt ) );\n                    String szChildPackage   = szPackageSegment.replace( File.separator, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR );\n                    if( packageNames == null ) {\n                        return szChildPackage;\n                    }\n                    else {\n                        packageNames.add ( szChildPackage );\n                    }\n\n\n                    if ( bCollectChildren ) {\n                        this.collect ( childFile.getPath (), szPackageSegment, packageNames, bCollectChildren );\n                    }\n                }\n            }\n        
}\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/GenericClassScopeSet.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.util.LinkedHashSet;\nimport java.util.Set;\n\npublic class GenericClassScopeSet extends ArchClassScopeSet {\n    public GenericClassScopeSet( Set<ScopedPackage > scope, ClassLoader classLoader ) {\n        super( scope, classLoader );\n    }\n\n    public GenericClassScopeSet( ClassLoader classLoader ) {\n        this( new LinkedHashSet<>(), classLoader );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/GenericDynamicFactory.java",
    "content": "package com.pinecone.framework.util.lang;\n\n\npublic class GenericDynamicFactory extends ArchDynamicFactory {\n    public GenericDynamicFactory( ClassLoader classLoader, ClassScope classScope ) {\n        super( classLoader, classScope );\n    }\n\n    public GenericDynamicFactory( ClassLoader classLoader ) {\n        this( classLoader, new GenericClassScopeSet( classLoader ) );\n    }\n\n    public GenericDynamicFactory() {\n        this( Thread.currentThread().getContextClassLoader() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/GenericScopeNSProtocolIteratorsFactory.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.io.IOException;\nimport java.net.URL;\nimport java.util.Enumeration;\nimport java.util.List;\n\nimport com.pinecone.framework.util.lang.iterator.DirectoryFileIterator;\nimport com.pinecone.framework.util.lang.iterator.DirectoryPackageIterator;\nimport com.pinecone.framework.util.lang.iterator.JarFileIterator;\nimport com.pinecone.framework.util.lang.iterator.JarPackageIterator;\n\npublic class GenericScopeNSProtocolIteratorsFactory implements NSProtocolIteratorsFactoryAdapter {\n    protected ClassLoader                 mClassLoader        ;\n    protected ClassScope                  mSearchScope        ;\n    protected String                      mszSuffix           ;\n\n    public GenericScopeNSProtocolIteratorsFactory( ClassLoader classLoader, ClassScope searchScope, String szSuffix ) {\n        this.mClassLoader = classLoader;\n        this.mSearchScope = searchScope;\n        this.mszSuffix    = szSuffix;\n    }\n\n    protected NamespaceIteratorPair newIteratorPair  ( URL url, String szNSName ) throws IOException {\n        String protocol = url.getProtocol ();\n\n        if ( protocol.equals ( NamespaceCollector.KEY_FILE_PROTOCOL ) ) {\n            return new NamespaceIteratorPair(\n                    new DirectoryFileIterator( url.getPath (), szNSName, this.mszSuffix ), new DirectoryPackageIterator( url.getPath (), szNSName, this.mszSuffix )\n            );\n        }\n        else if ( protocol.equals ( NamespaceCollector.KEY_JAR_PROTOCOL ) ) {\n            return new NamespaceIteratorPair(\n                    new JarFileIterator( url.getPath (), this.mszSuffix ), new JarPackageIterator( url.getPath (), this.mszSuffix )\n            );\n        }\n\n        return null;\n    }\n\n    @Override\n    public void prepareScopeIterators ( String szNSName, List<NamespaceIteratorPair> pairs ) throws IOException {\n        List<ScopedPackage > scope = null;\n        if( this.mSearchScope != 
null ) {\n            scope = this.mSearchScope.getAllScopes();\n\n            for( ScopedPackage pkg : scope ) {\n                this.prepareIterators( pkg.packageName() + NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR + szNSName, pairs );\n            }\n        }\n\n        if( this.mSearchScope == null || scope.isEmpty() ) {\n            this.prepareIterators( szNSName, pairs );\n        }\n    }\n\n    @Override\n    public void prepareIterators ( String szNSName, List<NamespaceIteratorPair> pairs ) throws IOException {\n        String packagePath          = szNSName.replace ( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, NamespaceCollector.RESOURCE_NAME_SEPARATOR );\n        Enumeration<URL > resources = this.mClassLoader.getResources( packagePath );\n        if ( !resources.hasMoreElements() ) {\n            return;\n        }\n\n        while ( resources.hasMoreElements() ) {\n            URL url = resources.nextElement();\n            NamespaceIteratorPair pair = this.newIteratorPair( url, szNSName );\n\n            if( pair != null ) {\n                pairs.add( pair );\n            }\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/InnerMetadataReader.java",
    "content": "package com.pinecone.framework.util.lang;\n\npublic class InnerMetadataReader implements MetadataReader {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/JarClassCollectorAdapter.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.lang.iterator.JarEntryIterator;\n\nimport java.io.IOException;\nimport java.util.Enumeration;\nimport java.util.List;\nimport java.util.jar.JarEntry;\n\npublic class JarClassCollectorAdapter implements PathNamespaceCollectum {\n    @Override\n    public boolean matched( String szProtocol ) {\n        return szProtocol.toLowerCase().equals( NamespaceCollector.KEY_JAR_PROTOCOL );\n    }\n\n    @Override\n    public void collect         ( String szResourcePath, String szPackageName, List<String > classNames, boolean bCollectChildren ) {\n        this.collect0( szResourcePath, szPackageName, classNames, bCollectChildren );\n    }\n\n    @Override\n    public String collectFirst  ( String szResourcePath, String szPackageName ) {\n        return this.collect0( szResourcePath, szPackageName, null, false );\n    }\n\n    public String collect0 ( String szResourcePath, String szPackageName, List<String > classNames, boolean bCollectChildren ) {\n        try {\n            JarEntryIterator iterator        = new JarEntryIterator( szResourcePath, \".class\" );\n            Enumeration<JarEntry> entries    = iterator.entries ();\n            String packagePath               = iterator.getPackagePath();\n            String classesScopePath          = iterator.getClassesScopePath();\n\n            while ( entries.hasMoreElements () ) {\n                JarEntry jarEntry = entries.nextElement ();\n                String entryName = jarEntry.getName ();\n                if ( entryName.endsWith ( \".class\" ) ) {\n                    if ( bCollectChildren && classNames != null ) { // [@Harald Notice] No need for recursion, for JAR files, this flag is usually processed in a tiled manner\n                        if ( entryName.startsWith ( packagePath ) ) {\n                            entryName = entryName.replace ( 
NamespaceCollector.RESOURCE_NAME_SEPARATOR, \".\" ).substring ( 0, entryName.lastIndexOf ( \".\" ) );\n                            classNames.add ( entryName );\n                        }\n                    }\n                    else {\n                        int index = entryName.lastIndexOf ( NamespaceCollector.RESOURCE_NAME_SEPARATOR );\n                        String myPackagePath;\n                        if ( index != -1 ) {\n                            myPackagePath = entryName.substring ( 0, index );\n                        }\n                        else {\n                            myPackagePath = entryName;\n                        }\n\n                        boolean bQualified = false;\n                        if( classesScopePath == null ) {\n                            if( myPackagePath.equals( packagePath ) ) {\n                                bQualified = true;\n                            }\n                        }\n                        else {\n                            if ( myPackagePath.startsWith( classesScopePath ) && myPackagePath.endsWith( packagePath ) ) {\n                                bQualified = true;\n                            }\n                        }\n\n                        if ( bQualified ) {\n                            entryName = JarUtils.normalizeJarClassName( entryName, classesScopePath );\n\n                            if( classNames == null ) {\n                                return entryName;\n                            }\n                            else {\n                                classNames.add ( entryName );\n                            }\n                        }\n                    }\n                }\n            }\n        }\n        catch ( IOException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n\n        return null;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/JarPackageCollectorAdapter.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.lang.iterator.JarEntryIterator;\n\nimport java.io.IOException;\nimport java.util.Enumeration;\nimport java.util.List;\nimport java.util.jar.JarEntry;\n\npublic class JarPackageCollectorAdapter implements PathNamespaceCollectum {\n    @Override\n    public boolean matched( String szProtocol ) {\n        return szProtocol.toLowerCase().equals( NamespaceCollector.KEY_JAR_PROTOCOL );\n    }\n\n    @Override\n    public void collect         ( String szResourcePath, String szPackageName, List<String > packageNames, boolean bCollectChildren ) {\n        this.collect0( szResourcePath, szPackageName, packageNames, bCollectChildren );\n    }\n\n    @Override\n    public String collectFirst  ( String szResourcePath, String szNSName ) {\n        return this.collect0( szResourcePath, szNSName, null, false );\n    }\n\n    public String collect0 ( String szResourcePath, String szNSName, List<String > packageNames, boolean bCollectChildren ) {\n        try {\n            JarEntryIterator iterator        = new JarEntryIterator( szResourcePath, \".class\" );\n            Enumeration<JarEntry> entries    = iterator.entries ();\n            String packagePath               = iterator.getPackagePath();\n            String classesScopePath          = iterator.getClassesScopePath();\n\n            while ( entries.hasMoreElements () ) {\n                JarEntry jarEntry = entries.nextElement ();\n                String entryName  = jarEntry.getName ();\n                if( jarEntry.isDirectory() ) {\n                    if( classesScopePath != null && entryName.startsWith( classesScopePath ) ) {\n                        entryName = entryName.replace( classesScopePath, \"\" );\n                    }\n\n                    if ( bCollectChildren && packageNames != null ) { // 
[@Harald Notice] No need for recursion, for JAR files, this flag is usually processed in a tiled manner\n                        if ( entryName.startsWith ( packagePath ) && !entryName.equals( packagePath + NamespaceCollector.RESOURCE_NAME_SEPARATOR ) ) {\n                            entryName = entryName.replace ( NamespaceCollector.RESOURCE_NAME_SEPARATOR, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR );\n                            entryName = entryName.substring ( 0, entryName.lastIndexOf ( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ) );\n                            packageNames.add ( entryName );\n                        }\n                    }\n                    else {\n                        //Debug.trace( jarEntry.getName(),packagePath  );\n\n                        if ( entryName.startsWith ( packagePath ) ) {\n                            String childSegment = entryName.substring ( packagePath.length() );\n                            if( StringUtils.countOccurrencesOf( childSegment, NamespaceCollector.RESOURCE_NAME_SEPARATOR, 3 ) > 2 ) {\n                                continue;\n                            }\n                            if( entryName.equals( packagePath + NamespaceCollector.RESOURCE_NAME_SEPARATOR ) ) { // Self path\n                                continue;\n                            }\n\n                            entryName = entryName.replace ( NamespaceCollector.RESOURCE_NAME_SEPARATOR, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR );\n                            entryName = entryName.substring ( 0, entryName.lastIndexOf ( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ) );\n\n                            if( packageNames == null ) {\n                                return entryName;\n                            }\n                            else {\n                                packageNames.add ( entryName );\n                            }\n                        }\n                    }\n                }\n            }\n       
 }\n        catch ( IOException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n\n        return null;\n    }\n\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/JarUtils.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.io.IOException;\nimport java.util.Enumeration;\nimport java.util.Iterator;\nimport java.util.LinkedList;\nimport java.util.jar.JarEntry;\nimport java.util.jar.JarInputStream;\n\npublic final class JarUtils {\n    public static Enumeration<JarEntry > fetchEnumeration( JarInputStream jarInputStream ) throws IOException {\n        LinkedList<JarEntry > buf = new LinkedList<>();\n\n        JarEntry jarEntry;\n        while ( ( jarEntry = jarInputStream.getNextJarEntry() ) != null ) {\n            buf.add( jarEntry );\n            jarInputStream.closeEntry();\n        }\n\n        return new Enumeration<JarEntry>() {\n            private Iterator<JarEntry > iterator = buf.iterator();\n            @Override\n            public boolean hasMoreElements() {\n                return this.iterator.hasNext();\n            }\n\n            @Override\n            public JarEntry nextElement() {\n                return this.iterator.next();\n            }\n        };\n    }\n\n    public static String normalizeJarClassName( String entryName, String classesScopePath ) {\n        if( classesScopePath != null && entryName.startsWith( classesScopePath ) ) {\n            entryName = entryName.replace( classesScopePath, \"\" );\n        }\n        return entryName.replace( NamespaceCollector.RESOURCE_NAME_SEPARATOR, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ).substring(\n                0, entryName.lastIndexOf( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR_C )\n        );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/LazyScopedPackage.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class LazyScopedPackage implements ScopedPackage {\n    protected String                      mszPackageName;\n    protected ClassLoader                 mClassLoader;\n    protected NamespaceCollector          mPackageCollector;\n    protected NamespaceCollector          mClassCollector;\n    protected ScopedPackage               mParent;\n    protected List<ScopedPackage >        mChildren;\n    protected Package                     mPackage;\n\n    public LazyScopedPackage( String packageName, ScopedPackage parent, ClassLoader classLoader, NamespaceCollector packageCollector, NamespaceCollector classCollector ) {\n        this.mParent           = parent;\n        this.mszPackageName    = packageName;\n        this.mClassLoader      = classLoader;\n        this.mPackageCollector = packageCollector;\n        this.mClassCollector   = classCollector;\n    }\n\n    public LazyScopedPackage( String packageName, ScopedPackage parent, ClassLoader classLoader ) {\n        this( packageName, parent, classLoader, null, null );\n\n        this.mPackageCollector = new PackageNameFetcher( this.mClassLoader );\n    }\n\n    public LazyScopedPackage( String packageName, ScopedPackage parent ) {\n        this( packageName, parent, Thread.currentThread().getContextClassLoader() );\n    }\n\n    public LazyScopedPackage( String packageName, ClassLoader classLoader ) {\n        this( packageName, null, classLoader );\n    }\n\n    public LazyScopedPackage( String packageName ) {\n        this( packageName, (ScopedPackage) null );\n    }\n\n    @Override\n    public String parentName() {\n        int lastDotIndex = mszPackageName.lastIndexOf('.');\n        if ( lastDotIndex == -1 ) {\n            return null;\n        }\n        return mszPackageName.substring( 0, lastDotIndex );\n    }\n\n    @Override\n    public ScopedPackage parent() {\n        if( 
this.mParent == null ) {\n            String parentName = this.parentName();\n            if ( parentName == null ) {\n                return null;\n            }\n\n            this.mParent = new LazyScopedPackage( parentName, null, this.mClassLoader, this.mPackageCollector, this.mClassCollector );\n        }\n\n        return this.mParent;\n    }\n\n    @Override\n    public List<ScopedPackage > children() {\n        if( this.mChildren == null ) {\n            this.mChildren = new ArrayList<>();\n\n            List<String > namesList = this.getPackageCollector().fetch( this.packageName(), false );\n            for( String name : namesList ) {\n                this.mChildren.add( new LazyScopedPackage( name, this, this.mClassLoader, this.mPackageCollector, this.mClassCollector ) );\n            }\n        }\n\n        return this.mChildren;\n    }\n\n    @Override\n    public List<String > fetchChildrenNames() {\n        List<ScopedPackage > children = this.mChildren;\n        if( children == null ) {\n            children = this.children();\n        }\n\n        List<String > namesList = new ArrayList<>();\n        for( ScopedPackage scopedPackage : children ) {\n            namesList.add( scopedPackage.packageName() );\n        }\n        return namesList;\n    }\n\n    @Override\n    public List<String > fetchChildrenClassNames() {\n        if( this.mClassCollector == null ) {\n            this.mClassCollector = new ClassNameFetcher( this.getClassLoader() );\n        }\n\n        return this.mClassCollector.fetch( this.packageName(), false );\n    }\n\n    @Override\n    public String fetchFirstClassName() {\n        if( this.mClassCollector == null ) {\n            this.mClassCollector = new ClassNameFetcher( this.getClassLoader() );\n        }\n\n        return this.mClassCollector.fetchFirst( this.packageName() );\n    }\n\n    @Override\n    public String packageName() {\n        return mszPackageName;\n    }\n\n    @Override\n    public ClassLoader 
getClassLoader() {\n        return this.mClassLoader;\n    }\n\n    @Override\n    public NamespaceCollector getPackageCollector() {\n        return this.mPackageCollector;\n    }\n\n    @Override\n    public boolean hasLoaded() {\n        if( this.mPackage != null ) {\n            return true;\n        }\n\n        Package pkg = this.getClassLoader().getDefinedPackage( this.packageName() );\n        if ( pkg != null ) {\n            this.mPackage = pkg;\n            return true;\n        }\n\n        return false;\n    }\n\n    @Override\n    public Package tryLoad() {\n        if( this.mPackage != null ) {\n            return this.mPackage;\n        }\n\n        String szFirstClass = this.fetchFirstClassName();\n        if( szFirstClass != null ) {\n            try{\n                Class<?> cls = this.mClassLoader.loadClass( szFirstClass );\n                if( cls == null ) {\n                    return null;\n                }\n                this.mPackage = this.getPackage();\n            }\n            catch ( Exception e ) {\n                this.mPackage = null;\n            }\n        }\n\n        return this.mPackage;\n    }\n\n    @Override\n    public Package getPackage() {\n        if( this.mPackage == null ) {\n            this.mPackage = this.getClassLoader().getDefinedPackage( this.packageName() );\n        }\n        return this.mPackage;\n    }\n\n    @Override\n    public String toString() {\n        return this.mszPackageName;\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/MetadataReader.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface MetadataReader extends Pinenut {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/MultiClassScopeLoader.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.util.name.Name;\n\nimport java.util.List;\n\npublic interface MultiClassScopeLoader extends ClassScopeLoader {\n    List loads( Name name ) ;\n\n    void addIncludeFilter( ClassFilter includeFilter ) ;\n\n    void addExcludeFilter( ClassFilter excludeFilter ) ;\n\n    void resetFilters    ( boolean useDefaultFilters );\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/NSProtocolIteratorsFactoryAdapter.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.IOException;\nimport java.util.List;\n\npublic interface NSProtocolIteratorsFactoryAdapter extends Pinenut {\n    void prepareScopeIterators ( String szNSName, List<NamespaceIteratorPair> pairs ) throws IOException;\n\n    void prepareIterators      ( String szNSName, List<NamespaceIteratorPair> pairs ) throws IOException;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/NamespaceCollector.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.net.URL;\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic interface NamespaceCollector extends Pinenut {\n    String KEY_FILE_PROTOCOL             = \"file\";\n    String KEY_JAR_PROTOCOL              = \"jar\";\n    String RESOURCE_NAME_SEPARATOR       = \"/\";\n    String JAVA_PKG_CLASS_SEPARATOR      = \".\";\n    char   JAVA_PKG_CLASS_SEPARATOR_C    = '.';\n\n    default List<String> fetch ( String szNSName ) {\n        return this.fetch( szNSName, true );\n    }\n\n    default List<String> fetch ( String szNSName, boolean bCollectChildPackage ) {\n        List<String > list = new ArrayList<>();\n        this.fetch( szNSName, list,bCollectChildPackage );\n        return list;\n    }\n\n    default void fetch ( String szNSName, List<String > collections ) {\n        this.fetch( szNSName, collections, true );\n    }\n\n    void fetch ( String szNSName, List<String > collections, boolean bCollectChildPackage ) ;\n\n    String fetchFirst ( String szNSName ) ;\n\n\n    default List<String> fetch ( URL url, String szNSName ) {\n        return this.fetch( url, szNSName, true );\n    }\n\n    default List<String> fetch ( URL url, String szNSName, boolean bCollectChildPackage ) {\n        List<String > list = new ArrayList<>();\n        this.fetch( url, szNSName, list,bCollectChildPackage );\n        return list;\n    }\n\n    default void fetch ( URL url, String szNSName, List<String > collections ) {\n        this.fetch( url, szNSName, collections, true );\n    }\n\n    void fetch ( URL url, String szNSName, List<String > collections, boolean bCollectChildPackage ) ;\n\n    String fetchFirst ( URL url, String szNSName ) ;\n\n    ClassLoader getClassLoader();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/NamespaceIteratorPair.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.lang.iterator.NamespaceIterator;\n\npublic class NamespaceIteratorPair implements Pinenut {\n    public NamespaceIterator classIter;\n    public NamespaceIterator packageIter;\n\n    NamespaceIteratorPair( NamespaceIterator classIter, NamespaceIterator packageIter ) {\n        this.classIter    = classIter;\n        this.packageIter  = packageIter;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ObjectCandidateScanner.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport com.pinecone.framework.util.lang.iterator.NamespaceIterator;\n\npublic class ObjectCandidateScanner implements ClassScanner {\n    protected ClassLoader                          mClassLoader        ;\n    protected ClassScope                           mSearchScope        ;\n    protected List<TypeFilter >                    mIncludeFilters     ;\n    protected List<TypeFilter >                    mExcludeFilters     ;\n    protected List<NamespaceIteratorPair>          mIterators          ;\n    protected NSProtocolIteratorsFactoryAdapter    mIteratorsFactory   ;\n\n    public ObjectCandidateScanner     ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory ) {\n        this.mSearchScope        = searchScope       ;\n        this.mClassLoader        = classLoader       ;\n        this.mIncludeFilters     = new ArrayList<>() ;\n        this.mExcludeFilters     = new ArrayList<>() ;\n        this.mIterators          = new ArrayList<>() ;\n        this.mIteratorsFactory   = iteratorsFactory  ;\n    }\n\n    public ObjectCandidateScanner     ( ClassScope searchScope, ClassLoader classLoader ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ) );\n    }\n\n    @Override\n    public void addIncludeFilter     ( TypeFilter filter          ) {\n        this.mIncludeFilters.add( filter );\n    }\n\n    @Override\n    public void addExcludeFilter     ( TypeFilter filter          ) {\n        this.mExcludeFilters.add( filter );\n    }\n\n    @Override\n    public void addIterator          ( NamespaceIterator classIter, NamespaceIterator packageIter ) {\n        this.addIterator( new NamespaceIteratorPair( classIter, packageIter ) );\n    }\n\n    protected void addIterator       ( NamespaceIteratorPair iteratorPair ) {\n  
      this.mIterators.add( iteratorPair );\n    }\n\n    @Override\n    public void scan( String szNSName, boolean bCollectChildPackage, List<String > candidates ) throws IOException {\n        if ( this.mIterators.isEmpty() ) {\n            this.mIteratorsFactory.prepareScopeIterators( szNSName, this.mIterators );\n        }\n\n        this.scan0( this.mIterators, bCollectChildPackage, candidates );\n    }\n\n\n    protected void scan0( List<NamespaceIteratorPair> pairs, boolean bCollectChildPackage, List<String > candidates ) throws IOException {\n        for ( NamespaceIteratorPair pair : pairs ) {\n            NamespaceIterator classIter = pair.classIter;\n            NamespaceIterator pkgIter   = pair.packageIter;\n\n            while ( classIter.hasNext() ) {\n                String szClassName = classIter.next();\n                if ( !this.filter( szClassName ) ) {\n                    candidates.add( szClassName );\n                }\n            }\n\n            if ( bCollectChildPackage ) {\n                while ( pkgIter.hasNext() ) {\n                    String szPackageName = pkgIter.next();\n                    List<NamespaceIteratorPair> children = new ArrayList<>();\n                    this.mIteratorsFactory.prepareIterators( szPackageName, children );\n                    if ( !children.isEmpty() ) {\n                        this.scan0( children, bCollectChildPackage, candidates );\n                    }\n                }\n            }\n        }\n\n        // Clear the cache to prevent an unexpected next iteration.\n        pairs.clear();\n    }\n\n    protected boolean filter( String szClassName ) {\n        try {\n            for ( TypeFilter filter : this.mIncludeFilters ) {\n                if ( filter.match( szClassName, null ) ) {\n                    return false;\n                }\n            }\n\n            for ( TypeFilter filter : this.mExcludeFilters ) {\n                if ( filter.match( szClassName, null ) ) {\n                    
return true;\n                }\n            }\n        }\n        catch ( IOException e ) {\n            return true;\n        }\n\n        return false;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ObjectScanner.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.io.IOException;\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.lang.iterator.NamespaceIterator;\n\npublic interface ObjectScanner extends Pinenut {\n\n    void addIncludeFilter     ( TypeFilter filter          );\n\n    void addExcludeFilter     ( TypeFilter filter          );\n\n    void addIterator          ( NamespaceIterator classIter, NamespaceIterator packageIter ) ;\n\n    void scan( String szNSName, boolean bCollectChildPackage, List<String > candidates ) throws IOException;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/PackageNameFetcher.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport java.util.List;\n\npublic class PackageNameFetcher extends ArchMultiProtocolNamespaceFetcher implements NamespaceCollector {\n    protected PathNamespaceCollectum   mFileAdapter;\n    protected PathNamespaceCollectum   mJarAdapter;\n\n    public PackageNameFetcher ( List<PathNamespaceCollectum > collectors, ClassLoader classLoader ) {\n        super( collectors, classLoader );\n    }\n\n    public PackageNameFetcher ( List<PathNamespaceCollectum > collectors ) {\n        super( collectors );\n    }\n\n    public PackageNameFetcher ( PathNamespaceCollectum fileAdapter, PathNamespaceCollectum jarAdapter, ClassLoader classLoader ) {\n        super( fileAdapter, jarAdapter, classLoader );\n    }\n\n    public PackageNameFetcher ( ClassLoader classLoader ) {\n        this( new FilePackageCollectorAdapter(), new JarPackageCollectorAdapter(), classLoader );\n    }\n\n    public PackageNameFetcher () {\n        this( Thread.currentThread().getContextClassLoader() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/PathNamespaceCollectum.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.List;\n\npublic interface PathNamespaceCollectum extends Pinenut {\n    boolean matched      ( String szProtocol );\n\n    void    collect      ( String szResourcePath, String szPackageName, List<String > classNames, boolean bCollectChildren );\n\n    String  collectFirst ( String szResourcePath, String szPackageName );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/ScopedPackage.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.List;\n\npublic interface ScopedPackage extends Pinenut, Comparable<ScopedPackage > {\n    String parentName() ;\n\n    ScopedPackage parent() ;\n\n    List<ScopedPackage > children() ;\n\n    List<String > fetchChildrenNames() ;\n\n    String packageName() ;\n\n    ClassLoader getClassLoader() ;\n\n    NamespaceCollector getPackageCollector() ;\n\n    List<String > fetchChildrenClassNames();\n\n    String fetchFirstClassName();\n\n    Package getPackage();\n\n    boolean hasLoaded();\n\n    Package tryLoad();\n\n    static ScopedPackage defaultInstance( String packageName, ClassLoader classLoader ) {\n        return new LazyScopedPackage( packageName, classLoader );\n    }\n\n    static ScopedPackage defaultInstance( String packageName ) {\n        return new LazyScopedPackage( packageName );\n    }\n\n    @Override\n    default int compareTo( ScopedPackage o ){\n        return this.packageName().compareTo( o.packageName() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/TypeFilter.java",
    "content": "package com.pinecone.framework.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.io.IOException;\n\npublic interface TypeFilter extends Pinenut {\n    boolean match( String szClassName, Object pool ) throws IOException;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/ArchJarEntryIterator.java",
    "content": "package com.pinecone.framework.util.lang.iterator;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.lang.JarUtils;\nimport com.pinecone.framework.util.lang.NamespaceCollector;\n\nimport java.io.IOException;\nimport java.util.LinkedList;\nimport java.util.Deque;\nimport java.util.Enumeration;\nimport java.util.NoSuchElementException;\nimport java.util.jar.JarEntry;\nimport java.util.jar.JarFile;\nimport java.util.jar.JarInputStream;\nimport java.util.zip.ZipEntry;\n\npublic abstract class ArchJarEntryIterator implements Pinenut {\n    protected JarFile                  mJarFile;\n    protected Enumeration<JarEntry >   mEntries;\n    protected String                   mPackagePath;\n    protected String                   mClassesScopePath = null;  // e.g. BOOT-INF/lib/\n    protected JarEntry                 mCurrentEntry;\n    protected String                   mszSuffix      ;\n\n    public ArchJarEntryIterator( String szResourcePath, String szSuffix ) throws IOException {\n        this.mszSuffix     = szSuffix;\n        String[] jarInfo   = szResourcePath.split ( \"!\" );\n        String jarFilePath = jarInfo[0].substring ( jarInfo[0].indexOf ( NamespaceCollector.RESOURCE_NAME_SEPARATOR ) );\n\n        this.mJarFile      = new JarFile( jarFilePath );\n\n        boolean bUsingFile = true;\n        String packagePath = szResourcePath;\n        if( jarInfo.length > 1 ) {\n            String szCurrentFragName = jarInfo[1].substring(1);\n            if( jarInfo.length > 2 && szCurrentFragName.endsWith(\".jar\") ) {\n                ZipEntry zipEntry = this.mJarFile.getEntry( szCurrentFragName );\n                if( zipEntry == null ) {\n                    throw new IOException( \"Illegal resource path: \" + szResourcePath );\n                }\n\n                Deque<JarInputStream> streamStack = new LinkedList<>();\n                JarInputStream jarInputStream = new JarInputStream(\n                     
   this.mJarFile.getInputStream( zipEntry )\n                );\n                streamStack.addFirst( jarInputStream );\n\n                try{\n                    if( jarInfo.length > 3 ) {\n                        for ( int i = 2; i < jarInfo.length - 1; ++i ) {\n                            if ( !jarInfo[i].toLowerCase().endsWith(\".jar\") ) {\n                                break;\n                            }\n\n                            szCurrentFragName = jarInfo[i].substring(1);\n                            JarEntry jarEntry;\n                            while ( ( jarEntry = jarInputStream.getNextJarEntry() ) != null ) {\n                                String szJarEntryName = jarEntry.getName();\n                                if ( !jarEntry.isDirectory() && szJarEntryName.equals( szCurrentFragName ) ) {\n                                    jarInputStream = new JarInputStream( this.mJarFile.getInputStream( jarEntry ) );\n                                    streamStack.addFirst( jarInputStream );\n                                    break;\n                                }\n                                jarInputStream.closeEntry();\n                            }\n                        }\n                    }\n\n                    bUsingFile = false;\n                    // [@Harald Notice] Using a temporary enumeration to prevent unexpected resource leaks.\n                    this.mEntries = JarUtils.fetchEnumeration( jarInputStream );\n                }\n                finally {\n                    JarInputStream t;\n                    // [@Harald Notice] All `JarInputStream` instances should be closed and released in the nested scenario.\n                    while ( ( t = streamStack.peek() ) != null ) {\n                        t.close();\n                        streamStack.pop();\n                    }\n                }\n\n            }\n            else{\n                if( jarInfo.length != 2 ) {\n                    this.mClassesScopePath = 
szCurrentFragName + \"/\";\n                }\n            }\n            packagePath = jarInfo[ jarInfo.length - 1 ].substring ( 1 );\n        }\n        this.mPackagePath  = packagePath;\n\n        if( bUsingFile ) {\n            this.mEntries      = this.mJarFile.entries();\n        }\n\n        this.skipEntries();\n    }\n\n    public boolean hasNext() {\n        return this.mCurrentEntry != null;\n    }\n\n    public Object next() {\n        if ( !this.hasNext() ) {\n            throw new NoSuchElementException();\n        }\n\n        return this.mCurrentEntry;\n    }\n\n    public String getClassesScopePath() {\n        return this.mClassesScopePath;\n    }\n\n    public String getPackagePath() {\n        return this.mPackagePath;\n    }\n\n    public Enumeration<JarEntry > entries() {\n        return this.mEntries;\n    }\n\n    protected abstract void skipEntries() ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/DirectoryFileIterator.java",
    "content": "package com.pinecone.framework.util.lang.iterator;\n\nimport com.pinecone.framework.util.lang.NamespaceCollector;\n\nimport java.io.File;\nimport java.util.NoSuchElementException;\nimport java.util.Objects;\nimport java.util.function.Consumer;\n\npublic class DirectoryFileIterator implements NamespaceIterator {\n    protected File   mFile          ;\n    protected File[] mChildFiles    ;\n    protected String mNSNamePathFmt ;\n    protected String mszSuffix      ;\n\n    protected int    mCursor           = 0;\n    protected int    mLastRet          = -1;\n\n    public DirectoryFileIterator( String szResourcePath, String szNSName, String szSuffix ) {\n        this.mszSuffix      = szSuffix;\n        this.mFile          = new File( szResourcePath );\n        this.mChildFiles    = this.mFile.listFiles();\n        this.mNSNamePathFmt = szNSName.replace( NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR, File.separator );\n\n        if( this.mChildFiles == null ) {\n            this.mChildFiles = new File[0];\n        }\n\n        this.skipEntities();\n    }\n\n    @Override\n    public boolean hasNext() {\n        return this.mCursor < this.mChildFiles.length;\n    }\n\n    protected String replacePathName( String sz ) {\n        String szPackageSegment = sz.substring( sz.indexOf( this.mNSNamePathFmt ) );\n        return szPackageSegment.replace( File.separator, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ).replace( this.mszSuffix, \"\" );\n    }\n\n\n    public String next() {\n        if ( !this.hasNext() ) {\n            throw new NoSuchElementException();\n        }\n\n        this.mLastRet = this.mCursor;\n        String childFilePath = this.mChildFiles[ this.mCursor++ ].getPath();\n\n        this.skipEntities();\n\n        return this.replacePathName( childFilePath );\n    }\n\n    @Override\n    public void forEachRemaining( Consumer<? 
super String > action ) {\n        Objects.requireNonNull(action);\n        final int size = this.mChildFiles.length;\n        int i = this.mCursor;\n        if ( i < size ) {\n            for ( ; i < size ; i++ )  {\n                if( this.sift( this.mChildFiles[i] ) ) {\n                    continue;\n                }\n                action.accept( this.mChildFiles[i].getPath() );\n            }\n\n            this.mCursor  = i;\n            this.mLastRet = i - 1;\n        }\n    }\n\n    protected boolean sift( File file ) {\n        return file.isDirectory() && ! file.getPath().endsWith ( this.mszSuffix );\n    }\n\n    protected void skipEntities() {\n        while ( this.mCursor < this.mChildFiles.length && this.sift( this.mChildFiles[ this.mCursor ] ) ) {\n            ++this.mCursor;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/DirectoryPackageIterator.java",
    "content": "package com.pinecone.framework.util.lang.iterator;\n\nimport java.io.File;\n\nimport com.pinecone.framework.util.lang.NamespaceCollector;\n\npublic class DirectoryPackageIterator extends DirectoryFileIterator {\n    public DirectoryPackageIterator( String szResourcePath, String szNSName, String szSuffix ) {\n        super( szResourcePath, szNSName, szSuffix );\n    }\n\n    @Override\n    protected String replacePathName( String sz ) {\n        String szPackageSegment = sz.substring( sz.indexOf( this.mNSNamePathFmt ) );\n        return szPackageSegment.replace( File.separator, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR );\n    }\n\n    @Override\n    protected boolean sift( File file ) {\n        return !file.isDirectory() ;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/JarEntryIterator.java",
    "content": "package com.pinecone.framework.util.lang.iterator;\n\nimport java.io.IOException;\nimport java.util.Iterator;\nimport java.util.NoSuchElementException;\nimport java.util.Objects;\nimport java.util.function.Consumer;\nimport java.util.jar.JarEntry;\n\npublic class JarEntryIterator extends ArchJarEntryIterator implements Iterator<JarEntry > {\n    public JarEntryIterator( String szResourcePath, String szSuffix ) throws IOException {\n        super( szResourcePath, szSuffix );\n    }\n\n    @Override\n    public boolean hasNext() {\n        return this.mEntries.hasMoreElements();\n    }\n\n    @Override\n    public JarEntry next() {\n        if ( !this.hasNext() ) {\n            throw new NoSuchElementException();\n        }\n\n        this.mCurrentEntry = this.mEntries.nextElement();\n\n        return this.mCurrentEntry;\n    }\n\n    @Override\n    public void forEachRemaining( Consumer<? super JarEntry> action ) {\n        Objects.requireNonNull( action );\n        while ( this.hasNext() ) {\n            action.accept( this.next() );\n        }\n    }\n\n    @Override\n    protected void skipEntries() {\n\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/JarFileIterator.java",
    "content": "package com.pinecone.framework.util.lang.iterator;\nimport com.pinecone.framework.util.lang.JarUtils;\nimport com.pinecone.framework.util.lang.NamespaceCollector;\n\nimport java.io.IOException;\nimport java.util.NoSuchElementException;\nimport java.util.Objects;\nimport java.util.function.Consumer;\nimport java.util.jar.JarEntry;\n\n\npublic class JarFileIterator extends ArchJarEntryIterator implements NamespaceIterator {\n    public JarFileIterator( String szResourcePath, String szSuffix ) throws IOException {\n        super( szResourcePath, szSuffix );\n    }\n\n    @Override\n    public boolean hasNext() {\n        return this.mCurrentEntry != null;\n    }\n\n    @Override\n    public String next() {\n        if ( !this.hasNext() ) {\n            throw new NoSuchElementException();\n        }\n\n        String entryName = this.mCurrentEntry.getName();\n        String className = JarUtils.normalizeJarClassName( entryName, this.mClassesScopePath );\n\n        this.skipEntries();\n\n        return className;\n    }\n\n    @Override\n    public void forEachRemaining( Consumer<? 
super String> action ) {\n        Objects.requireNonNull( action );\n        while ( this.hasNext() ) {\n            action.accept( this.next() );\n        }\n    }\n\n    @Override\n    protected void skipEntries() {\n        while ( this.mEntries.hasMoreElements() ) {\n            JarEntry entry   = this.mEntries.nextElement();\n            String entryName = entry.getName();\n            //Debug.trace( entryName );\n            if ( entryName.endsWith( this.mszSuffix ) ) {\n                int index = entryName.lastIndexOf( NamespaceCollector.RESOURCE_NAME_SEPARATOR );\n                String myPackagePath;\n                if ( index == -1 ) {\n                    myPackagePath = entryName;\n                }\n                else {\n                    myPackagePath = entryName.substring( 0, index );\n                }\n\n                if( this.mClassesScopePath == null ) {\n                    if ( myPackagePath.equals( this.mPackagePath ) ) {\n                        this.mCurrentEntry = entry;\n                        return;\n                    }\n                }\n                else {\n                    if ( myPackagePath.startsWith( this.mClassesScopePath ) && myPackagePath.endsWith( this.mPackagePath ) ) {\n                        this.mCurrentEntry = entry;\n                        return;\n                    }\n                }\n            }\n        }\n        this.mCurrentEntry = null; // No more valid entries\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/JarPackageIterator.java",
    "content": "package com.pinecone.framework.util.lang.iterator;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.lang.NamespaceCollector;\n\nimport java.io.IOException;\nimport java.util.NoSuchElementException;\nimport java.util.jar.JarEntry;\n\npublic class JarPackageIterator extends JarFileIterator {\n    public JarPackageIterator( String szResourcePath, String szSuffix ) throws IOException {\n        super( szResourcePath, szSuffix );\n    }\n\n    @Override\n    protected void skipEntries() {\n        while ( this.mEntries.hasMoreElements() ) {\n            JarEntry entry = this.mEntries.nextElement();\n            String entryName = entry.getName();\n            if ( entry.isDirectory() ) {\n                if( this.mClassesScopePath != null && entryName.startsWith( this.mClassesScopePath ) ) {\n                    entryName = entryName.replace( this.mClassesScopePath, \"\" );\n                }\n\n                if ( entryName.startsWith( this.mPackagePath ) ) {\n                    String childSegment = entryName.substring( this.mPackagePath.length() );\n                    if( StringUtils.countOccurrencesOf( childSegment, NamespaceCollector.RESOURCE_NAME_SEPARATOR, 3 ) > 2 ) {\n                        continue;\n                    }\n                    if( entryName.equals( this.mPackagePath + NamespaceCollector.RESOURCE_NAME_SEPARATOR ) ) { // Self path\n                        continue;\n                    }\n\n                    this.mCurrentEntry = entry;\n                    return;\n                }\n            }\n        }\n        this.mCurrentEntry = null; // No more valid entries\n    }\n\n    @Override\n    public String next() {\n        if ( !this.hasNext() ) {\n            throw new NoSuchElementException();\n        }\n\n        String entryName   = this.mCurrentEntry.getName();\n        if( this.mClassesScopePath != null ) {\n            if( entryName.startsWith( this.mClassesScopePath ) ) {\n  
              entryName = entryName.replace( this.mClassesScopePath, \"\" );\n            }\n        }\n        String packageName = entryName.replace( NamespaceCollector.RESOURCE_NAME_SEPARATOR, NamespaceCollector.JAVA_PKG_CLASS_SEPARATOR ).substring( 0, entryName.length() - 1 );\n\n        this.skipEntries();\n\n        return packageName;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lang/iterator/NamespaceIterator.java",
    "content": "package com.pinecone.framework.util.lang.iterator;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Iterator;\n\npublic interface NamespaceIterator extends Iterator<String >, Pinenut {\n    boolean hasNext();\n\n    String next();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lock/ReentrantReadWriteSpinLock.java",
    "content": "package com.pinecone.framework.util.lock;\n\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.locks.Condition;\nimport java.util.concurrent.locks.ReadWriteLock;\nimport java.util.concurrent.atomic.AtomicInteger;\nimport java.util.concurrent.locks.Lock;\n\npublic class ReentrantReadWriteSpinLock implements ReadWriteLock {\n    private final AtomicInteger               mutexSignal = new AtomicInteger( 0 );\n    private static final int              WRITE_LOCK_MASK = 1 << 16;\n\n    private Thread                          writingThread = null;\n    private int                       writeReentrantCount = 0;\n\n    private final ThreadLocal<Integer> readReentrantCount = ThreadLocal.withInitial(() -> 0);\n\n    private final Lock                           readLock = new ReadLock();\n    private final Lock                          writeLock = new WriteLock();\n\n    @Override\n    public Lock readLock() {\n        return this.readLock;\n    }\n\n    @Override\n    public Lock writeLock() {\n        return this.writeLock;\n    }\n\n    private class ReadLock implements Lock {\n        @Override\n        public void lock() {\n            while ( !this.tryLock() ) {\n\n            }\n        }\n\n        @Override\n        public boolean tryLock() {\n            int currentState = mutexSignal.get();\n\n            if ( ( currentState & WRITE_LOCK_MASK ) != 0 && writingThread != Thread.currentThread() ) {\n                return false;\n            }\n\n            if ( mutexSignal.compareAndSet( currentState, currentState + 1 ) ) {\n                readReentrantCount.set( readReentrantCount.get() + 1 );\n                return true;\n            }\n            return false;\n        }\n\n        @Override\n        public void lockInterruptibly() throws InterruptedException {\n            while ( !this.tryLock() ) {\n                if ( Thread.interrupted() ) {\n                    throw new InterruptedException( \"Thread was interrupted 
while attempting to acquire read lock.\" );\n                }\n            }\n        }\n\n        @Override\n        public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {\n            long deadline = System.nanoTime() + unit.toNanos(time);\n            while ( !this.tryLock() ) {\n                if ( Thread.interrupted() ) {\n                    throw new InterruptedException( \"Thread was interrupted while attempting to acquire read lock.\" );\n                }\n                if ( System.nanoTime() > deadline ) {\n                    return false;\n                }\n            }\n            return true;\n        }\n\n        @Override\n        public void unlock() {\n            if (readReentrantCount.get() <= 0) {\n                throw new IllegalMonitorStateException( \"Read lock not held by current thread.\" );\n            }\n\n            readReentrantCount.set( readReentrantCount.get() - 1 );\n            mutexSignal.decrementAndGet();\n        }\n\n        @Override\n        public Condition newCondition() {\n            throw new UnsupportedOperationException();\n        }\n    }\n\n    private class WriteLock implements Lock {\n        @Override\n        public void lock() {\n            Thread currentThread = Thread.currentThread();\n\n            if ( writingThread == currentThread ) {\n                ++writeReentrantCount;\n                return;\n            }\n\n            while ( true ) {\n                int currentState = mutexSignal.get();\n\n                if ( currentState == 0 ) {\n                    if ( mutexSignal.compareAndSet( 0, WRITE_LOCK_MASK ) ) {\n                        writingThread = currentThread;\n                        writeReentrantCount = 1;\n                        break;\n                    }\n                }\n            }\n        }\n\n        @Override\n        public boolean tryLock() {\n            Thread currentThread = Thread.currentThread();\n\n            if ( 
writingThread == currentThread ) {\n                writeReentrantCount++;\n                return true;\n            }\n\n            if ( mutexSignal.compareAndSet(0, WRITE_LOCK_MASK) ) {\n                writingThread = currentThread;\n                writeReentrantCount = 1;\n                return true;\n            }\n            return false;\n        }\n\n        @Override\n        public void lockInterruptibly() throws InterruptedException {\n            while ( !this.tryLock() ) {\n                if ( Thread.interrupted() ) {\n                    throw new InterruptedException(\"Thread was interrupted while attempting to acquire write lock\");\n                }\n            }\n        }\n\n        @Override\n        public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {\n            long deadline = System.nanoTime() + unit.toNanos(time);\n            while ( !this.tryLock() ) {\n                if ( Thread.interrupted() ) {\n                    throw new InterruptedException(\"Thread was interrupted while attempting to acquire write lock\");\n                }\n                if ( System.nanoTime() > deadline ) {\n                    return false;\n                }\n            }\n            return true;\n        }\n\n        @Override\n        public void unlock() {\n            if ( writingThread != Thread.currentThread() ) {\n                throw new IllegalMonitorStateException(\"Write lock not held by current thread\");\n            }\n\n            if ( --writeReentrantCount == 0 ) {\n                writingThread = null;\n                mutexSignal.set(0);\n            }\n        }\n\n        @Override\n        public Condition newCondition() {\n            throw new UnsupportedOperationException();\n        }\n    }\n\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lock/ReentrantSpinLock.java",
    "content": "package com.pinecone.framework.util.lock;\n\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.locks.Condition;\nimport java.util.concurrent.locks.Lock;\n\npublic class ReentrantSpinLock implements Lock {\n    private final AtomicBoolean mMutexSignal     = new AtomicBoolean( false );\n    private Thread              mOwningThread    = null;\n    private int                 mnReentrantCount = 0;\n\n    @Override\n    public void lock() {\n        Thread currentThread = Thread.currentThread();\n\n        if ( currentThread == this.mOwningThread ) {\n            ++this.mnReentrantCount;\n            return;\n        }\n\n        while ( !this.mMutexSignal.compareAndSet( false, true ) ) {\n\n        }\n\n        this.mOwningThread = currentThread;\n        this.mnReentrantCount = 1;\n    }\n\n    @Override\n    public void unlock() {\n        Thread currentThread = Thread.currentThread();\n\n        if ( currentThread != this.mOwningThread ) {\n            return;\n            //throw new IllegalMonitorStateException( \"Calling thread has not locked this lock\" );\n        }\n\n        --this.mnReentrantCount;\n\n        if ( this.mnReentrantCount == 0 ) {\n            this.mOwningThread = null;\n            this.mMutexSignal.set( false );\n        }\n    }\n\n    @Override\n    public boolean tryLock() {\n        Thread currentThread = Thread.currentThread();\n\n        if ( currentThread == this.mOwningThread ) {\n            ++this.mnReentrantCount;\n            return true;\n        }\n\n        if ( this.mMutexSignal.compareAndSet( false, true ) ) {\n            this.mOwningThread = currentThread;\n            this.mnReentrantCount = 1;\n            return true;\n        }\n\n        return false;\n    }\n\n    @Override\n    public void lockInterruptibly() throws InterruptedException {\n        Thread currentThread = Thread.currentThread();\n\n        if ( currentThread == this.mOwningThread ) {\n            
++this.mnReentrantCount;\n            return;\n        }\n\n        while ( !this.mMutexSignal.compareAndSet( false, true ) ) {\n            if ( Thread.interrupted() ) {\n                throw new InterruptedException();\n            }\n        }\n\n        this.mOwningThread = currentThread;\n        this.mnReentrantCount = 1;\n    }\n\n    @Override\n    public boolean tryLock(long time, java.util.concurrent.TimeUnit unit) throws InterruptedException {\n        long endTime = System.nanoTime() + unit.toNanos(time);\n        Thread currentThread = Thread.currentThread();\n\n        if ( currentThread == this.mOwningThread ) {\n            this.mnReentrantCount++;\n            return true;\n        }\n\n        while ( !this.mMutexSignal.compareAndSet( false, true ) ) {\n            if ( System.nanoTime() > endTime ) {\n                return false;\n            }\n            if ( Thread.interrupted() ) {\n                throw new InterruptedException();\n            }\n        }\n\n        this.mOwningThread = currentThread;\n        this.mnReentrantCount = 1;\n        return true;\n    }\n\n    @Override\n    public Condition newCondition() {\n        throw new UnsupportedOperationException( \"ReentrantSpinLock does not support conditions.\" );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/lock/SpinLock.java",
    "content": "package com.pinecone.framework.util.lock;\n\nimport java.util.concurrent.atomic.AtomicBoolean;\nimport java.util.concurrent.locks.Condition;\nimport java.util.concurrent.locks.Lock;\n\npublic class SpinLock implements Lock {\n    private final AtomicBoolean mMutexSignal = new AtomicBoolean( false );\n\n    @Override\n    public void lock() {\n        while ( !this.mMutexSignal.compareAndSet(false, true) ) {\n\n        }\n    }\n\n    @Override\n    public void unlock() {\n        this.mMutexSignal.set(false);\n    }\n\n    @Override\n    public boolean tryLock() {\n        return this.mMutexSignal.compareAndSet( false, true );\n    }\n\n    @Override\n    public Condition newCondition() {\n        throw new UnsupportedOperationException( \"SpinLock does not support conditions.\" );\n    }\n\n    @Override\n    public void lockInterruptibly() throws InterruptedException {\n        while ( !this.mMutexSignal.compareAndSet( false, true ) ) {\n            if (Thread.currentThread().isInterrupted()) {\n                throw new InterruptedException();\n            }\n        }\n    }\n\n    @Override\n    public boolean tryLock( long time, java.util.concurrent.TimeUnit unit ) throws InterruptedException {\n        long endTime = System.nanoTime() + unit.toNanos( time );\n        while ( !this.mMutexSignal.compareAndSet(false, true) ) {\n            if ( System.nanoTime() > endTime ) {\n                return false;\n            }\n            if ( Thread.currentThread().isInterrupted() ) {\n                throw new InterruptedException();\n            }\n        }\n        return true;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/math/BigNumberMath.java",
    "content": "package com.pinecone.framework.util.math;\nimport com.pinecone.Pinecone;\n\nimport java.math.BigDecimal;\n\npublic abstract class BigNumberMath {\n    private PrecisionHolder precision;\n    protected int mp = 0;\n    private int lastPrecision = 0;\n    private int stp = 0;\n\n    protected BigNumberMath(PrecisionHolder precision) {\n        this.precision = precision;\n    }\n\n    protected int precision() {\n        if (this.mp != 0) {\n            return this.lastPrecision + this.mp;\n        } else {\n            this.lastPrecision = this.precision.getPrecision();\n            this.stp = this.lastPrecision / 5;\n            return this.lastPrecision;\n        }\n    }\n\n    protected void applyForCachePrecision() {\n        this.mp += this.stp;\n    }\n\n    protected void clearCachePrecision() {\n        this.mp -= this.stp;\n        if (this.mp < 0) {\n            this.mp = 0;\n        }\n\n    }\n\n    public static BigNumberMath getDefaultBigNumberMath(PrecisionHolder precision) {\n        return new BigNumberMathAchieve(precision);\n    }\n\n    public static BigNumberMath getDefaultBigNumberMath(int precision) {\n        PrecisionHolder precisionHolder = new PrecisionHolder(){\n            @Override\n            public int getPrecision() {\n                return precision;\n            }\n        };\n        return new BigNumberMathAchieve(precisionHolder);\n    }\n\n    public static BigNumberMath getDefaultBigNumberMath() {\n        PrecisionHolder precisionHolder = new PrecisionHolder(){\n            @Override\n            public int getPrecision() {\n                return Pinecone.FLOAT_ACCURACY;\n            }\n        };\n        return new BigNumberMathAchieve(precisionHolder);\n    }\n\n    public abstract BigDecimal sin(BigDecimal decimal);\n\n    public abstract BigDecimal cos(BigDecimal decimal);\n\n    public abstract BigDecimal tan(BigDecimal decimal);\n\n    public abstract BigDecimal asin(BigDecimal decimal);\n\n    
public abstract BigDecimal acos(BigDecimal decimal);\n\n    public abstract BigDecimal atan(BigDecimal decimal);\n\n    public abstract BigDecimal pow(BigDecimal decimal, BigDecimal decimal2);\n\n    public abstract BigDecimal pow(double var1, double var2);\n\n    public abstract BigDecimal sqrt(BigDecimal decimal);\n\n    public abstract BigDecimal cbrt(BigDecimal decimal);\n\n    public abstract BigDecimal root(BigDecimal decimal, BigDecimal decimal2);\n\n    public abstract BigDecimal log10(BigDecimal decimal);\n\n    public abstract BigDecimal log(BigDecimal decimal, BigDecimal decimal2);\n\n    public abstract BigDecimal ln(BigDecimal decimal);\n\n    public abstract BigDecimal exp(BigDecimal decimal);\n\n    public abstract BigDecimal sinh(BigDecimal decimal);\n\n    public abstract BigDecimal cosh(BigDecimal decimal);\n\n    public abstract BigDecimal tanh(BigDecimal decimal);\n\n    public abstract BigDecimal asinh(BigDecimal decimal);\n\n    public abstract BigDecimal acosh(BigDecimal decimal);\n\n    public abstract BigDecimal atanh(BigDecimal decimal);\n\n    public abstract BigDecimal deg(BigDecimal decimal);\n\n    public abstract BigDecimal rad(BigDecimal decimal);\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/math/BigNumberMathAchieve.java",
    "content": "package com.pinecone.framework.util.math;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\n\nfinal class BigNumberMathAchieve extends BigNumberMath {\n    private static BigDecimal E = new BigDecimal(\"2.7182818284590452353602874713526624977572470936999595749669676277240766\");\n    private static BigDecimal PI = new BigDecimal(\"3.1415926535897932384626433832795028841971693993751058209749445923078164\");\n    private static BigDecimal PI2;\n    private static BigDecimal bim;\n    private static BigDecimal B180;\n    private static BigDecimal N1;\n    private static BigDecimal B2;\n    static BigDecimal[] PREC_CACHE;\n\n    static {\n        PI2 = PI.multiply(BigDecimal.valueOf(2L));\n        bim = BigDecimal.valueOf(2147483647L);\n        B180 = BigDecimal.valueOf(180L);\n        N1 = BigDecimal.valueOf(-1L);\n        B2 = BigDecimal.valueOf(2L);\n        PREC_CACHE = new BigDecimal[101];\n        BigDecimal current = BigDecimal.ONE;\n\n        for(int i = 0; i < PREC_CACHE.length; ++i) {\n            PREC_CACHE[i] = current;\n            current = current.divide(BigDecimal.TEN);\n        }\n\n    }\n\n    BigNumberMathAchieve(PrecisionHolder precision) {\n        super(precision);\n    }\n\n    public BigDecimal sin(BigDecimal x) {\n        int precision = super.precision();\n        x = x.remainder(PI2);\n        BigDecimal P = this.ofPrecision(precision);\n        boolean neg = x.compareTo(BigDecimal.ZERO) == -1;\n        if (neg) {\n            x = x.abs();\n        }\n\n        BigDecimal result = BigDecimal.ONE;\n        BigDecimal cons = x.subtract(PI.divide(BigDecimal.valueOf(2L), precision + 2, 6)).pow(2);\n        BigDecimal curf = BigDecimal.valueOf(2L);\n        BigDecimal curr = cons;\n        int i = 3;\n        boolean n = true;\n\n        while(true) {\n            BigDecimal temp;\n            if (n) {\n                temp = result.subtract(curr.divide(curf, precision + 2, 6));\n            } else {\n                
temp = result.add(curr.divide(curf, precision + 2, 6));\n            }\n\n            if (result.subtract(temp).abs().compareTo(P) != 1) {\n                result = temp;\n                if (neg) {\n                    result = temp.negate();\n                }\n\n                return result.setScale(precision, 6);\n            }\n\n            result = temp;\n            curr = curr.multiply(cons).setScale(precision + 2, 6);\n            n = !n;\n            curf = curf.multiply(BigDecimal.valueOf((long)(i++ * i++)));\n        }\n    }\n\n    public BigDecimal cos(BigDecimal x) {\n        int precision = super.precision();\n        x = x.remainder(PI2);\n        BigDecimal P = this.ofPrecision(precision);\n        x = x.abs();\n        BigDecimal result = BigDecimal.ONE;\n        BigDecimal cons = x.pow(2);\n        BigDecimal curf = BigDecimal.valueOf(2L);\n        BigDecimal curr = cons;\n        int i = 3;\n        boolean n = true;\n\n        while(true) {\n            BigDecimal temp;\n            if (n) {\n                temp = result.subtract(curr.divide(curf, precision + 2, 6));\n            } else {\n                temp = result.add(curr.divide(curf, precision + 2, 6));\n            }\n\n            if (result.subtract(temp).abs().compareTo(P) != 1) {\n                return temp.setScale(precision, 6);\n            }\n\n            result = temp;\n            curr = curr.multiply(cons).setScale(precision + 2, 6);\n            n = !n;\n            curf = curf.multiply(BigDecimal.valueOf((long)(i++ * i++)));\n        }\n    }\n\n    public BigDecimal tan(BigDecimal x) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal result = this.sin(x).divide(this.cos(x), precision, 6);\n        super.clearCachePrecision();\n        return result.setScale(precision);\n    }\n\n    public BigDecimal asin(BigDecimal x) {\n        if (x.abs().compareTo(BigDecimal.ONE) == 1) {\n            try {\n                
throw new Exception(\"Illegal input of asin(x)\");\n            }\n            catch ( Exception e ) {\n                e.printStackTrace();\n            }\n        }\n\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal P = this.ofPrecision(precision);\n        BigDecimal result = BigDecimal.valueOf(Math.asin(x.doubleValue()));\n\n        while(true) {\n            BigDecimal temp = result.subtract(this.sin(result).subtract(x).divide(this.cos(result), precision + 3, 6));\n            if (result.subtract(temp).abs().compareTo(P) != 1) {\n                super.clearCachePrecision();\n                return temp.setScale(precision, 6);\n            }\n\n            result = temp;\n        }\n    }\n\n    public BigDecimal acos(BigDecimal x) {\n        if (x.abs().compareTo(BigDecimal.ONE) == 1) {\n            try {\n                throw new Exception(\"Illegal input of acos(x)\");\n            } catch (Exception var6) {\n                var6.printStackTrace();\n            }\n        }\n\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal P = this.ofPrecision(precision);\n        BigDecimal result = BigDecimal.valueOf(Math.acos(x.doubleValue()));\n\n        while(true) {\n            BigDecimal temp = result.subtract(this.cos(result).subtract(x).divide(this.sin(result).negate(), precision + 3, 6));\n            if (result.subtract(temp).abs().compareTo(P) != 1) {\n                super.clearCachePrecision();\n                return temp.setScale(precision, 6);\n            }\n\n            result = temp;\n        }\n    }\n\n    public BigDecimal atan(BigDecimal x) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal P = this.ofPrecision(precision);\n        BigDecimal result = BigDecimal.valueOf(Math.atan(x.doubleValue()));\n\n        while(true) {\n            BigDecimal temp = 
result.subtract(this.tan(result).subtract(x).multiply(this.cos(result).pow(2)));\n            if (result.subtract(temp).abs().compareTo(P) != 1) {\n                super.clearCachePrecision();\n                return temp.setScale(precision, 6);\n            }\n\n            result = temp.setScale(precision + 3, 6);\n        }\n    }\n\n    public BigDecimal pow(BigDecimal a, BigDecimal b) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal P = this.ofPrecision(precision);\n        if (b.abs().compareTo(bim) == 1) {\n            throw new IllegalArgumentException(\"计算幂的过程中指数的绝对值太大！\");\n        } else if (b.compareTo(BigDecimal.ZERO) == 0) {\n            if (a.compareTo(BigDecimal.ZERO) == 0) {\n                throw new IllegalArgumentException(\"计算幂的过程中遇到0的0次方\");\n            } else {\n                return BigDecimal.ONE;\n            }\n        } else if (b.compareTo(BigDecimal.ONE) == 0) {\n            return a;\n        } else if (b.compareTo(N1) == 0) {\n            return BigDecimal.ONE.divide(a, precision, 6);\n        } else {\n            boolean below = b.signum() == -1;\n            b = b.abs();\n            if (b.stripTrailingZeros().precision() <= 0) {\n                return a.pow(b.intValue());\n            } else {\n                BigDecimal result = a.pow(b.intValue());\n                BigDecimal constant = BigDecimal.ONE.add(b.multiply(this.ln(a)));\n                if (result.equals(BigDecimal.ZERO)) {\n                    result = BigDecimal.ONE;\n                }\n\n                while(true) {\n                    BigDecimal temp = result.multiply(constant.subtract(this.ln(result)));\n                    if (temp.subtract(result).compareTo(P) != 1) {\n                        if (below) {\n                            return BigDecimal.ONE.divide(temp, precision, 6);\n                        } else {\n                            super.clearCachePrecision();\n                          
  return temp.setScale(precision, 6);\n                        }\n                    }\n\n                    result = temp;\n                }\n            }\n        }\n    }\n\n    public BigDecimal pow(double a, double b) {\n        return this.pow(new BigDecimal(a),new BigDecimal(b));\n    }\n\n    public BigDecimal sqrt(BigDecimal x) {\n        int precision = super.precision();\n        BigDecimal n = BigDecimal.ONE;\n        BigDecimal l = BigDecimal.ZERO;\n        BigDecimal P = this.ofPrecision(precision);\n        BigDecimal B = new BigDecimal(2);\n\n        while(true) {\n            n = n.subtract(n.pow(2).subtract(x).divide(B.multiply(n), precision + 4, 6));\n            if (n.subtract(l).abs().compareTo(P) != 1) {\n                return n.setScale(precision, 6);\n            }\n\n            l = n;\n        }\n    }\n\n    public BigDecimal cbrt(BigDecimal x) {\n        int precision = super.precision();\n        BigDecimal n = BigDecimal.ONE;\n        BigDecimal l = BigDecimal.ZERO;\n        BigDecimal P = this.ofPrecision(precision);\n        BigDecimal B = new BigDecimal(3);\n\n        while(true) {\n            n = n.subtract(n.pow(3).subtract(x).divide(B.multiply(n.pow(2)), precision + 4, 6));\n            if (n.subtract(l).abs().compareTo(P) != 1) {\n                return n.setScale(precision, 6);\n            }\n\n            l = n;\n        }\n    }\n\n    public BigDecimal root(BigDecimal a, BigDecimal b) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal result = this.pow(a, BigDecimal.ONE.divide(b, precision, 6));\n        super.clearCachePrecision();\n        return result.setScale(precision, 6);\n    }\n\n    public BigDecimal log10(BigDecimal x) {\n        return this.log(x, BigDecimal.TEN);\n    }\n\n    public BigDecimal log(BigDecimal a, BigDecimal b) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal result = 
this.ln(a).divide(this.ln(b), precision, 6);\n        super.clearCachePrecision();\n        return result.setScale(precision, 6);\n    }\n\n    public BigDecimal ln(BigDecimal x) {\n        if (x.signum() != 1) {\n            throw new IllegalArgumentException(\"Invalid input of ln(x)\");\n        } else {\n            int precision = super.precision();\n            super.applyForCachePrecision();\n            BigDecimal sc = this.ofPrecision(precision);\n            int btl = x.toBigInteger().bitLength();\n            BigDecimal result = BigDecimal.valueOf((double)btl - Math.ceil((double)(3 * (btl - 3) / 10 + 1)));\n\n            while(true) {\n                BigDecimal bpk = this.exp(result);\n                BigDecimal tmp = result.subtract(bpk.subtract(x).divide(bpk, precision + 4, 6));\n                if (tmp.subtract(result).abs().compareTo(sc) != 1) {\n                    super.clearCachePrecision();\n                    return tmp.setScale(precision, 6);\n                }\n\n                result = tmp;\n            }\n        }\n    }\n\n    public BigDecimal exp(BigDecimal x) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal P = this.ofPrecision(precision);\n        if (x.abs().compareTo(bim) == 1) {\n            throw new IllegalArgumentException(\"计算幂的过程中指数的绝对值太大！\");\n        } else {\n            boolean ng = x.signum() == -1;\n            x = x.abs();\n            BigDecimal eix = E.setScale(precision + 3, 4).pow(x.setScale(0, 0).intValue()).setScale(precision + 3, 4);\n            BigDecimal p0 = x.setScale(0, 0);\n            BigDecimal cons = x.subtract(p0);\n            BigDecimal curr = BigDecimal.ONE;\n            BigDecimal curr2 = cons;\n            BigDecimal result = eix;\n            int var11 = 2;\n\n            while(true) {\n                BigDecimal temp = result.add(eix.multiply(curr2).divide(curr, precision + 5, 6));\n                if 
(temp.subtract(result).abs().compareTo(P) != 1) {\n                    super.clearCachePrecision();\n                    return ng ? BigDecimal.ONE.divide(temp, precision, 6) : temp.setScale(precision, 6);\n                }\n\n                curr2 = curr2.multiply(cons);\n                curr = curr.multiply(BigDecimal.valueOf((long)(var11++)));\n                result = temp;\n            }\n        }\n    }\n\n    public BigDecimal sinh(BigDecimal x) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal result = this.exp(x).subtract(this.exp(x.negate())).divide(B2, precision, 6);\n        super.clearCachePrecision();\n        return result.setScale(precision, 6);\n    }\n\n    public BigDecimal cosh(BigDecimal x) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal result = this.exp(x).add(this.exp(x.negate())).divide(B2, precision, 6);\n        super.clearCachePrecision();\n        return result.setScale(precision, 6);\n    }\n\n    public BigDecimal tanh(BigDecimal x) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal e2 = this.exp(x);\n        BigDecimal eN2 = BigDecimal.ONE.divide(e2, precision + 3, 6);\n        BigDecimal result = e2.subtract(eN2).divide(e2.add(eN2), precision + 3, 6);\n        super.clearCachePrecision();\n        return result.setScale(precision, 6);\n    }\n\n    public BigDecimal asinh(BigDecimal x) {\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal P = this.ofPrecision(precision);\n        boolean ng = x.signum() == -1;\n        if (x.signum() == 0) {\n            return BigDecimal.ZERO;\n        } else {\n            x = x.abs();\n            BigDecimal result = this.ln(x);\n\n            while(true) {\n                BigDecimal temp = result.subtract(this.sinh(result).subtract(x).divide(this.cosh(result), precision + 
3, 6));\n                if (result.subtract(temp).abs().compareTo(P) != 1) {\n                    result = temp;\n                    if (ng) {\n                        result = temp.negate();\n                    }\n\n                    super.clearCachePrecision();\n                    return result.setScale(precision, 6);\n                }\n\n                result = temp;\n            }\n        }\n    }\n\n    public BigDecimal acosh(BigDecimal x) {\n        if (x.compareTo(BigDecimal.ONE) == -1) {\n            try {\n                throw new Exception(\"Illegal input of acosh(x)\");\n            } catch (Exception var6) {\n                var6.printStackTrace();\n            }\n        }\n\n        if (x.compareTo(BigDecimal.ONE) == 0) {\n            return BigDecimal.ZERO;\n        } else {\n            int precision = super.precision();\n            super.applyForCachePrecision();\n            BigDecimal P = this.ofPrecision(precision);\n            BigDecimal result = this.ln(x);\n\n            while(true) {\n                BigDecimal temp = result.subtract(this.cosh(result).subtract(x).divide(this.sinh(result), precision + 3, 6));\n                if (result.subtract(temp).abs().compareTo(P) != 1) {\n                    super.clearCachePrecision();\n                    return temp.setScale(precision, 6);\n                }\n\n                result = temp;\n            }\n        }\n    }\n\n    public BigDecimal atanh(BigDecimal x) {\n        if (x.abs().compareTo(BigDecimal.ONE) != -1) {\n            try {\n                throw new Exception(\"Illegal input of atanh(x)\");\n            } catch (Exception var6) {\n                var6.printStackTrace();\n            }\n        }\n\n        int precision = super.precision();\n        super.applyForCachePrecision();\n        BigDecimal P = this.ofPrecision(precision);\n        BigDecimal result = x.setScale(precision, 6);\n\n        while(true) {\n            BigDecimal temp = 
result.subtract(this.tanh(result).subtract(x).multiply(this.cosh(result).pow(2)));\n            if (result.subtract(temp).abs().compareTo(P) != 1) {\n                super.clearCachePrecision();\n                return temp.setScale(precision, 6);\n            }\n\n            result = temp.setScale(precision + 5, 6);\n        }\n    }\n\n    public BigDecimal deg(BigDecimal x) {\n        return x.multiply(B180).divide(PI, super.precision(), 6);\n    }\n\n    public BigDecimal rad(BigDecimal x) {\n        return x.multiply(PI).divide(B180, super.precision(), 6);\n    }\n\n    private BigDecimal ofPrecision(int precision) {\n        if (precision <= 0) {\n            return PREC_CACHE[0];\n        } else {\n            return precision < PREC_CACHE.length ? PREC_CACHE[precision] : BigDecimal.ONE.divide(new BigDecimal(BigInteger.TEN.pow(precision)));\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/math/PrecisionHolder.java",
    "content": "package com.pinecone.framework.util.math;\n\npublic interface PrecisionHolder {\n    int getPrecision();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/math/Vectorizer.java",
    "content": "package com.pinecone.framework.util.math;\n\nimport java.util.*;\n\npublic class Vectorizer<T> {\n    private Vector<Vector<T > >      tokenArrays;\n\n    private HashSet<T >              unionTokenSet;\n\n    private Vector<Vector<Double > > tokenVectors;\n\n    private Vectorizer() {\n        this.unionTokenSet = new HashSet<>();\n        this.tokenArrays   = new Vector<>();\n        this.tokenVectors  = new Vector<>();\n    }\n\n    public Vectorizer( Vector<Vector<T> >tokenArrays ){\n        this();\n        this.apply( tokenArrays );\n    }\n\n    private void apply( Vector<Vector<T> >tokenArrays ) {\n        this.tokenArrays = tokenArrays;\n\n        for ( int i = 0; i < this.tokenArrays.size(); i++ ) {\n            this.unionTokenSet.addAll( tokenArrays.get(i) );\n\n            this.tokenVectors.add( new Vector<>() );\n        }\n\n        this.analysis();\n    }\n\n    private Vector<Vector<T> > singlify( Vector<T> tokenArrayA, Vector<T> tokenArrayB ) {\n        Vector<Vector<T> > single = new Vector<>();\n        single.add( tokenArrayA );\n        single.add( tokenArrayB );\n        return single;\n    }\n\n    public Vectorizer( Vector<T> tokenArrayA, Vector<T> tokenArrayB ){\n        this();\n        this.apply(  this.singlify( tokenArrayA, tokenArrayB ) );\n    }\n\n    public Vector<Vector<Double > > getResult(){\n        return this.tokenVectors;\n    }\n\n\n    private void tokenMapify( Vector<T > proto, Map<T, Double > map ) {\n        for ( T item : proto ){\n            double tempInt = 0.0;\n            if( map.containsKey(item) ){\n                tempInt = map.get(item);\n            }else {\n                map.put(item,0.0);\n            }\n            map.replace(item,++tempInt);\n        }\n    }\n\n    private void analysis(){\n        ArrayList<HashMap<T,Double > > tokenMaps = new ArrayList<>();\n\n        for ( int i = 0; i < this.tokenArrays.size(); i++ ) {\n            tokenMaps.add( new HashMap<>() );\n            
this.tokenMapify( this.tokenArrays.get(i), tokenMaps.get(i) );\n        }\n\n        for( T item : this.unionTokenSet ){\n            for ( int i = 0; i < tokenMaps.size(); i++ ) {\n                if( !tokenMaps.get(i).containsKey(item) ){\n                    tokenMaps.get(i).put( item,0.0 );\n                }\n            }\n\n            for ( int j = 0; j < this.tokenVectors.size(); j++ ) {\n                this.tokenVectors.get(j).add( tokenMaps.get(j).get(item) );\n            }\n        }\n\n\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/ArchName.java",
    "content": "package com.pinecone.framework.util.name;\n\npublic abstract class ArchName implements Name {\n    protected String    mszName;\n\n    protected ArchName( String szName ) {\n        this.mszName = szName;\n    }\n\n    @Override\n    public void  setName( String szName ){\n        this.mszName = szName;\n    }\n\n    @Override\n    public void  asStandardizedName( String szStandardizedName ) {\n        this.mszName = szStandardizedName;\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n\n    @Override\n    public boolean equals( Object obj ) {\n        if( obj instanceof Name ) {\n            if( obj instanceof Namespace ) {\n                return this.getFullName().equals( ( (Namespace)obj ).getFullName() );\n            }\n            return this.getName().equals( ( (Name)obj ).getName() );\n        }\n        return false;\n    }\n\n    @Override\n    public int hashCode() {\n        return this.getFullName().hashCode();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/ArchNamespaceNode.java",
    "content": "package com.pinecone.framework.util.name;\n\npublic abstract class ArchNamespaceNode extends ArchName implements Namespace {\n    protected String           mszSeparator;\n\n    protected ArchNamespaceNode( String szName, String separator ) {\n        super( szName );\n        this.mszSeparator = separator;\n    }\n\n    @Override\n    public String getNodeName() {\n        return this.mszName;\n    }\n\n    @Override\n    public String getSeparator() {\n        return this.mszSeparator;\n    }\n\n    @Override\n    public void setSeparator( String separator ) {\n        this.mszSeparator = separator;\n    }\n\n    @Override\n    public String toString() {\n        return this.getSimpleName();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/FixScopeName.java",
    "content": "package com.pinecone.framework.util.name;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.name.ArchName;\nimport com.pinecone.framework.util.name.MultiScopeName;\n\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.List;\n\npublic class FixScopeName extends ArchName implements MultiScopeName {\n    protected List<String >   mDomains;\n    protected List<String >   mSuffixes;\n\n    public FixScopeName( String szName, String szDomain ) {\n        this( szName );\n\n        this.mDomains.add  ( szDomain );\n    }\n\n    public FixScopeName( String szName, String szDomain, String szSuffix ) {\n        this( szName );\n\n        this.mDomains.add  ( szDomain );\n        this.mSuffixes.add ( szSuffix );\n    }\n\n    public FixScopeName( String szName, @Nullable String[] domains, @Nullable String[] suffixes ) {\n        this( szName );\n        if ( domains != null ) {\n            this.mDomains.addAll( Arrays.asList( domains ) );\n        }\n        if ( suffixes != null ) {\n            this.mSuffixes.addAll( Arrays.asList( suffixes ) );\n        }\n    }\n\n    public FixScopeName( String szName, List<String > domains, List<String > suffixes ) {\n        super( szName );\n\n        this.mDomains  = domains;\n        this.mSuffixes = suffixes;\n    }\n\n    public FixScopeName( String szName ) {\n        super( szName );\n\n        this.mDomains   = new ArrayList<>();\n        this.mSuffixes  = new ArrayList<>();\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public String getFullName() {\n        if ( !this.mDomains.isEmpty() && !this.mSuffixes.isEmpty() ) {\n            return this.mDomains.get( 0 ) + this.mszName + this.mSuffixes.get( 0 );\n        }\n        else if ( !this.mDomains.isEmpty() ) {\n            return this.mDomains.get( 0 ) + this.mszName;\n        }\n        else if ( !this.mSuffixes.isEmpty() ) {\n          
  return this.mszName + this.mSuffixes.get( 0 );\n        }\n        else {\n            return this.mszName;\n        }\n    }\n\n    @Override\n    public String getDomain(){\n        if ( !this.mDomains.isEmpty() ) {\n            return this.mDomains.get( 0 );\n        }\n        else {\n            return \"\";\n        }\n    }\n\n    @Override\n    public String toString() {\n        return this.getFullName();\n    }\n\n    @Override\n    public List<String> getFullNames() {\n        List<String> fullNames = new ArrayList<>();\n        if ( this.mDomains.isEmpty() && this.mSuffixes.isEmpty() ) {\n            fullNames.add( this.mszName );\n        }\n        else if ( this.mDomains.isEmpty() ) {\n            for ( String suffix : this.mSuffixes ) {\n                fullNames.add( this.mszName + suffix );\n            }\n        }\n        else if ( this.mSuffixes.isEmpty() ) {\n            for ( String domain : this.mDomains ) {\n                fullNames.add( domain + this.mszName );\n            }\n        }\n        else {\n            for ( String domain : this.mDomains ) {\n                for ( String suffix : this.mSuffixes ) {\n                    fullNames.add( domain + this.mszName + suffix );\n                }\n            }\n        }\n        return fullNames;\n    }\n\n    public void addDomain( String domain ) {\n        this.mDomains.add( domain );\n    }\n\n    public void addSuffix( String suffix ) {\n        this.mSuffixes.add( suffix );\n    }\n\n    public List<String> getDomains() {\n        return this.mDomains;\n    }\n\n    public List<String> getSuffixes() {\n        return this.mSuffixes;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/GenericMultiNamespace.java",
    "content": "package com.pinecone.framework.util.name;\n\nimport com.pinecone.framework.unit.LinkedTreeSet;\n\nimport java.util.*;\n\npublic class GenericMultiNamespace extends ArchNamespaceNode implements MultiNamespace {\n    protected Set<MultiNamespace > mParents;\n\n    public GenericMultiNamespace( String name ) {\n        this( name, new LinkedTreeSet<>( MultiNamespace.DefaultSetNameComparator ), Namespace.DEFAULT_SEPARATOR );\n    }\n\n    public GenericMultiNamespace( String name, Set<MultiNamespace > parents ) {\n        this( name, parents, Namespace.DEFAULT_SEPARATOR );\n    }\n\n    public GenericMultiNamespace( String name, Set<MultiNamespace > parents, String separator ) {\n        super( name, separator );\n        this.mParents = parents != null ? parents : new LinkedTreeSet<>( MultiNamespace.DefaultSetNameComparator );\n    }\n\n    public GenericMultiNamespace( String name, MultiNamespace parent ) {\n        this( name, parent, Namespace.DEFAULT_SEPARATOR );\n    }\n\n    public GenericMultiNamespace( String name, MultiNamespace parent, String separator ) {\n        super( name, separator );\n\n        LinkedTreeSet<MultiNamespace > set = new LinkedTreeSet<>( MultiNamespace.DefaultSetNameComparator );\n        if( parent != null ) {\n            set.add( parent );\n        }\n\n        this.mParents = set;\n    }\n\n    public GenericMultiNamespace( String name, String separator ) {\n        this( name, new LinkedTreeSet<>( MultiNamespace.DefaultSetNameComparator ), separator );\n    }\n\n\n    @Override\n    public Collection<MultiNamespace > getParents() {\n        return this.mParents;\n    }\n\n    @Override\n    public Namespace parent() {\n        return this.getFirstParent();\n    }\n\n    @Override\n    public void setParent( Namespace parent ) {\n        this.mParents.clear();\n        this.addParent( (MultiNamespace)parent );\n    }\n\n    @Override\n    public MultiNamespace getFirstParent() {\n        return this.mParents.isEmpty() 
? null : this.mParents.iterator().next();\n    }\n\n    @Override\n    public String getFullName() {\n        Namespace firstParent = this.getFirstParent();\n        if ( firstParent == null ) {\n            return this.mszName;\n        }\n        return firstParent.getFullName() + this.getSeparator() + this.mszName;\n    }\n\n    @Override\n    public List<String > getFullNames() {\n        List<String > fullNames = new ArrayList<>();\n        if( this.mParents.isEmpty() ) {\n            fullNames.add( this.getNodeName() );\n        }\n        else {\n            for ( MultiNamespace parent : this.mParents ) {\n                this.addFullNames( parent, fullNames );\n            }\n        }\n        return fullNames;\n    }\n\n    protected void addFullNames( MultiNamespace namespace, List<String > fullNames ) {\n        List<String > parentFullNames = namespace.getFullNames();\n        for ( String parentFullName : parentFullNames ) {\n            String fullName = parentFullName + this.getSeparator() + this.getNodeName();\n            fullNames.add( fullName );\n        }\n    }\n\n    @Override\n    public String getFullNameByNS( String szNS ) {\n        for ( Namespace parent : this.mParents ) {\n            if ( parent.getNodeName().equals( szNS ) ) {\n                return parent.getFullName() + this.getSeparator() + this.mszName;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public void addParent( MultiNamespace parent ) {\n        this.mParents.add( parent );\n    }\n\n    @Override\n    public boolean hasOwnParent( MultiNamespace parent ) {\n        return this.mParents.contains( parent );\n    }\n\n    @Override\n    public boolean hasOwnParentNS( String szNS ) {\n        return this.mParents.contains( szNS );\n    }\n\n    @Override\n    public MultiNamespace getParentByNS( String szNS ) {\n        for ( MultiNamespace parent : this.mParents ) {\n            if ( parent.getNodeName().equals(szNS) ) {\n                
return parent;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public void removeParent( MultiNamespace parent ) {\n        this.mParents.remove( parent );\n    }\n\n    @Override\n    public int parentsSize() {\n        return this.mParents.size();\n    }\n\n    @Override\n    public String getSimpleName() {\n        return getNodeName();\n    }\n\n\n    @Override\n    public boolean equals( Object obj ) {\n        if( obj instanceof MultiNamespace ) {\n            return this.getFullNames().equals( ((MultiNamespace) obj).getFullNames() );\n        }\n        return false;\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/GenericNamespaceParser.java",
    "content": "package com.pinecone.framework.util.name;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\n\npublic class GenericNamespaceParser implements NamespaceParser {\n    protected Class<? extends Namespace> namespaceClass;\n\n    public GenericNamespaceParser() {\n        this( UniNamespace.class );\n    }\n\n    public GenericNamespaceParser( Class<? extends Namespace> namespaceClass ) {\n        this.namespaceClass = namespaceClass;\n    }\n\n    public void setNamespaceClass( Class<? extends Namespace> namespaceClass ) {\n        this.namespaceClass = namespaceClass;\n    }\n\n    @Override\n    public Namespace parse( String szNamespaceStr, Pattern pattern ) {\n        if ( pattern == null ) {\n            throw new IllegalArgumentException( \"Pattern cannot be null\" );\n        }\n\n        List<String > parts = new ArrayList<>();\n        List<String > usedSeparators = new ArrayList<>();\n\n        Matcher matcher = pattern.matcher( szNamespaceStr );\n        int lastIndex = 0;\n        while ( matcher.find() ) {\n            String part = szNamespaceStr.substring( lastIndex, matcher.start() );\n            parts.add(part);\n            usedSeparators.add(matcher.group());\n            lastIndex = matcher.end();\n        }\n        // Add the last one.\n        parts.add( szNamespaceStr.substring( lastIndex ) );\n\n        // Create the Namespace tree.\n        Namespace current = null;\n        String currentSeparator = null;\n        for ( int i = 0; i < parts.size(); ++i ) {\n            String part = parts.get(i);\n            String nextSeparator = i < usedSeparators.size() ? 
usedSeparators.get(i) : Namespace.DEFAULT_SEPARATOR;\n            current = this.newNamespaceInstance( part, current, currentSeparator );\n            currentSeparator = nextSeparator;\n        }\n\n        return current;\n    }\n\n    protected Namespace newNamespaceInstance( String name, Namespace parent, String separator ) {\n        try {\n            return this.namespaceClass.getConstructor( String.class, Namespace.class, String.class ).newInstance( name, parent, separator );\n        }\n        catch ( Exception e ) {\n            try {\n                return this.namespaceClass.getConstructor( String.class, MultiNamespace.class, String.class ).newInstance( name, (MultiNamespace)parent, separator );\n            }\n            catch ( Exception e2 ) {\n                throw new PineRuntimeException( \"Failed to instantiate namespace class\", e2 );\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/MultiNamespace.java",
    "content": "package com.pinecone.framework.util.name;\n\nimport java.util.Collection;\nimport java.util.Comparator;\nimport java.util.List;\n\npublic interface MultiNamespace extends Namespace, MultiScopeName {\n    Collection<MultiNamespace > getParents();\n\n    MultiNamespace              getFirstParent();\n\n    String                      getFullName();\n\n    String                      getFullNameByNS ( String szNS );\n\n    void                        addParent       ( MultiNamespace parent );\n\n    boolean                     hasOwnParent    ( MultiNamespace parent );\n\n    boolean                     hasOwnParentNS  ( String szNS );\n\n    MultiNamespace              getParentByNS   ( String szNS );\n\n    void                        removeParent    ( MultiNamespace parent );\n\n    int                         parentsSize();\n\n\n    Comparator<Object > DefaultSetNameComparator = new Comparator<>() {\n        @Override\n        public int compare( Object o1, Object o2 ) {\n            if ( o1 instanceof Namespace && o2 instanceof Namespace ) {\n                return ( (Namespace) o1 ).getFullName().compareTo( ( (Namespace) o2 ).getFullName() );\n            }\n            else if ( o1 instanceof String && o2 instanceof String ) {\n                return ((String) o1).compareTo( (String) o2 );\n            }\n            else if ( o1 instanceof Namespace && o2 instanceof String ) {\n                return ( (Namespace) o1 ).getNodeName().compareTo( (String) o2 );\n            }\n            else if ( o1 instanceof String && o2 instanceof Namespace ) {\n                return ( (String) o1 ).compareTo( ( (Namespace) o2 ).getNodeName() );\n            }\n            else {\n                throw new IllegalArgumentException( \"Objects are not of type Namespace or String\" );\n            }\n        }\n    };\n\n    @Override\n    default int compareTo( Namespace o ) {\n        return this.getNodeName().compareTo( o.getNodeName() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/MultiScopeName.java",
    "content": "package com.pinecone.framework.util.name;\n\nimport java.util.List;\n\npublic interface MultiScopeName extends Name {\n    List<String > getFullNames();\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/Name.java",
    "content": "package com.pinecone.framework.util.name;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Name extends Pinenut {\n    String      getName();\n\n    String      getFullName();\n\n    String      getDomain();\n\n    void        setName( String szName );\n\n    void        asStandardizedName( String szStandardizedName );\n\n    default boolean isStandardizedName() {\n        return this.getName().equals( this.getFullName() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/Namespace.java",
    "content": "package com.pinecone.framework.util.name;\n\npublic interface Namespace extends Name, Comparable<Namespace > {\n    String      DEFAULT_SEPARATOR = \".\";\n\n    Namespace   parent();\n\n    void setParent( Namespace parent );\n\n    default Namespace   root(){\n        Namespace p = this;\n        Namespace c = p;\n        while ( p != null ) {\n            c = p;\n            p = p.parent();\n        }\n\n        return c;\n    }\n\n    default String      rootName() {\n        return this.root().getNodeName();\n    }\n\n    String      getSeparator();\n\n    String      getNodeName();\n\n    String      getSimpleName();\n\n    String      getFullName();\n\n    void        setSeparator  ( String separator );\n\n    @Override\n    default String      getName() {\n        return this.getNodeName();\n    }\n\n    @Override\n    default String      getDomain() {\n        Namespace p = this.parent();\n        if( p != null ) {\n            return p.getFullName();\n        }\n        return \"\";\n    }\n\n    @Override\n    default int compareTo( Namespace o ) {\n        return this.getFullName().compareTo( o.getFullName() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/NamespaceParser.java",
    "content": "package com.pinecone.framework.util.name;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.List;\nimport java.util.regex.Pattern;\n\npublic interface NamespaceParser extends Pinenut {\n    default Namespace parse( String szNamespaceStr, List<String> separators ) {\n        if ( separators == null || separators.isEmpty() ) {\n            throw new IllegalArgumentException( \"Separators list cannot be null or empty\" );\n        }\n\n        StringBuilder regexBuilder = new StringBuilder();\n        for ( String sep : separators ) {\n            regexBuilder.append( Pattern.quote(sep) ).append( \"|\" );\n        }\n        String regex = regexBuilder.substring( 0, regexBuilder.length() - 1 );\n        return this.parse( szNamespaceStr, Pattern.compile( regex ) );\n    }\n\n    default Namespace parse( String szNamespaceStr, String szSeparatorsRegex ) {\n        if ( szSeparatorsRegex == null || szSeparatorsRegex.isEmpty() ) {\n            throw new IllegalArgumentException( \"Regex string cannot be null or empty\" );\n        }\n        return this.parse( szNamespaceStr, Pattern.compile( szSeparatorsRegex ) );\n    }\n\n    Namespace parse( String namespaceStr, Pattern separatorsPattern );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/ScopeName.java",
    "content": "package com.pinecone.framework.util.name;\n\npublic class ScopeName extends ArchName implements Name {\n    protected String    mszDomain;\n\n    public ScopeName( String szName, String szDomain ) {\n        super( szName );\n        this.mszDomain = szDomain;\n    }\n\n    public ScopeName( String szName ) {\n        this( szName, \"\" );\n    }\n\n    @Override\n    public String getName() {\n        return this.mszName;\n    }\n\n    @Override\n    public String getFullName(){\n        return this.getDomain() + this.getName();\n    }\n\n    @Override\n    public String getDomain(){\n        return this.mszDomain;\n    }\n\n    @Override\n    public String toString() {\n        return this.getFullName();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/UniNamespace.java",
    "content": "package com.pinecone.framework.util.name;\n\npublic class UniNamespace extends ArchNamespaceNode implements Namespace {\n    protected Namespace        mParent;\n\n    public UniNamespace( String name ) {\n        this( name, null, Namespace.DEFAULT_SEPARATOR );\n    }\n\n    public UniNamespace( String name, Namespace parent ) {\n        this( name, parent, Namespace.DEFAULT_SEPARATOR );\n    }\n\n    public UniNamespace( String name, Namespace parent, String separator ) {\n        super( name, separator );\n        this.mParent        = parent;\n    }\n\n    public UniNamespace( String name, String separator ) {\n        this( name, null, separator );\n    }\n\n    @Override\n    public Namespace parent() {\n        return mParent;\n    }\n\n    @Override\n    public void setParent( Namespace parent ) {\n        this.mParent = parent;\n    }\n\n    @Override\n    public String getSimpleName() {\n        return this.getNodeName();\n    }\n\n    @Override\n    public String getFullName() {\n        if ( this.mParent == null ) {\n            return this.mszName;\n        }\n        return this.mParent.getFullName() + this.mszSeparator + this.mszName;\n    }\n\n    @Override\n    public  boolean equals( Object that ) {\n        if( that instanceof Namespace ) {\n            return this.getFullName().equals( ((Namespace) that).getFullName() );\n        }\n        return false;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/path/BasicPathResolver.java",
    "content": "package com.pinecone.framework.util.name.path;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class BasicPathResolver implements PathResolver {\n    protected String mszSepRegex;\n    protected String mszSeparator;\n\n    public BasicPathResolver( String szSeparator, String szSepRegex ) {\n        this.mszSeparator = szSeparator;\n        this.mszSepRegex  = szSepRegex;\n    }\n\n    public BasicPathResolver() {\n        this( \"/\", \"/\" );\n    }\n\n\n    @Override\n    public List<String > resolvePath( String[] parts ) {\n        ArrayList<String> resolvedParts = new ArrayList<>();\n        for (String part : parts) {\n            if ( part.equals(\".\") || part.isEmpty() ) {\n                continue;\n            }\n\n            if ( part.equals(\"..\") ) {\n                if ( !resolvedParts.isEmpty() ) {\n                    resolvedParts.remove( resolvedParts.size() - 1 );\n                }\n            }\n            else {\n                resolvedParts.add( part );\n            }\n        }\n        return resolvedParts;\n    }\n\n    @Override\n    public String resolvePath( String path ) {\n        String[] parts = this.processPath( path ).split( this.mszSepRegex );\n        return this.assemblePath( this.resolvePath( parts ) );\n    }\n\n    @Override\n    public List<String > resolvePathParts( String path ) {\n        return this.resolvePath( this.segmentPathParts( path ) );\n    }\n\n    @Override\n    public String[] segmentPathParts( String path ) {\n        return this.processPath( path ).split( this.mszSepRegex );\n    }\n\n    @Override\n    public String assemblePath( List<String > parts ) {\n        if ( parts == null || parts.size() == 0 ) {\n            return \"\";\n        }\n\n        StringBuilder path = new StringBuilder();\n\n        for ( int i = 0; i < parts.size(); ++i ) {\n            if ( i > 0 ) {\n                path.append( this.mszSeparator );\n            }\n            path.append( 
parts.get( i ) );\n        }\n        return path.toString();\n    }\n\n    protected String processPath( String path ) {\n        return path;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/name/path/PathResolver.java",
    "content": "package com.pinecone.framework.util.name.path;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface PathResolver extends Pinenut {\n    List<String > resolvePath      ( String[] parts ) ;\n\n    String        resolvePath      ( String path );\n\n    List<String > resolvePathParts ( String path ) ;\n\n    String[] segmentPathParts      ( String path ) ;\n\n    String assemblePath            ( List<String > parts ) ;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/ArchRDBExecutor.java",
    "content": "package com.pinecone.framework.util.rdb;\n\nimport java.sql.ResultSet;\nimport java.sql.ResultSetMetaData;\nimport java.sql.SQLException;\nimport java.sql.Statement;\nimport java.util.ArrayList;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONArraytron;\nimport com.pinecone.framework.util.json.JSONMaptron;\n\npublic abstract class ArchRDBExecutor implements MappedExecutor {\n    private RDBHost          mRDBSQLHost;\n\n    private MappedSQLSplicer mSimpleSQLSpawner = null ;\n\n    private void init() {\n        this.mSimpleSQLSpawner = new MappedSQLSplicer();\n    }\n\n\n    @Override\n    public RDBHost getRDBSQLHost() {\n        this.init();\n        return this.mRDBSQLHost;\n    }\n\n    public ArchRDBExecutor( RDBHost rdbHost ) {\n        this.init();\n        this.mRDBSQLHost = rdbHost;\n    }\n\n    protected Statement createStatement() throws SQLException {\n        return this.mRDBSQLHost.createStatement();\n    }\n\n    @Override\n    public ResultSession query( String szSQL ) throws SQLException {\n        //this.affirmCurrentStatement();\n        Statement statement = this.createStatement();\n        ResultSet resultSet = statement.executeQuery( szSQL );\n        return new DirectResultSession( this.mRDBSQLHost, statement, resultSet );\n    }\n\n    @Override\n    public long execute( String szSQL, boolean bIgnoreNoAffected ) throws SQLException {\n        //this.affirmCurrentStatement();\n        Statement statement = this.createStatement();\n        statement.execute( szSQL );\n        if( bIgnoreNoAffected ){\n            return 1;\n        }\n        long n = statement.getUpdateCount();\n        statement.close();\n        return n;\n    }\n\n    public long execute( String szSQL ) throws SQLException {\n        return this.execute( szSQL, false );\n    }\n\n    public int countFromTable( String szSQL ){\n 
       try{\n            ResultSession session = this.query(szSQL);\n            ResultSet resultSet   = session.getResultSet();\n\n            resultSet.next();\n            int n = resultSet.getInt(\"COUNT(*)\");\n            session.close();\n            return n;\n        }\n        catch ( Exception E ){\n            return 0;\n        }\n    }\n\n    public int getSumFromTable( String szTableName ){\n        return this.countFromTable( \"SELECT COUNT(*) FROM `\" + szTableName + \"`\" );\n    }\n\n\n\n    /** Fetch Function **/\n    public String[] fetchAllColumn ( String szTable ) throws SQLException {\n        String szSQL = \"SHOW COLUMNS FROM `\" + szTable + \"`\";\n        ResultSession session = this.query( szSQL );\n        ResultSet resultSet   = session.getResultSet();\n\n        resultSet.last();\n        int nRow = resultSet.getRow();\n        resultSet.beforeFirst();\n        String[] columns = new String[ nRow ];\n\n        int j = 0;\n        while ( resultSet.next() ){\n            columns[ j++ ] = resultSet.getString( 1 );\n        }\n        session.close();\n        return columns;\n    }\n\n    public static String[] column2Array( ResultSet resultSet )throws SQLException {\n        ResultSetMetaData metaData = resultSet.getMetaData();\n        int nColumnCount = metaData.getColumnCount();\n        String[] columns = new String[nColumnCount];\n\n        for ( int i = 1, j = 0; i <= nColumnCount; i++ ) {\n            columns[j++] = metaData.getColumnLabel(i);\n        }\n        return columns;\n    }\n\n    public List<Map<String, Object >> fetchAssoc(String szSQL ) throws SQLException {\n        ResultSession session = this.query( szSQL );\n        ResultSet resultSet   = session.getResultSet();\n\n        ResultSetMetaData metaData = resultSet.getMetaData();\n        int sizeofRowSet = metaData.getColumnCount();\n        ArrayList<Map<String,Object > > queryResult = new ArrayList<>();\n\n        int jc = 0;\n        while ( resultSet.next() 
){\n            queryResult.add( new LinkedHashMap<>() );\n            for ( int i = 1; i <= sizeofRowSet; i++ ) {\n                queryResult.get(jc).put(\n                        metaData.getColumnLabel( i ), resultSet.getObject( i )\n                );\n            }\n            jc++;\n        }\n\n        session.close();\n        return queryResult;\n    }\n\n    @Override\n    public JSONArray fetch     (String szSQL ) throws SQLException {\n        ResultSession session = this.query( szSQL );\n        ResultSet resultSet   = session.getResultSet();\n\n        ResultSetMetaData metaData = resultSet.getMetaData();\n        int sizeofRowSet = metaData.getColumnCount();\n        JSONArray queryResult = new JSONArraytron();\n\n        int jc = 0;\n        while ( resultSet.next() ){\n            queryResult.put( new JSONMaptron() );\n            for ( int i = 1; i <= sizeofRowSet; i++ ) {\n                queryResult.getJSONObject( jc ).put(\n                        metaData.getColumnLabel( i ), resultSet.getObject( i )\n                );\n            }\n            jc++;\n        }\n\n        session.close();\n        return queryResult;\n    }\n\n    /**\n     *  Use a Java class to store the query result if the data is operated on particularly frequently (> 1e6).\n     *  According to trial results, if the calculation scale is beyond (1e6) there is a significant\n     *  performance gap between java native object and the HashMap based com.pinecone::JSONObject.\n     *  *****************************************************************************************\n     *  Experiment At [Intel(R) Core(TM) i7-9750H CPU @ 2.60GHz (Single Thread)]:\n     *  Trial At 1e6 : HashMap [11ms], Java Native Object [<10ms] the difference is tiny.\n     *  Trial At 1e7 : HashMap [~100ms], Java Native Object [10ms ~ 20ms] the difference is huge but still acceptable.\n     *  Trial At 1e8 : HashMap [>1000ms], Java Native Object [80ms ~ 150ms] the difference is huge but unacceptable.\n     *  
*****************************************************************************************\n     *  JSONObject Mode is recommended for temporary query objects or under normal conditions.\n     *  NativeObject Mode is recommended when the query result will be manipulated frequently.\n     */\n//    public Object selectJavaify ( String szSQL, Class<?> antetype ) throws SQLException {\n//\n//    }\n\n\n    /** Insert Function **/\n    public long insertWithArray ( String szSimpleTable, Map dataMap, boolean bReplace ) throws SQLException {\n        if ( dataMap != null ) {\n            return this.execute( this.mSimpleSQLSpawner.spliceInsertSQL( szSimpleTable, dataMap, bReplace ) );\n        }\n        return -1;\n    }\n\n    @Override\n    public long insertWithArray ( String szSimpleTable, Map dataMap ) throws SQLException {\n        return insertWithArray(szSimpleTable,dataMap,false);\n    }\n\n\n\n    /** Update Function **/\n    @Override\n    public long updateWithArray ( String szSimpleTable, Map dataMap, List<Map.Entry> conditionMap, String szConditionGlue ) throws SQLException {\n        if ( dataMap != null ) {\n            return this.execute(\n                    this.mSimpleSQLSpawner.spliceUpdateSQL (\n                            szSimpleTable,\n                            dataMap,\n                            conditionMap,\n                            szConditionGlue\n                    ),\n                    true\n            );\n        }\n        return -1;\n    }\n\n    public long updateWithArray ( String szSimpleTable, Map dataMap, List<Map.Entry> conditionMap ) throws SQLException {\n        return this.updateWithArray( szSimpleTable, dataMap, conditionMap, \"AND\" );\n    }\n\n    @Override\n    public long updateWithArray ( String szSimpleTable, Map dataMap, Map conditionMap, String szConditionGlue ) throws SQLException {\n        if ( dataMap != null ) {\n            return this.execute(\n                    
this.mSimpleSQLSpawner.spliceUpdateSQL (\n                            szSimpleTable,\n                            dataMap,\n                            conditionMap,\n                            szConditionGlue\n                    ),\n                    true\n            );\n        }\n        return -1;\n    }\n\n    public long updateWithArray ( String szSimpleTable, Map dataMap, Map conditionMap ) throws SQLException {\n        return this.updateWithArray( szSimpleTable, dataMap, conditionMap, \"AND\" );\n    }\n\n    public long updateWithArray ( String szSimpleTable, Map dataMap, String szConditionSQL ) throws SQLException {\n        if ( dataMap != null ) {\n            StringBuilder sqlStream = new StringBuilder();\n            sqlStream.append( this.mSimpleSQLSpawner.spliceNoConditionUpdateSQL( szSimpleTable,dataMap ) );\n\n            if ( szConditionSQL!= null ) {\n                if( !szConditionSQL.toLowerCase().contains(\"where\")){\n                    sqlStream.append(\" WHERE \");\n                }\n                sqlStream.append( szConditionSQL );\n            }\n            return this.execute( sqlStream.toString(), true );\n        }\n        return -1;\n    }\n\n    public long updateWithArray ( String szSimpleTable, Map dataMap ) throws SQLException {\n        return updateWithArray( szSimpleTable, dataMap, (Map) null, \"AND\" );\n    }\n\n\n\n    /** Delete Function **/\n    @Override\n    public long deleteWithArray ( String szSimpleTable, List<Map.Entry> conditionMap,  String szConditionGlue ) throws SQLException {\n        if ( conditionMap != null ) {\n            return this.execute( this.mSimpleSQLSpawner.spliceDeleteSQL( szSimpleTable, conditionMap, szConditionGlue ) );\n        }\n        return this.execute(\"TRUNCATE `\" + szSimpleTable + '`');\n    }\n\n    @Override\n    public long deleteWithArray ( String szSimpleTable, Map conditionMap,  String szConditionGlue ) throws SQLException {\n        if ( conditionMap != null ) {\n 
           return this.execute( this.mSimpleSQLSpawner.spliceDeleteSQL( szSimpleTable, conditionMap, szConditionGlue ) );\n        }\n        return this.execute(\"TRUNCATE  `\" + szSimpleTable + '`');\n    }\n\n    public long deleteWithArray ( String szSimpleTable, List<Map.Entry> conditionMap ) throws SQLException {\n        return this.deleteWithArray( szSimpleTable,conditionMap,\"AND\" );\n    }\n\n    public long deleteWithArray ( String szSimpleTable, Map conditionMap ) throws SQLException {\n        return this.deleteWithArray( szSimpleTable,conditionMap,\"AND\" );\n    }\n\n    public long deleteWithSQL   ( String szSimpleTable, String szConditionSQL ) throws SQLException {\n        StringBuilder sqlStream = new StringBuilder();\n        sqlStream .append( \"DELETE FROM `\" ).append( szSimpleTable ).append( \"`\" );\n        if ( szConditionSQL!= null ) {\n            if( !szConditionSQL.toLowerCase().contains(\"where\")){\n                sqlStream.append(\" WHERE \");\n            }\n            sqlStream.append( szConditionSQL );\n        }\n        return this.execute( sqlStream.toString() );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/DirectResultSession.java",
    "content": "package com.pinecone.framework.util.rdb;\n\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.sql.Statement;\n\npublic class DirectResultSession implements ResultSession {\n    protected RDBHost mHost;\n\n    protected Statement mStatement;\n\n    protected ResultSet mResultSet;\n\n    public DirectResultSession( RDBHost host, Statement statement, ResultSet resultSet ) {\n        this.mHost      = host;\n        this.mStatement = statement;\n        this.mResultSet = resultSet;\n    }\n\n    @Override\n    public RDBHost getHost() {\n        return this.mHost;\n    }\n\n    @Override\n    public Statement getStatement() {\n        return this.mStatement;\n    }\n\n    @Override\n    public ResultSet getResultSet() {\n        return this.mResultSet;\n    }\n\n    @Override\n    public void close() throws SQLException {\n        this.mStatement.close();\n        this.mResultSet.close();\n        this.mHost = null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/MappedExecutor.java",
    "content": "package com.pinecone.framework.util.rdb;\n\nimport com.pinecone.framework.util.json.JSONArray;\n\nimport java.sql.SQLException;\nimport java.util.Map;\nimport java.util.List;\n\npublic interface MappedExecutor {\n    RDBHost getRDBSQLHost();\n\n    long execute( String szSQL, boolean bIgnoreNoAffected ) throws SQLException;\n\n    ResultSession query( String szSQL ) throws SQLException;\n\n\n    JSONArray fetch     ( String szSQL ) throws SQLException ;\n\n\n    long insertWithArray ( String szSimpleTable, Map dataMap ) throws SQLException;\n\n    long updateWithArray ( String szSimpleTable, Map dataMap, List<Map.Entry> conditionMap, String szConditionGlue ) throws SQLException;\n\n    long updateWithArray ( String szSimpleTable, Map dataMap, Map conditionMap, String szConditionGlue ) throws SQLException;\n\n\n\n    long deleteWithArray (String szSimpleTable, List<Map.Entry> conditionMap, String szConditionGlue ) throws SQLException;\n\n    long deleteWithArray ( String szSimpleTable, Map conditionMap,  String szConditionGlue ) throws SQLException;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/MappedSQLSplicer.java",
    "content": "package com.pinecone.framework.util.rdb;\n\nimport java.util.Map;\nimport java.util.List;\n\npublic class MappedSQLSplicer implements SQLSplicer {\n    public String spliceSingleKeyValueSequence( Object szKey, Object szValue, String szGlue ){\n        return String.format(\" `%s` = '%s' %s \",szKey,szValue, szGlue );\n    }\n\n    public String spliceSimpleKeyValuesSequence( List<Map.Entry> keyValues, String szGlue ) {\n        if ( keyValues!= null ) {\n            StringBuilder sqlStream = new StringBuilder();\n            int i = 0, mapSize = keyValues.size();\n            for ( Map.Entry item : keyValues ) {\n                sqlStream.append( this.spliceSingleKeyValueSequence( item.getKey(),item.getValue(), (i++ != mapSize - 1) ? szGlue : \"\" ) );\n            }\n            return sqlStream.toString();\n        }\n        return \"\";\n    }\n\n    public String spliceSimpleKeyValuesSequence( Map keyValues, String szGlue ) {\n        if ( keyValues!= null ) {\n            StringBuilder sqlStream = new StringBuilder();\n            int i = 0, mapSize = keyValues.size();\n\n            for ( Object each : keyValues.entrySet() ) {\n                Map.Entry item = (Map.Entry) each;\n                sqlStream.append(this.spliceSingleKeyValueSequence(item.getKey(), item.getValue(), (i++ != mapSize - 1) ? szGlue : \"\"));\n            }\n            return sqlStream.toString();\n        }\n        return \"\";\n    }\n\n    public String spliceInsertSQL ( String szFullNameTable, Map dataMap, boolean bReplace ) {\n        if ( dataMap != null ) {\n            StringBuilder sqlStream = new StringBuilder();\n            sqlStream.append( String.format( bReplace ? 
\"REPLACE INTO `%s` \" : \"INSERT INTO `%s` \", szFullNameTable ) )  ;\n            int i = 0, mapSize = dataMap.size();\n            StringBuilder sql_key = new StringBuilder();\n            StringBuilder sql_value = new StringBuilder();\n\n            for ( Object each : dataMap.entrySet() ) {\n                Map.Entry item = (Map.Entry) each;\n                sql_key .append( \"`\" ) .append( item.getKey() ).append( \"`\" ).append((i != mapSize - 1) ? \",\" : \"\");\n                sql_value .append(\"'\" ) .append( item.getValue() ).append( \"'\" ).append ((i++ != mapSize - 1) ? \",\" : \"\");\n            }\n            sqlStream .append( \" ( \" ).append( sql_key.toString() ).append( \" ) VALUES ( \" ).append( sql_value.toString() ).append( \" )\" );\n            return sqlStream.toString();\n        }\n        return \"\";\n    }\n\n\n\n\n    public String spliceNoConditionUpdateSQL( String szFullNameTable, Map dataMap ) {\n        if ( dataMap != null ) {\n            return String.format( \"UPDATE `%s` SET %s \", szFullNameTable, this.spliceSimpleKeyValuesSequence(dataMap, \",\") );\n        }\n        return \"\";\n    }\n\n    public String spliceUpdateSQL ( String szFullNameTable, Map dataMap, List<Map.Entry> conditionKeyValues, String szConditionGlue ) {\n        if ( dataMap != null ) {\n            String szConditionSQL = this.spliceSimpleKeyValuesSequence( conditionKeyValues, szConditionGlue );\n            if( !szConditionSQL.isEmpty() ){\n                return this.spliceNoConditionUpdateSQL( szFullNameTable, dataMap ) + \" WHERE \" + szConditionSQL;\n            }\n        }\n        return \"\";\n    }\n\n    public String spliceUpdateSQL ( String szFullNameTable, Map dataMap, Map conditionKeyValues, String szConditionGlue ) {\n        if ( dataMap != null ) {\n            String szConditionSQL = this.spliceSimpleKeyValuesSequence( conditionKeyValues, szConditionGlue );\n            if( !szConditionSQL.isEmpty() ){\n                return 
this.spliceNoConditionUpdateSQL( szFullNameTable, dataMap ) + \" WHERE  \" + szConditionSQL;\n            }\n        }\n        return \"\";\n    }\n\n\n\n    public String spliceDeleteSQL ( String szFullNameTable, List<Map.Entry> conditionKeyValues, String szConditionGlue ) {\n        StringBuilder sqlStream = new StringBuilder();\n        sqlStream .append( String.format( \"DELETE FROM `%s` \", szFullNameTable ) );\n        if ( conditionKeyValues != null ) {\n            sqlStream.append( \" WHERE \" );\n            sqlStream.append( this.spliceSimpleKeyValuesSequence( conditionKeyValues, szConditionGlue ) );\n        }\n        return sqlStream.toString();\n    }\n\n    public String spliceDeleteSQL ( String szFullNameTable, Map conditionKeyValues, String szConditionGlue ) {\n        StringBuilder sqlStream = new StringBuilder();\n        sqlStream .append( String.format( \"DELETE FROM `%s` \", szFullNameTable ) );\n        if ( conditionKeyValues != null ) {\n            sqlStream.append( \" WHERE  \" );\n            sqlStream.append( this.spliceSimpleKeyValuesSequence( conditionKeyValues, szConditionGlue ) );\n        }\n        return sqlStream.toString();\n    }\n\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/RDBHost.java",
    "content": "package com.pinecone.framework.util.rdb;\n\nimport java.sql.*;\n\npublic interface RDBHost {\n    Connection getConnection();\n\n    boolean isClosed() ;\n\n    void connect() throws SQLException;\n\n    void close() throws SQLException;\n\n    Statement  createStatement() throws SQLException;\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/ResultSession.java",
    "content": "package com.pinecone.framework.util.rdb;\n\nimport java.sql.Connection;\nimport java.sql.ResultSet;\nimport java.sql.SQLException;\nimport java.sql.Statement;\n\npublic interface ResultSession {\n    RDBHost getHost();\n\n    default Connection getConnection()  {\n        return this.getHost().getConnection();\n    }\n\n    Statement getStatement();\n\n    ResultSet getResultSet();\n\n    void close() throws SQLException;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/SQLSplicer.java",
    "content": "package com.pinecone.framework.util.rdb;\n\npublic interface SQLSplicer {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/rdb/SQLStrings.java",
    "content": "package com.pinecone.framework.util.rdb;\n\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\n\npublic final class SQLStrings {\n    public static String format( Object val, boolean bIncludeBool ) {\n        String sz;\n        if ( val == null ) {\n            return \"null\";\n        }\n        else if ( val instanceof String ) {\n            return \"'\" + StringUtils.addSlashes( (String) val ) + \"'\";\n        }\n        else if ( val instanceof Number ) {\n            if( val instanceof BigDecimal || val instanceof BigInteger ){\n                sz = val.toString();\n            }\n            else {\n                return val.toString();\n            }\n        }\n        else if ( val instanceof Boolean ) {\n            if( bIncludeBool ){\n                return val.toString();\n            }\n            else {\n                return  ( (boolean) val ) ? \"1\" : \"0\";\n            }\n        }\n        else {\n            sz = val.toString();\n        }\n\n        return \"'\" + StringUtils.addSlashes( sz ) + \"'\";\n    }\n\n    public static String format( Object val ) {\n        return SQLStrings.format( val, false );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/template/TemplateCursorParser.java",
    "content": "package com.pinecone.framework.util.template;\n\nimport com.pinecone.framework.system.ParseException;\nimport com.pinecone.framework.util.CursorParser;\n\npublic class TemplateCursorParser implements CursorParser {\n    protected TemplateParser mParser;\n\n    protected TemplateCursorParser( TemplateParser parser ) {\n        this.mParser = parser;\n    }\n\n    @Override\n    public void back() throws ParseException {\n        this.mParser.back();\n    }\n\n    @Override\n    public char next() throws ParseException {\n        return this.mParser.next();\n    }\n\n    @Override\n    public String next( int n ) throws ParseException {\n        return this.mParser.next(n);\n    }\n\n    @Override\n    public Object nextValue() throws ParseException {\n        return this.mParser.eval();\n    }\n\n    @Override\n    public Object nextValue( Object indexKey, Object parent, Object[] args ) throws ParseException {\n        return this.nextValue();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/template/TemplateParser.java",
    "content": "package com.pinecone.framework.util.template;\n\nimport com.pinecone.framework.system.ParseException;\nimport com.pinecone.framework.system.prototype.Objectom;\nimport com.pinecone.framework.system.prototype.PinenutTraits;\nimport com.pinecone.framework.util.CursorParser;\nimport com.pinecone.framework.util.GeneralStrings;\n\nimport java.io.BufferedReader;\nimport java.io.IOException;\nimport java.io.Reader;\nimport java.io.StringReader;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n *  Pinecone For Java TemplateParser [ Bean Nuts(R) Almond Dragon, Unify Template Language ]\n *  Copyright © Bean Nuts Foundation ( Dragon King ) All rights reserved. [Harald.E / JH.W]\n *  *****************************************************************************************\n *  ${xxx}, ${xxx.xxx}, ${xxx[\"xxx\"]}, ${xxx[xxx]}\n *  *****************************************************************************************\n */\npublic class TemplateParser {\n    protected static final String SYNTAX_ERROR  = \"Syntax error !\";\n\n    //private   String            mszNowAt;  // For debug only, which is to indict the current parse-at point.\n    //private   String            mszRaw;    // For debug only\n\n    protected Reader            mReader;\n    protected char              mcPrevious;\n    protected long              mnCharacter;\n    protected boolean           mbUsePrevious;\n    protected int               mnParseAt ;\n    protected int               mnLineAt;\n\n    protected TokenType         mTokenType;\n    protected StringBuilder     mCurrentToken;\n    protected StringBuilder     mRendered;\n    protected boolean           mbEvalMode;\n    protected Objectom          mVariableMap;\n\n    protected CursorParser      mThisCursor;\n\n    enum TokenType {\n        T_UNDEFINED, T_DELIMITER, T_IDENTIFIER, T_INTEGER, T_FLOAT, T_KEYWORD, T_TEMP, T_STRING, T_BLOCK, T_ENDLINE, T_UTL_TAG, T_PASS\n 
   }\n\n    public TemplateParser( Reader reader, Object valMap ) {\n        this.mReader        = (Reader)(reader.markSupported() ? reader : new BufferedReader(reader));\n        this.mVariableMap   = Objectom.wrap( valMap );\n        this.mCurrentToken  = new StringBuilder();\n        this.mRendered      = new StringBuilder();\n\n        this.mThisCursor    = new TemplateCursorParser(this);\n    }\n\n    public TemplateParser( String raw, Object valMap ) {\n        this( new StringReader(raw), valMap );\n        //this.mszRaw = raw;\n    }\n\n    protected ParseException parseException( String message ) {\n        return new ParseException( new ParseException( message + \" at \" + this.mnParseAt + \" [character \" + this.mnCharacter + \" line \" + this.mnLineAt + \"]\", (int)this.mnParseAt ) );\n    }\n\n    public void back() throws ParseException {\n        if (!this.mbUsePrevious && this.mnParseAt > 0L) {\n            --this.mnParseAt;\n            --this.mnCharacter;\n            this.mbUsePrevious = true;\n            this.mTokenType = TokenType.T_UNDEFINED;\n        }\n        else {\n            throw new ParseException( \"Stepping back two steps is not supported\" );\n        }\n    }\n\n\n    public boolean end() {\n        return this.mTokenType == TokenType.T_ENDLINE && !this.mbUsePrevious;\n    }\n\n    public char next() throws ParseException {\n        int c;\n        if ( this.mbUsePrevious ) {\n            this.mbUsePrevious = false;\n            c = this.mcPrevious;\n        }\n        else {\n            try {\n                c = this.mReader.read();\n            }\n            catch ( IOException e ) {\n                throw new ParseException( e, this.mnParseAt );\n            }\n\n            if ( c <= 0 ) {\n                this.mTokenType = TokenType.T_ENDLINE;\n                c = 0;\n            }\n        }\n\n        ++this.mnParseAt;\n        if ( this.mcPrevious == '\\r' ) {\n            ++this.mnLineAt;\n            this.mnCharacter 
= (long)(c == 10 ? 0 : 1);\n        }\n        else if ( c == '\\n' ) {\n            this.mnCharacter = 0L;\n            ++this.mnLineAt;\n        }\n        else {\n            ++this.mnCharacter;\n        }\n\n//        if ( c != 0 ) {\n//            this.mszNowAt = this.mszRaw.substring(this.mnParseAt);\n//        }\n        this.mcPrevious = (char)c;\n        return this.mcPrevious;\n    }\n\n    public String next(int n) throws ParseException {\n        if ( n == 0 ) {\n            return \"\";\n        }\n        else {\n            char[] chars = new char[n];\n\n            for( int pos = 0; pos < n; ++pos ) {\n                chars[pos] = this.next();\n                if ( this.end() ) {\n                    throw this.parseException( \"Error parser template string with substring bounds error.\" );\n                }\n            }\n\n            return new String(chars);\n        }\n    }\n\n    protected void back_if_parenthesized(){\n        if(  \"]}\".indexOf( this.mcPrevious ) >= 0 ) {\n            this.back();\n        }\n    }\n\n    protected void devourUntilEL( char nextChar ) throws ParseException {\n        while ( nextChar != 0 ) {\n            if ( nextChar == '$' ) {\n                int nextNextChar = this.next();\n                if ( nextNextChar == '{' ) {\n                    this.mCurrentToken.append((char) nextChar);\n                    this.mCurrentToken.append((char) nextNextChar);\n                    this.mTokenType = TokenType.T_UTL_TAG;\n                    this.mbEvalMode = true;\n                    return;\n                }\n                else {\n                    this.mRendered.append((char) nextChar);\n                }\n            }\n            else {\n                this.mRendered.append((char) nextChar);\n            }\n            nextChar = this.next();\n        }\n    }\n\n    public void getNextToken() throws ParseException {\n        this.mTokenType = TokenType.T_UNDEFINED;\n\n        StringBuilder temp = 
this.mCurrentToken;\n        temp.setLength(0);\n\n        char nextChar = this.next();\n        if ( this.end() ) {\n            return;\n        }\n\n        if ( !this.mbEvalMode ) {\n            this.devourUntilEL(nextChar);\n            return;\n        }\n\n        while ( nextChar != 0 && Character.isWhitespace(nextChar) ) {\n            nextChar = this.next();\n        }\n\n        while ( nextChar == '\\r' ) {\n            nextChar = this.next();\n            if (nextChar == '\\n') {\n                nextChar = this.next();\n            }\n            while ( nextChar != 0 && Character.isWhitespace(nextChar) ) {\n                nextChar = this.next();\n            }\n        }\n\n        if ( this.mbEvalMode && nextChar == '}' ) {\n            this.mCurrentToken.append((char) nextChar);\n            this.mTokenType = TokenType.T_DELIMITER;\n            this.mbEvalMode = false;\n            return;\n        }\n\n        if ( nextChar == 0 ) {\n            this.mTokenType = TokenType.T_ENDLINE;\n            return;\n        }\n\n\n        boolean isDoubleQuote = true;\n        if ( nextChar == '\"' || nextChar == '\\'' ) {\n            if ( nextChar == '\\'' ) {\n                isDoubleQuote = false;\n            }\n\n            nextChar = this.next();\n            while ( (isDoubleQuote && nextChar != '\"') || (!isDoubleQuote && nextChar != '\\'') && nextChar != '\\r' && nextChar != 0 ) {\n                if ( nextChar == '\\\\' ) {\n                    nextChar = this.next();\n                    GeneralStrings.transferCharParse( nextChar, this.mThisCursor, temp );\n                }\n                else {\n                    this.mCurrentToken.append( nextChar );\n                }\n\n                nextChar = this.next();\n            }\n            if ( nextChar == '\\r' || nextChar == 0 ) {\n                throw this.parseException( \"Unexpected End-line\" );\n            }\n\n            this.mTokenType = TokenType.T_STRING;\n            
return;\n        }\n\n        if ( \" .[]{}=$\".indexOf(nextChar) >= 0 ) {\n            temp.append((char) nextChar);\n            if (nextChar == '{') {\n                temp.append((char) nextChar);\n                this.mTokenType = TokenType.T_UTL_TAG;\n            }\n            else {\n                this.mTokenType = TokenType.T_DELIMITER;\n            }\n            return;\n        }\n\n        if ( Character.isLetter( nextChar ) || nextChar == '_' ) {\n            while (!(\" .[]{}=$\".indexOf(nextChar) >= 0 || nextChar == '\\r' || nextChar == '\\t' || nextChar == '\\n' || nextChar == 0)) {\n                temp.append( nextChar );\n                nextChar = this.next();\n            }\n\n            if( \".[]{}=$\".indexOf(nextChar) >= 0 ){\n                this.back();\n            }\n\n            this.mTokenType = TokenType.T_TEMP;\n        }\n\n\n        if ( Character.isDigit(nextChar) || nextChar == '-' || nextChar == '+' ) {\n            int dotTimes = 0;\n            int nScientificFlag = 0;\n            while ( Character.isDigit(nextChar) || \".+-eE\".indexOf(nextChar) >= 0 ) {\n                if( nextChar == '.' 
) {\n                    ++dotTimes;\n                }\n                else if( nextChar == 'E' || nextChar == 'e' ) {\n                    ++nScientificFlag;\n                }\n                temp.append(nextChar);\n                nextChar = this.next();\n            }\n\n            if ( dotTimes > 0 || nScientificFlag > 0 ) {\n                this.mTokenType = TokenType.T_FLOAT;\n            }\n            else {\n                this.mTokenType = TokenType.T_INTEGER;\n            }\n            return;\n        }\n\n        String szCurrentToken = this.mCurrentToken.toString();\n        if ( this.mTokenType == TokenType.T_TEMP ) {\n            if (\n                    szCurrentToken.equals(\"undefined\") || szCurrentToken.equals(\"null\") ||\n                    szCurrentToken.equals(\"false\")     || szCurrentToken.equals(\"true\") ||\n                    szCurrentToken.equals(\"this\")\n            ) {\n                this.mTokenType = TokenType.T_KEYWORD;\n            }\n            else {\n                this.mTokenType = TokenType.T_IDENTIFIER;\n            }\n        }\n\n        if ( this.mTokenType == TokenType.T_UNDEFINED ) {\n            throw this.parseException( \"\\nIllegal token found ! 
What-> \\\"\" + this.mCurrentToken.toString() + \"\\\"\" );\n        }\n    }\n\n    protected void setPassToken() {\n        this.mTokenType = TokenType.T_PASS;\n    }\n\n    public String eval() {\n        do {\n            this.getNextToken();\n\n            if ( this.mTokenType == TokenType.T_UTL_TAG ) {\n                Object[] refDummy = new Object[1];\n                this.eval_anonymous_val(refDummy);\n\n                Object dummy = refDummy[0];\n                if( dummy == null ) {\n                    this.mRendered.append( \"null\" );\n                }\n                else {\n                    //this.mRendered.append( PinenutTraits.invokeToJSONString( dummy, \"[object Unknown]\") ); // Debug test\n                    this.mRendered.append( PinenutTraits.invokeToString(dummy, \"[object Unknown]\") );\n                }\n            }\n\n        }\n        while ( this.mTokenType != TokenType.T_ENDLINE );\n\n        return mRendered.toString();\n    }\n\n    public Object evalValue() {\n        this.mbEvalMode = true;\n\n        Object dummy;\n        Object[] refDummy = new Object[1];\n        do {\n            this.getNextToken();\n            if( this.mTokenType == TokenType.T_ENDLINE ) {\n                break;\n            }\n            this.eval_exp_assign(refDummy);\n        }\n        while ( this.mTokenType != TokenType.T_ENDLINE && this.mTokenType != TokenType.T_DELIMITER );\n        dummy = refDummy[0];\n\n        return dummy;\n    }\n\n    private void eval_anonymous_val( Object[] jtVar ) {\n        do {\n            this.getNextToken();\n            this.eval_exp_assign(jtVar);\n        }\n        while (\n                this.mTokenType != TokenType.T_ENDLINE && !(this.mTokenType == TokenType.T_DELIMITER &&\n                        ( this.mCurrentToken.length() > 0 && ( this.mCurrentToken.charAt(0) == '}' || this.mCurrentToken.charAt(0) == ']' ) ) )\n        );\n    }\n\n    private void eval_exp_assign( Object[] jtVar ) {\n        
this.eval_exp_parenthesized(jtVar);\n    }\n\n    private void eval_exp_parenthesized( Object[] jtVar ) {\n        if( this.eval_exp_obtain(jtVar) ){\n            return;\n        }\n\n        if ( this.mCurrentToken.length() > 0 && this.mCurrentToken.charAt(0) == '(' ) {\n            this.getNextToken();\n            this.eval_exp_assign(jtVar);\n\n            if ( this.mCurrentToken.length() > 0 &&  this.mCurrentToken.charAt(0) != ')' ) {\n                throw this.parseException( \"Syntax error Missing ')'.\" );\n            }\n        }\n        else if ( this.mCurrentToken.length() > 0 && this.mCurrentToken.charAt(0) == '[' ) {\n            Object[] dummyKey = new Object[1];\n            this.eval_anonymous_val( dummyKey );\n\n            if( dummyKey[0] == null ) {\n                throw this.parseException( \"Undefined key.\" );\n            }\n            if( jtVar[0] instanceof Map ) {\n                Map m = (Map)jtVar[0];\n                jtVar[0] = m.get(dummyKey[0].toString());\n            }\n            else if( jtVar[0] instanceof List ) {\n                List m = (List)jtVar[0];\n                int id;\n                if( dummyKey[0] instanceof Number ) {\n                    id = ( (Number)dummyKey[0] ).intValue();\n                }\n                else if( dummyKey[0] instanceof String  ) {\n                    id = Integer.parseInt( (String) dummyKey[0] );\n                }\n                else {\n                    id = Integer.parseInt( dummyKey[0].toString() );\n                }\n                jtVar[0] = m.get(id);\n            }\n            else {\n                throw this.parseException( \"Error variable status, should be Map.\" );\n            }\n\n            if ( this.mCurrentToken.length() > 0 && this.mCurrentToken.charAt(0) != ']' ) {\n                throw this.parseException( \"Syntax error Missing ']'.\" );\n            }\n            this.setPassToken();\n        }\n        else {\n            
this.variable_obtain(jtVar);\n        }\n    }\n\n    private boolean eval_exp_obtain( Object[] jtVar ) {\n        if ( this.mTokenType == TokenType.T_DELIMITER ) {\n            if ( this.mCurrentToken.toString().equals(\".\") ) {\n                this.getNextToken();\n\n                if ( this.mTokenType == TokenType.T_IDENTIFIER ) {\n                    if( jtVar[0] instanceof Map ) {\n                        Map m = (Map)jtVar[0];\n                        jtVar[0] = m.get( this.mCurrentToken.toString() );\n                    }\n                    else if( jtVar[0] instanceof Objectom ) {\n                        Objectom m = (Objectom)jtVar[0];\n                        jtVar[0] = m.get( this.mCurrentToken.toString() );\n                    }\n                    else {\n                        throw this.parseException( \"Error variable status, should be Map.\" );\n                    }\n                }\n                else {\n                    throw this.parseException( \"Illegal template offset\" );\n                }\n\n                return true;\n            }\n        }\n\n        return false;\n    }\n\n    private void variable_obtain( Object[] jtVar ) {\n        int i;\n        String szCurrentToken = this.mCurrentToken.toString();\n        switch ( this.mTokenType ) {\n            case T_IDENTIFIER : {\n                if ( this.mVariableMap.containsKey(szCurrentToken) ) {\n                    jtVar[0] = this.mVariableMap.get(szCurrentToken);\n                }\n\n                //this.getNextToken();\n                break;\n            }\n            case T_INTEGER: {\n                if (szCurrentToken.length() > 18) {\n                    jtVar[0] = new BigInteger(szCurrentToken);\n                }\n                else {\n                    jtVar[0] = Long.parseLong(szCurrentToken);\n                }\n                //this.getNextToken();\n                this.back_if_parenthesized();\n                break;\n            }\n         
   case T_FLOAT: {\n                if ( szCurrentToken.length() > 18 ) {\n                    jtVar[0] = new BigDecimal(szCurrentToken);\n                }\n                else {\n                    double n;\n                    if ( szCurrentToken.equals(\"-INF\") || szCurrentToken.equals(\"-Infinity\") ) {\n                        n = Double.NEGATIVE_INFINITY;\n                    }\n                    else if (szCurrentToken.equals(\"+INF\") || szCurrentToken.equals(\"+Infinity\")) {\n                        n = Double.POSITIVE_INFINITY;\n                    }\n                    else {\n                        n = Double.parseDouble(szCurrentToken);\n                    }\n\n                    jtVar[0] = n;\n                }\n                //this.getNextToken();\n                this.back_if_parenthesized();\n                break;\n            }\n            case T_STRING: {\n                jtVar[0] = szCurrentToken;\n                //this.getNextToken();\n                break;\n            }\n            case T_DELIMITER: {\n                if (\".)]}\".indexOf(szCurrentToken.charAt(0)) != -1) {\n                    break;\n                }\n                else {\n                    throw parseException( TemplateParser.SYNTAX_ERROR );\n                }\n            }\n            case T_KEYWORD: {\n                if ( szCurrentToken.equalsIgnoreCase(\"true\") ) {\n                    jtVar[0] = true;\n                }\n                else if ( szCurrentToken.equalsIgnoreCase(\"false\") ) {\n                    jtVar[0] = false;\n                }\n                else if ( szCurrentToken.equalsIgnoreCase(\"null\") || szCurrentToken.equalsIgnoreCase(\"undefined\") ) {\n                    jtVar[0] = null;\n                }\n                else if ( szCurrentToken.equalsIgnoreCase(\"this\") ) {\n                    if ( this.mVariableMap.containsKey(\"this\") ) {\n                        jtVar[0] = this.mVariableMap.get(\"this\");\n        
            }\n                    else {\n                        jtVar[0] = this.mVariableMap;\n                    }\n                }\n                else {\n                    throw this.parseException( TemplateParser.SYNTAX_ERROR );\n                }\n                //this.getNextToken();\n                break;\n            }\n            case T_UTL_TAG: {\n                this.eval_anonymous_val(jtVar);\n                jtVar[0] = this.mVariableMap.get(jtVar);\n                this.mbEvalMode = true;\n                this.getNextToken();\n                if ( szCurrentToken.charAt(0) != '}' ) {\n                    this.back();\n                }\n                break;\n            }\n            default: {\n                throw this.parseException( TemplateParser.SYNTAX_ERROR );\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/template/UTRAlmondProvider.java",
    "content": "package com.pinecone.framework.util.template;\n\nimport java.io.IOException;\nimport java.io.Writer;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.prototype.Objectom;\n\npublic class UTRAlmondProvider implements UniformTemplateRenderer {\n    @Override\n    public String render( String tpl, Objectom context ){\n        TemplateParser parser = new TemplateParser( tpl, context );\n        return parser.eval();\n    }\n\n    @Override\n    public void render( String tpl, Objectom context, Writer writer ) {\n        // TODO\n        try{\n            writer.write( this.render( tpl, context ) );\n        }\n        catch ( IOException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/template/UniformTemplateRenderer.java",
    "content": "package com.pinecone.framework.util.template;\n\nimport java.io.IOException;\nimport java.io.Writer;\n\nimport com.pinecone.framework.system.prototype.Objectom;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UniformTemplateRenderer extends Pinenut {\n    UniformTemplateRenderer DefaultRenderer = new UTRAlmondProvider();\n\n    String render( String tpl, Objectom context );\n\n    default String render( String tpl, Object context ) {\n        return this.render( tpl, Objectom.wrap( context ) );\n    }\n\n    void render( String tpl, Objectom context, Writer writer ) ;\n\n    default void render( String tpl, Object context, Writer writer ) {\n        this.render( tpl, Objectom.wrap( context ), writer );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/GenericUniformObjectLoaderFactory.java",
    "content": "package com.pinecone.framework.util.uoi;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.HashMap;\nimport java.util.Map;\n\nimport com.pinecone.framework.system.NoSuchProviderException;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\n\npublic class GenericUniformObjectLoaderFactory implements UniformObjectLoaderFactory {\n    protected Map<String, Class<? > >   mUOLRegister = new HashMap<>();\n    protected ClassLoader               mClassLoader;\n    protected DynamicFactory            mDynamicFactory;\n\n    public GenericUniformObjectLoaderFactory( ClassLoader classLoader, DynamicFactory dynamicFactory ) {\n        this.mClassLoader    = classLoader;\n        this.mDynamicFactory = dynamicFactory;\n\n        this.mUOLRegister.put( UniformObjectLoaderFactory.DefaultJavaClassType, LocalUOIJavaClassProvider.class );\n    }\n\n    public GenericUniformObjectLoaderFactory( ClassLoader classLoader ) {\n        this( classLoader, new GenericDynamicFactory( classLoader ));\n    }\n\n    public GenericUniformObjectLoaderFactory() {\n        this( Thread.currentThread().getContextClassLoader() );\n    }\n\n\n    @Override\n    public Class<? > getUniformObjectLoader( String loaderName ) {\n        return this.mUOLRegister.get( loaderName );\n    }\n\n    @Override\n    public void register( String loaderName, Class<? 
> loader ) {\n        this.mUOLRegister.put( loaderName, loader );\n    }\n\n    @Override\n    public void deregister( String loaderName ) {\n        this.mUOLRegister.remove( loaderName );\n    }\n\n    @Override\n    public int size() {\n        return this.mUOLRegister.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mUOLRegister.isEmpty();\n    }\n\n    @Override\n    public UniformObjectLoader newLoader( String loaderName ) throws NoSuchProviderException {\n        Class<? > clazz = this.getUniformObjectLoader( loaderName );\n        if( clazz == null ) {\n            throw new NoSuchProviderException( loaderName );\n        }\n\n        try{\n            Constructor constructor = clazz.getConstructor( ClassLoader.class, DynamicFactory.class );\n            return (UniformObjectLoader) constructor.newInstance( this.mClassLoader, this.mDynamicFactory );\n        }\n        catch ( NoSuchMethodException | InvocationTargetException | IllegalAccessException | InstantiationException e ) {\n            return null;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/LocalUOIJavaClassProvider.java",
    "content": "package com.pinecone.framework.util.uoi;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.lang.GenericDynamicFactory;\n\npublic class LocalUOIJavaClassProvider implements UniformObjectLoader {\n    protected ClassLoader      mClassLoader;\n    protected DynamicFactory   mDynamicFactory;\n\n    public LocalUOIJavaClassProvider( ClassLoader classLoader, DynamicFactory dynamicFactory ) {\n        this.mClassLoader    = classLoader;\n        this.mDynamicFactory = dynamicFactory;\n    }\n\n    public LocalUOIJavaClassProvider( ClassLoader classLoader ) {\n        this( classLoader, new GenericDynamicFactory( classLoader ));\n    }\n\n    public LocalUOIJavaClassProvider() {\n        this( Thread.currentThread().getContextClassLoader() );\n    }\n\n\n    @Override\n    public Class<? > toClass( UOI uoi ) throws IllegalArgumentException {\n        if( !StringUtils.isEmpty( uoi.getHost() ) ) {\n            throw new IllegalArgumentException( \"Remote host [\" + uoi.getHost() + \"] is not supported.\" );\n        }\n\n        try{\n            return this.mClassLoader.loadClass( uoi.getObjectName() );\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public Object newInstance( UOI uoi, Class<?>[] paramTypes, Object... args ) {\n        return this.mDynamicFactory.optNewInstance( this.toClass(uoi), paramTypes, args );\n    }\n\n    @Override\n    public Object newInstance( UOI uoi, Object... args ) {\n        return this.mDynamicFactory.optNewInstance( this.toClass(uoi), null, args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/UOI.java",
    "content": "package com.pinecone.framework.util.uoi;\n\nimport java.net.URI;\n\nimport com.pinecone.framework.system.NoSuchProviderException;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UOI implements Pinenut {\n    protected URI                         mResourceIdentifier;\n    protected UniformObjectLoader         mUniformObjectLoader;\n    protected UniformObjectLoaderFactory  mUniformObjectLoaderFactory;\n\n\n    public UOI( URI uri, UniformObjectLoaderFactory factory ) {\n        this.resolve( uri, factory );\n    }\n\n    public UOI( String szURI, UniformObjectLoaderFactory factory ){\n        this.resolve( szURI, factory );\n    }\n\n    public UOI( URI uri ) {\n        this( uri, UniformObjectLoaderFactory.DefaultObjectLoaderFactory );\n    }\n\n    public UOI( String szURI ) {\n        this( szURI, UniformObjectLoaderFactory.DefaultObjectLoaderFactory );\n    }\n\n    public UOI() {\n    }\n\n\n    public void resolve( URI uri, UniformObjectLoaderFactory factory ) throws ProxyProvokeHandleException {\n        this.mResourceIdentifier          = uri;\n        this.mUniformObjectLoaderFactory  = factory;\n        try{\n            this.mUniformObjectLoader         = factory.newLoader( this.getScheme() );\n        }\n        catch ( NoSuchProviderException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n    }\n\n    public void resolve( String szURI, UniformObjectLoaderFactory factory ){\n        this.resolve( URI.create( szURI ), factory );\n    }\n\n    public void resolve( URI uri ) {\n        this.resolve( uri, UniformObjectLoaderFactory.DefaultObjectLoaderFactory );\n    }\n\n    public void resolve( String str ) {\n        this.resolve( str, UniformObjectLoaderFactory.DefaultObjectLoaderFactory );\n    }\n\n\n    public String getObjectName() {\n        String szPath = this.mResourceIdentifier.getPath();\n        if( 
szPath.startsWith( \"/\" ) ) {\n            return szPath.substring( 1 );\n        }\n        return szPath;\n    }\n\n    public Class<? > toClass() {\n        return this.mUniformObjectLoader.toClass( this );\n    }\n\n    public Object newInstance( Class<? >[] paramTypes, Object... args ) {\n        return this.mUniformObjectLoader.newInstance( this, paramTypes, args );\n    }\n\n    public Object newInstance( Object... args ) {\n        return this.mUniformObjectLoader.newInstance( this, args );\n    }\n\n\n\n    public String getScheme() {\n        return this.mResourceIdentifier.getScheme();\n    }\n\n    public String getSchemeSpecificPart() {\n        return this.mResourceIdentifier.getSchemeSpecificPart();\n    }\n\n    public String getRawSchemeSpecificPart() {\n        return this.mResourceIdentifier.getRawSchemeSpecificPart();\n    }\n\n    public String getUserInfo() {\n        return this.mResourceIdentifier.getUserInfo();\n    }\n\n    public String getRawUserInfo() {\n        return this.mResourceIdentifier.getRawUserInfo();\n    }\n\n    public String getHost() {\n        return this.mResourceIdentifier.getHost();\n    }\n\n    public int getPort() {\n        return this.mResourceIdentifier.getPort();\n    }\n\n    public String getPath() {\n        return this.mResourceIdentifier.getPath();\n    }\n\n    public String getRawPath() {\n        return this.mResourceIdentifier.getRawPath();\n    }\n\n    public String getQuery() {\n        return this.mResourceIdentifier.getQuery();\n    }\n\n    public String getRawQuery() {\n        return this.mResourceIdentifier.getRawQuery();\n    }\n\n    public String getFragment() {\n        return this.mResourceIdentifier.getFragment();\n    }\n\n    public String getRawFragment() {\n        return this.mResourceIdentifier.getRawFragment();\n    }\n\n    public boolean isAbsolute() {\n        return this.mResourceIdentifier.isAbsolute();\n    }\n\n    public boolean isOpaque() {\n        return 
this.mResourceIdentifier.isOpaque();\n    }\n\n    public URI normalize() {\n        return this.mResourceIdentifier.normalize();\n    }\n\n    public URI relativize(URI uri) {\n        return this.mResourceIdentifier.relativize(uri);\n    }\n\n    @Override\n    public boolean equals(Object obj) {\n        return this.mResourceIdentifier.equals(obj);\n    }\n\n    @Override\n    public int hashCode() {\n        return this.mResourceIdentifier.hashCode();\n    }\n\n    @Override\n    public String toString() {\n        return this.mResourceIdentifier.toString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n\n    public String toASCIIString() {\n        return this.mResourceIdentifier.toASCIIString();\n    }\n\n\n    public static UOI create( String uri ) {\n        return new UOI( uri );\n    }\n\n    public static UOI create( String uri, UniformObjectLoaderFactory factory  ) {\n        return new UOI( uri, factory );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/UniformObjectLoader.java",
    "content": "package com.pinecone.framework.util.uoi;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UniformObjectLoader extends Pinenut {\n    Class<? > toClass( UOI uoi ) ;\n\n    Object newInstance( UOI uoi, Class<? >[] paramTypes, Object... args );\n\n    Object newInstance( UOI uoi, Object... args );\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/main/java/com/pinecone/framework/util/uoi/UniformObjectLoaderFactory.java",
    "content": "package com.pinecone.framework.util.uoi;\n\nimport com.pinecone.framework.system.NoSuchProviderException;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UniformObjectLoaderFactory extends Pinenut {\n    String DefaultJavaClassType = \"java-class\";\n    UniformObjectLoaderFactory DefaultObjectLoaderFactory = new GenericUniformObjectLoaderFactory();\n\n\n\n    Class<? > getUniformObjectLoader( String loaderName ) ;\n\n    void register( String loaderName, Class<? > loader ) ;\n\n    void deregister( String loaderName );\n\n    int size();\n\n    boolean isEmpty();\n\n    UniformObjectLoader newLoader( String loaderName ) throws NoSuchProviderException;\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/system/SimpleCascadeComponentManager.java",
    "content": "package com.system;\n\nimport com.pinecone.framework.system.architecture.ArchCascadeComponent;\nimport com.pinecone.framework.system.architecture.ArchCascadeComponentManager;\nimport com.pinecone.framework.system.architecture.CascadeComponent;\nimport com.pinecone.framework.system.architecture.CascadeComponentManager;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.framework.util.name.UniNamespace;\n\nclass SimpleCascadeComponent extends ArchCascadeComponent {\n    public SimpleCascadeComponent( Namespace name, CascadeComponentManager manager, CascadeComponent parent ) {\n        super( name, manager, parent );\n    }\n\n    public SimpleCascadeComponent( String name, CascadeComponentManager manager ) {\n        super( new UniNamespace( name ), manager, null );\n    }\n}\n\npublic class SimpleCascadeComponentManager extends ArchCascadeComponentManager {\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/system/TestComponent.java",
    "content": "package com.system;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\n\npublic class TestComponent {\n    public static void testAdd() throws Exception {\n        SimpleCascadeComponentManager manager = new SimpleCascadeComponentManager();\n\n        SimpleCascadeComponent A = new SimpleCascadeComponent( \"A\", manager );\n        SimpleCascadeComponent B = new SimpleCascadeComponent( \"B\", manager );\n        SimpleCascadeComponent C = new SimpleCascadeComponent( \"C\", manager );\n        manager.addComponent( A );\n        manager.addComponent( B );\n        manager.addComponent( C );\n\n\n\n        SimpleCascadeComponent a1 = new SimpleCascadeComponent( \"a1\", manager );\n        SimpleCascadeComponent a2 = new SimpleCascadeComponent( \"a2\", manager );\n        A.addChildComponent( a1 );\n        A.addChildComponent( a2 );\n\n        SimpleCascadeComponent b1 = new SimpleCascadeComponent( \"b1\", manager );\n        SimpleCascadeComponent b2 = new SimpleCascadeComponent( \"b2\", manager );\n        B.addChildComponent( b1 );\n        B.addChildComponent( b2 );\n\n        SimpleCascadeComponent c1 = new SimpleCascadeComponent( \"c1\", manager );\n        C.addChildComponent( c1 );\n\n\n        Debug.fmt( 2,manager.getComponents() );\n        Debug.fmt( 2, manager.getComponentsRegisterList() );\n    }\n\n    public static void testRefer() throws Exception {\n        SimpleCascadeComponentManager manager = new SimpleCascadeComponentManager();\n\n        SimpleCascadeComponent A = new SimpleCascadeComponent( \"A\", manager );\n        SimpleCascadeComponent B = new SimpleCascadeComponent( \"B\", manager );\n        SimpleCascadeComponent C = new SimpleCascadeComponent( \"C\", manager );\n        manager.addComponent( A );\n        manager.addComponent( B );\n        manager.addComponent( C );\n\n\n\n        SimpleCascadeComponent a1 = new SimpleCascadeComponent( \"a1\", manager );\n        SimpleCascadeComponent a2 = 
new SimpleCascadeComponent( \"a2\", manager );\n        A.addChildComponent( a1 );\n        A.addChildComponent( a2 );\n\n        SimpleCascadeComponent b1 = new SimpleCascadeComponent( \"b1\", manager );\n        SimpleCascadeComponent b2 = new SimpleCascadeComponent( \"b2\", manager );\n        B.addChildComponent( b1 );\n        B.addChildComponent( b2 );\n        B.referChildComponent( a2 );\n\n        SimpleCascadeComponent c1 = new SimpleCascadeComponent( \"c1\", manager );\n        C.addChildComponent( c1 );\n        C.referChildComponent( b2 );\n\n        Debug.fmt( 2,manager.getComponents() );\n        Debug.fmt( 2, manager.getComponentsRegisterList() );\n\n        Debug.fmt( 2, C.children() );\n    }\n\n    public static void testCascadeRemove() throws Exception {\n        SimpleCascadeComponentManager manager = new SimpleCascadeComponentManager();\n\n        SimpleCascadeComponent A = new SimpleCascadeComponent( \"A\", manager );\n        SimpleCascadeComponent B = new SimpleCascadeComponent( \"B\", manager );\n        SimpleCascadeComponent C = new SimpleCascadeComponent( \"C\", manager );\n        manager.addComponent( A );\n        manager.addComponent( B );\n        manager.addComponent( C );\n\n\n\n        SimpleCascadeComponent a1 = new SimpleCascadeComponent( \"a1\", manager );\n        SimpleCascadeComponent a2 = new SimpleCascadeComponent( \"a2\", manager );\n        A.addChildComponent( a1 );\n        A.addChildComponent( a2 );\n\n        SimpleCascadeComponent b1 = new SimpleCascadeComponent( \"b1\", manager );\n        SimpleCascadeComponent b2 = new SimpleCascadeComponent( \"b2\", manager );\n        SimpleCascadeComponent b3 = new SimpleCascadeComponent( \"b3\", manager );\n\n        SimpleCascadeComponent b3_1 = new SimpleCascadeComponent( \"b3_1\", manager );\n        b3.addChildComponent( b3_1 );\n        b3.referChildComponent( a1 );\n\n        B.addChildComponent( b1 );\n        B.addChildComponent( b2 );\n        B.addChildComponent( 
b3 );\n        B.referChildComponent( a2 );\n\n\n        SimpleCascadeComponent c1 = new SimpleCascadeComponent( \"c1\", manager );\n        C.addChildComponent( c1 );\n        C.referChildComponent( a1 );\n        C.referChildComponent( b2 );\n\n        Debug.fmt( 2,manager.getComponents() );\n        Debug.fmt( 2, manager.getComponentsRegisterList() );\n        Debug.fmt( 2, C.children() );\n\n\n\n        // Test omega remove child\n//        manager.removeComponent( a1 );\n//        Debug.fmt( 2,manager.getComponents() );\n//        Debug.fmt( 2, manager.getComponentsRegisterList() );\n//        Debug.fmt( 2, C.children() );\n\n\n        // Test omega remove parent\n        manager.removeComponent( A );  // Diane has been erased from every universe across infinity.\n        Debug.fmt( 2,manager.getComponents() );\n        Debug.fmt( 2, manager.getComponentsRegisterList() );\n        Debug.fmt( 2, C.children() );\n        Debug.fmt( 2, b3.children() );\n\n        // Test self-destruction\n//        A.purge();\n//        Debug.fmt( 2,manager.getComponents() );\n//        Debug.fmt( 2, manager.getComponentsRegisterList() );\n//        Debug.fmt( 2, C.children() );\n//        Debug.fmt( 2, b3.children() );\n\n        // Test others\n//        a1.independent( \"cyc\" );\n//        Debug.fmt( 2,manager.getComponents() );\n//        Debug.fmt( 2, manager.getComponentsRegisterList() );\n//        Debug.fmt( 2, C.children() );\n//        Debug.fmt( 2, b3.children() );\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestComponent.testAdd();\n            //TestComponent.testRefer();\n            TestComponent.testCascadeRemove();\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/unit/JavaGenericTests.java",
    "content": "package com.unit;\n\nimport java.lang.reflect.ParameterizedType;\nimport java.lang.reflect.Type;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\n\npublic class JavaGenericTests {\n    public static void testBasic() {\n        Map<String, Integer > instance = new LinkedHashMap<>();\n\n        Class<?> clazz = instance.getClass();\n\n        Type genericSuperclass = clazz.getGenericSuperclass();\n        if ( genericSuperclass instanceof ParameterizedType ) {\n            ParameterizedType parameterizedType = (ParameterizedType) genericSuperclass;\n            Type[] actualTypeArguments = parameterizedType.getActualTypeArguments();\n\n            for ( Type type : actualTypeArguments ) {\n                Debug.trace( type.getTypeName() );\n            }\n        }\n    }\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n            JavaGenericTests.testBasic();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/unit/TestFileIteratorAndDistinct.java",
    "content": "package com.unit;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.unit.distinct.*;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.io.FileNamePathIterator;\nimport com.pinecone.framework.util.io.FileUtils;\nimport com.pinecone.framework.util.io.PathItemIterator;\n\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.StandardCopyOption;\nimport java.util.*;\n\npublic class TestFileIteratorAndDistinct {\n    public static void testFileIterator() throws Exception {\n        PathItemIterator iterator = new PathItemIterator( Path.of( \"C:/Users/undefined/Desktop/wolfmc\" ), false );\n\n        while ( iterator.hasNext() ) {\n            Debug.trace( iterator.next().toString() );\n        }\n\n    }\n\n    public static void testDistinct_Simple() throws Exception {\n        List<String> list1 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t4\" );\n        List<String> list2 = Arrays.asList( \"t1\", \"t3\", \"t5\", \"t6\" );\n        List<String> list3 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t7\", \"t7\" );\n        List<String> list4 = Arrays.asList( \"t8\", \"t2\", \"t3\", \"t9\" );\n\n        MegaBloomDistinctAudit<String> distinctAudit = new MegaBloomDistinctAudit<>(\n                List.of( list1, list2, list3 ), DistinctType.SymmetricDistinct, new HashSet<>()\n        );\n        Collection<String> distinctElements = distinctAudit.audit();\n\n        Debug.trace( distinctElements );\n\n\n        Debug.trace( distinctAudit.audit( list4 ) );\n\n        Debug.trace( distinctAudit.hasOwnElement( \"t3\" ) );\n\n    }\n\n    public static void testDistinct_Master() throws Exception {\n        List<String> list1 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t4\" );\n        List<String> list2 = Arrays.asList( \"t1\", \"t3\", \"t5\", \"t6\" );\n        List<String> list3 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t7\", \"t7\" );\n        List<String> list4 = Arrays.asList( \"t8\", \"t2\", 
\"t3\", \"t9\" );\n\n        MegaPrototypeBloomDistinctAudit<String> distinctAudit = new MegaPrototypeBloomDistinctAudit<>(\n                list1.iterator(), List.of( list2, list3 ), DistinctType.SymmetricDistinct, new HashSet<>()\n        );\n        Collection<String> distinctElements = distinctAudit.audit();\n\n        Debug.trace( distinctElements );\n\n\n        Debug.trace( distinctAudit.audit( list4 ) );\n\n        Debug.trace( distinctAudit.hasOwnElement( \"t3\" ) );\n\n    }\n\n    public static void testDistinct_Tiny() throws Exception {\n        List<String> list1 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t4\", \"t5\", \"t6\", \"t7\", \"t8\", \"t9\", \"t10\", \"t11\" );\n        List<String> list2 = Arrays.asList( \"t6\", \"t2\", \"t4\", \"t8\", \"t1\", \"t3\", \"t5\" );\n        List<String> list3 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t7\", \"t7\" );\n        List<String> list4 = Arrays.asList( \"t8\", \"t2\", \"t3\", \"t9\" );\n\n        GenericDistinctAudit<String> distinctAudit = new GenericDistinctAudit<>(\n                List.of( list1, list2 ), DistinctType.SymmetricDistinct, new ArrayList<>()\n        );\n        Collection<String> distinctElements = distinctAudit.audit();\n\n        Debug.trace( distinctElements );\n\n\n//        Debug.trace( distinctAudit.audit( list4 ) );\n//\n//        Debug.trace( distinctAudit.hasOwnElement( \"t3\" ) );\n\n    }\n\n    public static void testDistinct_TinyMaster() throws Exception {\n        List<String> list1 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t4\" );\n        List<String> list2 = Arrays.asList( \"t1\", \"t3\", \"t5\", \"t6\" );\n        List<String> list3 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t7\", \"t7\" );\n        List<String> list4 = Arrays.asList( \"t8\", \"t2\", \"t3\", \"t9\" );\n\n        GenericPrototypeDistinctAudit<String> distinctAudit = new GenericPrototypeDistinctAudit<>(\n                list1.iterator(), List.of( list2, list3 ), DistinctType.SymmetricDistinct, new 
HashSet<>()\n        );\n        Collection<String> distinctElements = distinctAudit.audit();\n\n        Debug.trace( distinctElements );\n\n\n        Debug.trace( distinctAudit.audit( list4 ) );\n\n        Debug.trace( distinctAudit.hasOwnElement( \"t3\" ) );\n\n    }\n\n    public static void testDistinct_MegaMerge() throws Exception {\n        Collection<String> list1 = List.of( \"t0\", \"t1\", \"t2\", \"t3\", \"t4\", \"t5\", \"t6\", \"t8\", \"t9\" );\n        //List<String> list2 = Arrays.asList( \"t9\", \"t8\", \"t6\", \"t5\", \"t0\", \"t2\" );\n        Collection<String> list2 = List.of( \"t0\", \"t1\", \"t2\", \"t3\", \"t6\", \"t5\" );\n\n        List<String> list3 = Arrays.asList( \"t1\", \"t2\", \"t3\", \"t7\", \"t7\" );\n        List<String> list4 = Arrays.asList( \"t8\", \"t2\", \"t3\", \"t9\" );\n\n        MegaMergeDistinctAudit<String> distinctAudit = new MegaMergeDistinctAudit<>(\n                list1.iterator(), list2.iterator(), 2\n        );\n        Collection<String> distinctElements = distinctAudit.audit();\n\n        Debug.trace( distinctElements );\n\n\n        //Debug.trace( distinctAudit.audit( list4 ) );\n\n        //Debug.trace( distinctAudit.hasOwnElement( \"t3\" ) );\n\n    }\n\n    public static void testDistinct_dir() throws Exception {\n        Path desk = Path.of( \"C:/Users/undefined/Desktop/wolfmc\" );\n        FileNamePathIterator iterator1 = new FileNamePathIterator( desk, false );\n        FileNamePathIterator iterator2 = new FileNamePathIterator( Path.of( \"E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecones/Hydra/src/main/java/com/pinecone/hydra/umc/wolfmc\" ), false );\n\n//        List<String > fn1 = new ArrayList<>();\n//        while ( iterator1.hasNext() ) {\n//            fn1.add( iterator1.next().toString() );\n//        }\n//\n//        List<String > fn2 = new ArrayList<>();\n//        while ( iterator2.hasNext() ) {\n//            fn2.add( iterator2.next().toString() );\n//        }\n//\n//        
Set<String > s1 = new HashSet<>( fn2 );\n//        List<String > uni = new ArrayList<>();\n//        for( String s : fn1 ) {\n//            if( !s1.contains( s ) ) {\n//                uni.add( s );\n//            }\n//        }\n\n//        GenericDistinctAudit<String> distinctAudit = new GenericDistinctAudit<>(\n//                List.of( fn1, fn2 ), DistinctType.SymmetricDistinct, new ArrayList<>()\n//        );\n\n//        GenericPrototypeDistinctAudit<String> distinctAudit = new GenericPrototypeDistinctAudit<>(\n//                fn2.iterator(), List.of( fn1 ), DistinctType.SymmetricDistinct, new ArrayList<>()\n//        );\n//\n//        Collection<String > c = distinctAudit.audit();\n//        //c = uni;\n//        Debug.trace( c.size() );\n//        for( String p : c ) {\n//            Debug.trace( desk.resolve( p ).toString() );\n//\n//            //Files.copy( p.toAbsolutePath(), Path.of( \"C:/Users/undefined/Desktop/welsir\" ), StandardCopyOption.REPLACE_EXISTING );\n//        }\n\n\n        GenericPrototypeDistinctAudit<Path > distinctAudit2 = new GenericPrototypeDistinctAudit<>(\n                iterator2, List.of( iterator1 ), new ArrayList<>(), DistinctType.SymmetricDistinct\n        );\n\n        Collection<Path > cp = distinctAudit2.audit();\n        Debug.trace( cp.size() );\n        for( Path p : cp ) {\n            Debug.trace( desk.resolve( p ).toString() );\n            Files.copy( desk.resolve( p ), Path.of( \"C:/Users/undefined/Desktop/welsir/\" ).resolve( p ), StandardCopyOption.REPLACE_EXISTING );\n        }\n\n    }\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            //TestFileIteratorAndDistinct.testFileIterator();\n            //TestFileIteratorAndDistinct.testDistinct_Simple();\n            //TestFileIteratorAndDistinct.testDistinct_Master();\n            //TestFileIteratorAndDistinct.testDistinct_Tiny();\n            
//TestFileIteratorAndDistinct.testDistinct_TinyMaster();\n            //TestFileIteratorAndDistinct.testDistinct_MegaMerge();\n\n            TestFileIteratorAndDistinct.testDistinct_dir();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/unit/TestMultiValueMap.java",
    "content": "package com.unit;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.unit.LinkedMultiValueMap;\nimport com.pinecone.framework.util.Debug;\n\npublic class TestMultiValueMap {\n    public static void testBasic() {\n        LinkedMultiValueMap<Integer, String > multiValueMap = new LinkedMultiValueMap<>();\n\n        multiValueMap.add( 1, \"fuck1\" );\n        multiValueMap.add( 2, \"fuck2\" );\n        multiValueMap.add( 1, \"fuck1_1\" );\n\n        Debug.trace( multiValueMap );\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            TestMultiValueMap.testBasic();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/unit/TestUnits.java",
    "content": "package com.unit;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.unit.*;\nimport com.pinecone.framework.unit.TreeMap;\nimport com.pinecone.framework.unit.tabulate.*;\nimport com.pinecone.framework.unit.trie.TrieMap;\nimport com.pinecone.framework.unit.trie.TrieNode;\nimport com.pinecone.framework.unit.trie.UniTrieMaptron;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\n\nimport java.util.*;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\n\n@SuppressWarnings( \"unchecked\" )\npublic class TestUnits {\n    public static void testUniScopeMap() {\n        UniScopeMaptron map2 = new UniScopeMaptron(new JSONMaptron(\"{c1:'kc1', c2:'kc2', c3:'kc3'}\"));\n        UniScopeMaptron map1 = new UniScopeMaptron(new JSONMaptron(\"{c1:'kb1', b2:'kb2', b3:'kb3'}\"), map2);\n        UniScopeMaptron map  = new UniScopeMaptron(true, map1);\n        //map.put( \"ka1\", \"a1\" );\n\n\n        for ( Object o: map.scopeEntrySet() ) {\n            Map.Entry kv = (Map.Entry) o;\n            Debug.trace( kv.getKey(), kv.getValue() );\n        }\n\n\n        Debug.trace( map.get(\"c1\") );\n        Debug.trace( map.get(\"b2\") );\n        Debug.trace( map.get(\"d3\") );\n\n        LinkedHashMap ls = new LinkedHashMap();\n        ls.put( \"g1\", \"gg\" );\n\n        map.elevate( ls );\n\n        LinkedHashMap linkedHashMap = new LinkedHashMap();\n        map.overrideTo( linkedHashMap );\n\n\n\n        Debug.trace( linkedHashMap, map.isEmpty(), map.isScopeEmpty(), map.ancestors(), map.hasOwnProperty(\"g1\"), map.hasOwnProperty(\"kc2\") );\n    }\n\n    public static void testMultiScopeMap() {\n\n\n        MultiScopeMaptron map4_0 = new MultiScopeMaptron(new JSONMaptron(\"{e1:'ke1', e2:'ke2'}\"));\n\n        MultiScopeMaptron map3_0 = new MultiScopeMaptron(new JSONMaptron(\"{d1:'kd1', c1:'kd1'}\"));\n        map3_0.addParent( new 
MultiScopeMaptron() );\n        map3_0.addParent( ( new MultiScopeMaptron() ).addParent( map4_0 ) );\n\n        MultiScopeMaptron map2_0 = new MultiScopeMaptron(new JSONMaptron(\"{c1:'kc1', c2:'kc2', c3:'kc3'}\"), null, \"jesus\");\n        MultiScopeMaptron map2_1 = new MultiScopeMaptron(new JSONMaptron(\"{c11:'kc11'}\"));\n        map2_1.addParent(map3_0);\n        MultiScopeMaptron map1 = new MultiScopeMaptron(new JSONMaptron(\"{c1:'kb1', b2:'kb2', b3:'kb3'}\"));\n        map1.addParent( map2_0 ).addParent( map2_1 );\n        MultiScopeMaptron map = new MultiScopeMaptron(true, null);\n\n        map.addParent( map1 );\n        map.put( \"fuck\", \"me\" );\n        map.put( \"fuck2\", \"this\" );\n\n        //map.put( \"ka1\", \"a1\" );\n\n        Debug.trace( map.scopes() );\n\n\n        for ( Object o: map.scopeEntrySet() ) {\n            Map.Entry kv = (Map.Entry) o;\n            Debug.trace( kv.getKey(), kv.getValue() );\n        }\n\n\n\n        Debug.trace( map.getAll( \"c1\" ), map.query( \"c1\",\"jesus\" ) );\n\n\n        Debug.trace( map.get(\"c1\") );\n        Debug.trace( map.get(\"b2\") );\n        Debug.trace( map.get(\"d3\") );\n\n        LinkedHashMap ls = new LinkedHashMap();\n        ls.put( \"g1\", \"gg\" );\n\n        map.elevate( ls );\n\n        LinkedHashMap linkedHashMap = new LinkedHashMap();\n        map.overrideTo( linkedHashMap );\n\n\n        Debug.trace( linkedHashMap, map.isEmpty(), map.isScopeEmpty(), map.hasOwnProperty(\"g1\"), map.hasOwnProperty(\"kc2\") );\n    }\n\n    public static void testPrecedeMultiMap() {\n        MultiScopeMaptron map1_0 = new MultiScopeMaptron(new JSONMaptron(\"{p1:'kp1', p2:'kp2'}\"));\n\n        PrecedeMultiScopeMap p = new PrecedeMultiMaptron();\n        p.addParent( map1_0 );\n        p.put( \"this1\", \"this1\" );\n        p.put( \"this\" , \"this is this\" );\n\n        MultiScopeMaptron mapKeyWord = new MultiScopeMaptron(new JSONMaptron(\"{this:'this is keyword', super:'super is keyword'}\"));\n    
    p.setPrecedeScope( mapKeyWord );\n\n        Debug.trace( p, p.get( \"p1\" ), p.get( \"this\" ) );\n    }\n\n    public static void testRecursiveEntryIterator() {\n        Map<String, Object> map = new JSONMaptron( \"{ k1:v1, k2:v2, k3: { k3_1:v3_1, k3_2:v3_2, li:[ 0,1,2,3, { lk1: vlk1, lk2:vlk2  } ] }, k3_4: v3_4  }\" );\n        //Map<String, Object> map = new JSONMaptron( \"{ k1:v1, li:[ 0, { lk1: vlk1, lk2:vlk2  } ] }, k3_4: v3_4  }\" );\n\n        RecursiveFamilyIterator<Object> iterator = new RecursiveFamilyIterator<>( map, true );\n        //RecursiveEntryIterator iterator = new RecursiveEntryIterator( map, true );\n\n        TypedNamespaceFamilyEntryNameEncoder encoder = new TypedNamespaceFamilyEntryNameEncoder();\n        while ( iterator.hasNext() ) {\n            UnitFamilyNode node = iterator.next();\n            //Debug.trace( node, node.parent(), node.namespacify( true ) );\n\n            Debug.trace( node, node.parent(), encoder.encodeNS( node, true ).getFullName(), node.namespacify( true ) );\n\n//            if( node.parent() != null ) {\n//                Debug.trace( \"K\", node.parent().parent() );\n//            }\n        }\n//        while ( iterator.hasNext() ) {\n//            Map.Entry node = iterator.next();\n//            Debug.trace( node );\n//        }\n\n        iterator = new RecursiveFamilyIterator<>( map, true );\n        GenericCollectedEntryEncoder entryEncoder = new GenericCollectedEntryEncoder( iterator );\n        Collection collection = entryEncoder.encode();\n\n        Debug.trace( collection );\n        Debug.trace( map );\n\n\n        GenericCollectedEntryDecoder decoder = new GenericCollectedEntryDecoder<>();\n        Map<String, Object> decoded = decoder.decode(collection);\n        Debug.trace(decoded);\n\n        iterator = new RecursiveFamilyIterator<>( map, true );\n        entryEncoder = new GenericCollectedEntryEncoder( iterator );\n        Map map1 = entryEncoder.regress();\n        Debug.trace( map1 );\n\n\n  
      decoded = decoder.evolve( map1 );\n        Debug.trace(decoded);\n    }\n\n\n    public static void testMergeSharedList(){\n        List<String> list1 = new ArrayList<>(List.of(\"a\", \"b\", \"c\",\"d\",\"e\"));\n        List<String> list2 = new ArrayList<>(List.of(\"X\", \"D\", \"F\",\"X\",\"Y\"));\n        System.out.println(\"list1: \"+list1);\n        System.out.println(\"list2: \"+list2);\n\n        SharedList<String> mergeList = SharedList.SharedListBuilder.merge(list1, list2);\n\n        System.out.println(\"merge list1 and list2 : \"+ mergeList);\n\n        SharedList<String> slice = SharedList.SharedListBuilder.slice(2, 6, mergeList);\n        System.out.println(\"slice mergeList from 2 to 6 : \"+ slice);\n\n        SharedList<String> merge2List = SharedList.SharedListBuilder.merge(list1, list2, slice);\n        System.out.println(\"merge list1 and list2 and slice : \"+ merge2List);\n        System.out.println(\"merge2 get index 1: \"+ merge2List.get(1));\n\n        SharedList<String> subList = merge2List.subList(4, 5);\n        System.out.println(\"merge2 subList from 4 to 5 :\"+ subList);\n\n        subList.set(1, \"hello\");\n        System.out.println(\"sublist after set sublist index 1 to hello: \"+subList);\n        System.out.println(\"merge2 after set sublist index 1 to hello: \"+merge2List);\n    }\n\n    public static void testTrieMap() {\n\n        UniTrieMaptron<String, String> trieMap = new UniTrieMaptron<>();\n\n        trieMap.put(\"a1/b1/c1\", \"T1\");\n        trieMap.put(\"a2/b2/c2\", \"T2\");\n        trieMap.put(\"a3/b3/c3\", \"T3\");\n        trieMap.put(\"a3/b4/c4\", \"T4\");\n        trieMap.put(\"a4/b5/c5\", \"T5\");\n        trieMap.put(\"a1/b1/c2\", \"T6\");\n        trieMap.put(\"a1/b1/c3\", \"T7\");\n\n        trieMap.makeSymbolic( \"a1/b1/rc5\", \"a3/b3/c3\" );\n\n        TrieNode node = trieMap.queryNode(\"a1/b1\");\n        //node.put(\"c4\",\"T8\",trieMap);\n        Debug.trace(trieMap.get(\"a1/b1/rc5\"));\n\n        
//trieMap.makeSymbolic( \"a1/b1/rc2\", \"a3\" );\n        trieMap.makeSymbolic( \"a1/b1/rc2\", \"a1/b1/rc5\" );\n        TrieNode<String > rc6 = trieMap.queryNode( \"a1/b1/rc2\" );\n        Debug.trace( rc6.evinceReparse().reparse() );\n        Debug.trace( trieMap.queryNode( \"a1\" ).getFullName() );\n\n        Debug.trace( trieMap, trieMap.size() );\n        trieMap.put(\"a3/b4\", \"RRR\");\n        Debug.greenf( trieMap, trieMap.size() );\n\n        trieMap.remove( \"a3\" );\n\n        Debug.trace( trieMap, trieMap.size() );\n\n\n        //trieMap.remove( \"a1/b1\" );\n\n        Debug.trace( trieMap.keySet() );\n\n        Debug.trace( trieMap.values() );\n\n\n\n\n\n        TrieMap clone = trieMap.clone();\n\n        Debug.trace(clone,clone.size());\n\n        Debug.trace(clone.keySet());\n\n        Debug.trace(clone.values());\n\n\n//        trieMap.put(\"a1/b1\", \"TCC\");\n//        Debug.trace( trieMap.get(\"a1/b1\") );\n\n\n    }\n\n    public static void testConcurrentTrie() {\n        UniTrieMaptron<String, String> trieMap = new UniTrieMaptron<>( ConcurrentHashMap::new );\n        //UniTrieMaptron<String, String> trieMap = new UniTrieMaptron<>( TreeMap::new );\n\n        trieMap.put(\"a1/b1/c1\", \"T1\");\n        trieMap.put(\"a2/b2/c2\", \"T2\");\n        trieMap.put(\"a3/b3/c3\", \"T3\");\n        trieMap.put(\"a3/b4/c4\", \"T4\");\n        trieMap.put(\"a4/b5/c5\", \"T5\");\n        trieMap.put(\"a1/b1/c2\", \"T6\");\n        trieMap.put(\"a1/b1/c3\", \"T7\");\n\n\n        var s = trieMap.root();\n\n\n\n\n\n        Debug.greenfs( trieMap );\n\n\n\n        int numberOfThreads = 10;\n        ExecutorService executorService = Executors.newFixedThreadPool(numberOfThreads);\n\n        for (int i = 0; i < numberOfThreads; i++) {\n            executorService.execute(new PathInserter(trieMap));\n        }\n\n\n        executorService.shutdown();\n        while (!executorService.isTerminated()) {\n        }\n\n        Debug.trace( trieMap.size() );\n    
}\n\n\n\n\n    private static final String CHARACTERS = \"abcdefghijklmnopqrstuvwxyz\";\n    private static final Random random = new Random();\n\n    private static String generateRandomPath() {\n        int segmentCount = 3 + random.nextInt(3); // Generate between 3 to 5 segments\n        StringBuilder path = new StringBuilder();\n        for (int i = 0; i < segmentCount; i++) {\n            if (i > 0) {\n                path.append('/');\n            }\n            path.append(generateRandomSegment());\n        }\n        return path.toString();\n    }\n\n    private static String generateRandomSegment() {\n        //int length = 2 + random.nextInt(3); // Generate segment length between 2 to 4\n        int length = 1;\n        StringBuilder segment = new StringBuilder(length);\n        for (int i = 0; i < length; i++) {\n            segment.append(CHARACTERS.charAt(random.nextInt(CHARACTERS.length())));\n        }\n        return segment.toString();\n    }\n\n    private static class PathInserter implements Runnable {\n        private final UniTrieMaptron<String, String> trieMap;\n\n        public PathInserter(UniTrieMaptron<String, String> trieMap) {\n            this.trieMap = trieMap;\n        }\n\n        @Override\n        public void run() {\n            for (int i = 0; i < 10000; i++) {\n                String path = generateRandomPath();\n                String value = \"Value_\" + random.nextInt(1000);\n                System.out.printf(\"Inserting path: %s with value: %s%n\", path, value);\n                try{\n                    trieMap.put(path, value);\n                }\n                catch ( IllegalArgumentException e ) {\n                    e.printStackTrace();\n                }\n            }\n        }\n    }\n\n\n    private static void testBitSet64(){\n        long i = 0b1110101110101110101010000010001111011000001010111011101110111010L ;\n        long k = 0 ;\n\n        for ( int j = 0; j < 64; ++j ) {\n            
Debug.redf(BitSet64.toBinaryStringLSB(i));\n            Debug.greenfs(BitSet64.toBinaryStringLSB(BitSet64.reverse(i,0,j)));\n            Debug.hhf();\n        }\n    }\n\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n            //TestUnits.testUniScopeMap();\n            //TestUnits.testMultiScopeMap();\n            //TestUnits.testPrecedeMultiMap();\n            //TestUnits.testRecursiveEntryIterator();\n            //TestUnits.testMergeSharedList();\n            //TestUnits.testTrieMap();\n\n            //TestUnits.testConcurrentTrie();\n            TestUnits.testBitSet64();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/TestCompactTimestamp.java",
    "content": "package com.util;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.datetime.compact.CompactTimeUnit32;\nimport com.pinecone.framework.util.datetime.compact.CompactTimestamp32;\n\npublic class TestCompactTimestamp {\n    public static void testTimestamp32() {\n        int encodedMs   = CompactTimestamp32.encode( 123, CompactTimeUnit32.MILLISECONDS );\n        int encodedSec  = CompactTimestamp32.encode( 60, CompactTimeUnit32.SECONDS       );\n        int encodedMin  = CompactTimestamp32.encode( 30, CompactTimeUnit32.MINUTES       );\n        int encodedHour = CompactTimestamp32.encode( 12, CompactTimeUnit32.HOURS         );\n        int encodedDay  = CompactTimestamp32.encode( 1, CompactTimeUnit32.DAYS           );\n        int encodedInf  = CompactTimestamp32.INFINITE;\n\n        Debug.trace( \"Milliseconds: \" + CompactTimestamp32.toMilliseconds(encodedMs));\n        Debug.trace( \"Seconds: \"      + CompactTimestamp32.toMilliseconds(encodedSec));\n        Debug.trace( \"Minutes: \"      + CompactTimestamp32.toMilliseconds(encodedMin));\n        Debug.trace( \"Hours: \"        + CompactTimestamp32.toMilliseconds(encodedHour));\n        Debug.trace( \"Days: \"         + CompactTimestamp32.toMilliseconds(encodedDay));\n        Debug.trace( \"Infinite: \"     + CompactTimestamp32.toMilliseconds(encodedInf));\n    }\n\n    public static void testTimestamp32_norm() {\n        long[] testValues = { 123, 60_000, 3_600_000, 86_400_000, 500_000_000_000L };\n\n        for ( long millis : testValues ) {\n            int encoded = CompactTimestamp32.fromMilliseconds( millis );\n            System.out.printf(\n                    \"Millis: %d -> Encoded: %s -> Normalized: %d ms\\n\",\n                    millis, CompactTimestamp32.format( encoded ), CompactTimestamp32.toMilliseconds( encoded )\n            );\n        }\n    }\n\n    public static void main( String[] args ) throws Exception {\n      
  //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            TestCompactTimestamp.testTimestamp32();\n            TestCompactTimestamp.testTimestamp32_norm();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/TestDateTime.java",
    "content": "package com.util;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.datetime.GenericMultiFormDateTimeAudit;\n\nimport java.time.LocalDateTime;\n\npublic class TestDateTime {\n    public static void testAudit() {\n        GenericMultiFormDateTimeAudit audit = new GenericMultiFormDateTimeAudit();\n        LocalDateTime currentTime = LocalDateTime.of(2024, 6, 24, 1, 2, 3);\n\n        Debug.trace(1,audit.matches(\"2024-06-24T01:02:03\", currentTime)); // true\n\n        Debug.trace(2,audit.matches(\"2024-06-24 1:02:03\", currentTime)); // true\n\n        Debug.trace(3,audit.matches(\"2024-06-24 1:2:03\", currentTime)); // true\n\n        Debug.trace(4,audit.matches(\"2024-06-24 1:2:3\", currentTime)); // true\n\n        Debug.trace(5,audit.matches(\"2024-06-24 01:2:3\", currentTime)); // true\n\n        Debug.trace(6,audit.matches(\"2024-06-24 01:02:3\", currentTime)); // true\n\n        Debug.trace(7,audit.matches(\"2024-06-24 01:2:03\", currentTime)); // true\n        Debug.trace(-7,audit.matches(\"2024-06-24 01:2:02\", currentTime));\n\n        Debug.trace(8,audit.matches(\"2024-06-24\", currentTime)); // true\n\n        Debug.trace(9,audit.matches(\"2024-6-24\", currentTime)); // true\n\n        Debug.trace(10,audit.matches(\"1:02:03\", currentTime)); // true\n\n        Debug.trace(11,audit.matches(\"1:2:03\", currentTime)); // true\n\n        Debug.trace(12,audit.matches(\"1:2:3\", currentTime)); // true\n\n        Debug.trace(13,audit.matches(\"01:2:3\", currentTime)); // true\n\n        Debug.trace(14,audit.matches(\"01:02:3\", currentTime)); // true\n\n        Debug.trace(15,audit.matches(\"01:2:03\", currentTime)); // true\n\n        Debug.trace(16,audit.matches(\"?\", currentTime)); // true\n\n        Debug.trace(17,audit.matches(\"2024-06-24 01:02:??\", currentTime)); // true\n\n        Debug.trace(18,audit.matches(\"2024-06-24 01:??:??\", currentTime)); // true\n\n        
Debug.trace(19,audit.matches(\"2024-06-24 ??:??:??\", currentTime)); // true\n\n        Debug.trace(20,audit.matches(\"2024-06-?? ??:??:??\", currentTime)); // true\n\n        Debug.trace(21,audit.matches(\"2024-??-?? ??:??:??\", currentTime)); // true\n\n        Debug.trace(22,audit.matches(\"????-??-?? ??:??:??\", currentTime)); // true\n\n        Debug.trace(23,audit.matches(\"2024-06-24 01:??:03\", currentTime)); // true\n\n        Debug.trace(24,audit.matches(\"2024-??-24 01:??:03\", currentTime)); // true\n\n        Debug.trace(25,audit.matches(\"????-??-24 01:??:03\", currentTime)); // true\n        Debug.trace(-25,audit.matches(\"????-??-24 13:??:03\", currentTime));\n\n        Debug.trace(26,audit.matches(\"????-??-24 01:??:03\", currentTime)); // true\n\n\n        Debug.trace(27,audit.matches(\"01:2\", currentTime)); // true\n        Debug.trace(-27,audit.matches(\"01:3\", currentTime));\n\n        Debug.trace(28,audit.matches(\"2024-06-24 01:2\", currentTime)); // true\n        Debug.trace(-28,audit.matches(\"2024-06-24 01:03\", currentTime));\n\n        Debug.trace(29,audit.matches(\"2024-06\", currentTime)); // true\n        Debug.trace(-29,audit.matches(\"2024-07\", currentTime));\n\n        Debug.trace(30,audit.matches(\"01:?\", currentTime)); // true\n        Debug.trace(-30,audit.matches(\"02:?\", currentTime));\n\n        Debug.trace(31,audit.matches(\"2024-06/24 01:2\", currentTime)); // true\n        Debug.trace(-31,audit.matches(\"2024/06/24 01:03\", currentTime));\n\n        Debug.trace(32,audit.matches(\"2024.06.24 01:2\", currentTime)); // true\n        Debug.trace(-32,audit.matches(\"2024.06.24 01:03\", currentTime));\n\n        Debug.trace(33,audit.matches(\"2024.06\", currentTime)); // true\n        Debug.trace(-33,audit.matches(\"2024.07\", currentTime));\n\n        Debug.trace(34,audit.matches(\"2024.06.24\", currentTime)); // true\n        Debug.trace(-34,audit.matches(\"2024.06.25\", currentTime));\n    }\n\n    public static void 
testAuditAccuracy() {\n        GenericMultiFormDateTimeAudit audit = new GenericMultiFormDateTimeAudit();\n        LocalDateTime currentTime = LocalDateTime.of(2024, 6, 24, 1, 2, 3);\n\n        Debug.trace(1,audit.betweenSec(\"2024-06-24 01:??:13\", currentTime, 10 ));\n        Debug.trace(2,audit.betweenSec(\"2024-06-24 01:01:58\", currentTime, 10 ));\n        Debug.trace(3,audit.betweenSec(\"2024-06-?? 01:01:58\", currentTime, 10 ));\n        Debug.trace(-3,audit.betweenSec(\"2024-06-?? 01:02:58\", currentTime, 10 ));\n        Debug.trace(-3,audit.betweenSec(\"2024-06-?? 01:02:14\", currentTime, 10 ));\n\n        Debug.trace(4,audit.betweenMin(\"2024-06-?? 01:01:58\", currentTime, 1 ));\n        Debug.trace(-4,audit.betweenMin(\"2024-06-?? 01:03:52\", currentTime, 1 ));\n        Debug.trace(5,audit.betweenMin(\"2024-06-?? 01:03:02\", currentTime, 1 ));\n        Debug.trace(6,audit.betweenMin(\"2024-06-?? 01:03:03\", currentTime, 1 ));\n        Debug.trace(-6,audit.betweenMin(\"2024-06-?? 01:03:04\", currentTime, 1 ));\n//\n//        Debug.trace(7,audit.between(\"2024-06-?? 01:02:04\", currentTime, 1000 ));\n//        Debug.trace(-7,audit.between(\"2024-06-?? 01:02:05\", currentTime, 1000 ));\n\n        Debug.trace(8,audit.between(\"2024-06-?? 01:02:02.950\", currentTime, 100 ));\n        Debug.trace(-8,audit.between(\"2024-06-?? 01:02:02.850\", currentTime, 100 ));\n\n\n        Debug.trace(9,audit.betweenSec(\"??:2\", currentTime, 10 ));\n        Debug.trace(-9,audit.betweenSec(\"??:3\", currentTime, 10 ));\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestDateTime.testAudit();\n            TestDateTime.testAuditAccuracy();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/TestJSONConfig.java",
    "content": "package com.util;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.*;\n\nimport java.nio.file.Path;\n\npublic class TestJSONConfig {\n    public static void test_JC1( )  {\n        JSONConfig json = new JSONConfig( new JSONMaptron( \"{ self:selfV, num:1234, sch:{ n: name, ssch: { n:n1  } } }\" )  );\n        json.addGlobalScope(  new JSONMaptron(\"{ satan: 'Satan', jesus: 'Jesus', obj:{ k=sss, f=sxf } }\")  );\n        json.addGlobalScope(  new JSONMaptron(\"{ f1: 'Satan', f2: 'Jesus', f3:{ k=fsss, f=s13xf } }\")  );\n\n        Debug.trace( json.optJSONObject( \"f3\" ), json.opt( \"num\" ), json.optJSONObject( \"sch\" ) );\n        Debug.trace( json );\n\n        JSONConfig sch = json.getChild( \"sch\" );\n        Debug.trace( sch, sch.optJSONObject( \"f3\" ), sch.opt( \"satan\" ), sch.optJSONObject( \"ssch\" ) );\n\n        JSONConfig ssch = json.getChild( \"ssch\" );\n        Debug.trace( ssch, ssch.opt( \"f2\" ), sch.opt( \"obj\" ), sch.opt( \"n\" ) );\n\n    }\n\n    public static void test_JC( )  {\n        JPlusContext context = new JPlusContext();\n        context.addParentPath(Path.of(\"E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecone/src/test/java\"));\n        context.setOverriddenAffinity( true );\n        context.addGlobalScope( new JSONMaptron(\"{ satan: 'Satan', jesus: 'Jesus' }\") );\n        //context.addGlobalScope( new JSONMaptron(\"{ this: { key:'TakeOver' } }\") );\n\n        JSONObject obj = (JSONObject) JPlus.parse(\n                \" { ro = 'root', next = { p = 'parent', po1: { kp:true, int = 9 }, pa:[9,9.01,6],\" +\n                        \"next : { #extends super.po1, int = 7, str: &this.int,  end:xxxx, obj:{a:1, h:&this.a}, obj2:{/*#extends super.obj*/ h:&super.obj} ,inc:#include \\\"./com/util/inc.jplus\\\" /**/ }, \" +\n                        \"arr:[ #extends 'super.pa', 1, 
&'this[1]', null, 'fuck' ]/**/ } }\", context ) ;\n\n        Debug.echo( obj.toJSONStringI(4) );\n\n\n\n\n    }\n\n    public static void test_Dictionary( )  {\n        JSONObject object = new JSONMaptron( \"{ satan: 'Satan', jesus: 'Jesus' }\" );\n        JSONDictium dictium = object;\n\n        for ( Object o : dictium.entrySet() ) {\n            Debug.trace( o.toString() );\n        }\n\n        JSONArray array = new JSONArraytron( \"[0,1,2,3,4,5,6,7,8,9]\" );\n        dictium = array;\n\n        for ( Object o : dictium.entrySet() ) {\n            Debug.trace( o );\n        }\n\n        Debug.trace( dictium.optInt( \"31s\" ) );\n\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestUnits.testUniScopeMap();\n\n\n            //TestJSONConfig.test_JC1();\n            //TestJSONConfig.test_Dictionary();\n            //Debug.trace( ( new URI( \"/ssss\" ) ) );\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/TestNamespace.java",
    "content": "package com.util;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.name.*;\n\nimport java.util.List;\n\npublic class TestNamespace {\n    public static void testNS() throws Exception {\n        GenericNamespaceParser parser = new GenericNamespaceParser();\n        Namespace parsedNamespace = parser.parse( \"x1::x2/x3\\\\x4->x5.x6.x7->x8.x9.x10.x11::x12.x13.x14\", List.of( \"::\", \".\", \"->\", \"\\\\\", \"/\" ) );\n        Debug.trace(  parsedNamespace.getFullName() );\n\n        parsedNamespace = parser.parse( \"x1::x2/x3\\\\x4->x5.x6.x7->x8.x9.x10.x11::x12.x13.x14\", \"::|\\\\.|->|\\\\\\\\|/\" );\n        Debug.trace(  parsedNamespace.getFullName(), parsedNamespace.getSimpleName(), parsedNamespace );\n\n\n        Namespace namespace = new UniNamespace( \"Jesus\", new UniNamespace( \"this\" ) );\n        Debug.trace( namespace.getFullName(), namespace.parent().getSimpleName(), parsedNamespace.root() );\n    }\n\n    public static void testMultiNS() throws Exception {\n        MultiNamespace root = new GenericMultiNamespace( \"root\" );\n\n\n        MultiNamespace namespace = new GenericMultiNamespace( \"x2\" );\n        namespace.addParent( new GenericMultiNamespace( \"x1_0\", root ) );\n        namespace.addParent( new GenericMultiNamespace( \"x1_1\", root ) );\n\n        Debug.trace( namespace.getFullNames(), namespace.hasOwnParentNS( \"x1_0\" ) );\n        Debug.trace( namespace.hasOwnParent( new GenericMultiNamespace( \"x1_0\", root ) ) );  // root.x1_0\n        Debug.trace( namespace.hasOwnParent( new GenericMultiNamespace( \"x1_0\" ) ) );  // x1_0\n\n        Debug.trace( namespace.getParentByNS( \"x1_0\" ).getFullName(), namespace.getParents(), namespace.getDomain() );\n\n\n        GenericNamespaceParser parser = new GenericNamespaceParser( GenericMultiNamespace.class );\n        Namespace parsedNamespace = parser.parse( \"x1::x2/x3\\\\x4->x5.x6.x7->x8.x9.x10.x11::x12.x13.x14\", 
List.of( \"::\", \".\", \"->\", \"\\\\\", \"/\" ) );\n        Debug.trace(  parsedNamespace.getFullName(), parsedNamespace.getDomain() );\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n\n            TestNamespace.testNS();\n            //TestNamespace.testMultiNS();\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/TestParser.java",
    "content": "package com.util;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.config.GenericStartupCommandParser;\n\nimport java.util.Map;\n\n\npublic class TestParser {\n    public static void testGenericStratupCommandParser() throws Exception{\n        GenericStartupCommandParser parser = new GenericStartupCommandParser();\n        Map<String, String[]> result = parser.parse(new String[]{\"--key1=val1,val2\", \"-key2:val3;val4\", \"/key3=val5|val6\", \"--key4=1234\"});\n\n        for ( Map.Entry<String, String[]> entry : result.entrySet() ) {\n            Debug.trace( entry.getKey(), (Object) entry.getValue() );\n        }\n    }\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n            TestParser.testGenericStratupCommandParser();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/TestRRWSLock.java",
    "content": "package com.util;\n\nimport java.util.Map;\nimport java.util.TreeMap;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.locks.ReentrantLock;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.lock.ReentrantReadWriteSpinLock;\nimport com.pinecone.framework.util.lock.ReentrantSpinLock;\nimport com.pinecone.framework.util.lock.SpinLock;\n\npublic class TestRRWSLock {\n    private static final ReentrantReadWriteSpinLock lock = new ReentrantReadWriteSpinLock();\n    //private static final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();\n    //private static final ReentrantLock cl = new ReentrantLock();\n    //private static final SpinLock cl = new SpinLock();\n    private static final ReentrantSpinLock cl = new ReentrantSpinLock();\n\n    private static void readOperation(int threadId) {\n        lock.readLock().lock();\n        try {\n            Debug.trace( \"Thread \" + threadId + \" is reading...\" );\n            try {\n                Thread.sleep(100);\n            }\n            catch ( InterruptedException e ) {\n                e.printStackTrace();\n            }\n        }\n        finally {\n            lock.readLock().unlock();\n            Debug.trace( \"Thread \" + threadId + \" finished reading.\" );\n        }\n    }\n\n    private static void writeOperation(int threadId) {\n        lock.writeLock().lock();\n        try {\n            Debug.trace( \"Thread \" + threadId + \" is writing...\" );\n            try {\n                Thread.sleep(200);\n            }\n            catch (InterruptedException e) {\n                e.printStackTrace();\n            }\n        }\n        finally {\n            lock.writeLock().unlock();\n            Debug.trace( \"Thread \" + threadId + \" finished writing.\" );\n        }\n    }\n\n    private static 
void testReentrancy() {\n        lock.writeLock().lock();\n        try {\n            Debug.trace( \"Main thread started writing.\" );\n            lock.writeLock().lock();\n            try {\n                Debug.trace( \"Main thread re-entered writing.\" );\n            }\n            finally {\n                lock.writeLock().unlock();\n            }\n        }\n        finally {\n            lock.writeLock().unlock();\n            Debug.trace( \"Main thread finished writing.\" );\n        }\n    }\n\n    private static void testSimple() {\n        ExecutorService executorService = Executors.newFixedThreadPool( 4 );\n\n        for ( int i = 1; i <= 3; ++i ) {\n            final int threadId = i;\n            executorService.submit(() -> readOperation(threadId));\n        }\n\n        for ( int i = 1; i <= 3; ++i ) {\n            final int threadId = i;\n            executorService.submit(() -> writeOperation(threadId));\n        }\n\n        executorService.submit(() -> testReentrancy());\n\n        executorService.shutdown();\n    }\n\n\n    private static final Map<Integer, Integer> map = new TreeMap<>();\n\n    private static int cnt = 0;\n\n    private static void treeReadOperation() {\n        for ( int i = 0; i < 1e6; i++ ) {\n            lock.readLock().lock();\n            //cl.lock();\n            try {\n                //Debug.trace( map.get(i) );\n                map.get(i);\n            }\n            finally {\n                //cl.unlock();\n                lock.readLock().unlock();\n            }\n        }\n\n        ++cnt;\n    }\n\n    private static void treeWriteOperation() {\n        for ( int i = 0; i < 1e6; i++ ) {\n            lock.writeLock().lock();\n            //cl.lock();\n            try {\n                map.put(i, i);\n            }\n            finally {\n                //cl.unlock();\n                lock.writeLock().unlock();\n            }\n        }\n\n        ++cnt;\n    }\n\n    private static void testUnit() {\n        
Thread rt = new Thread( TestRRWSLock::treeReadOperation );\n        Thread wt = new Thread( TestRRWSLock::treeWriteOperation );\n\n        rt.start();\n        wt.start();\n\n        while ( cnt < 2 ) {\n            Debug.sleep(1);\n        }\n\n    }\n\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n\n            TestRRWSLock.testUnit();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/TestTemplate.java",
    "content": "package com.util;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JPlus;\nimport com.pinecone.framework.util.json.JPlusContext;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.template.TemplateParser;\n\nimport java.nio.file.Path;\n\npublic class TestTemplate {\n    public static void test_UTL( )  {\n        TemplateParser templateParser = new TemplateParser( \"122 ${arr[1].f['k']} ${ key[ key['g'].c ] } sdd\", ( new JSONMaptron( \"{ g1:'k', key:{  g:{ c:'k' },k:'1xxxx2' }, arr:[1,{f:{k:'sss'}},3] }\" ) ).getMap() );\n        Debug.trace( templateParser.eval() );\n    }\n\n    public static void test_JPlus( )  {\n        JPlusContext context = new JPlusContext();\n        context.addParentPath(Path.of(\"E:/MyFiles/CodeScript/Project/Hazelnut/Sauron/Saurons/Pinecone/src/test/java\"));\n        context.setOverriddenAffinity( true );\n        context.addGlobalScope( new JSONMaptron(\"{ satan: 'Satan', jesus: 'Jesus' }\") );\n        //context.addGlobalScope( new JSONMaptron(\"{ this: { key:'TakeOver' } }\") );\n\n        JSONObject obj = (JSONObject) JPlus.parse(\n                \" { ro = 'root', next = { p = 'parent', po1: { kp:true, int = 9 }, pa:[9,9.01,6],\" +\n                \"next : { #extends super.po1, int = 7, str: &this.int,  end:xxxx, obj:{a:1, h:&this.a}, obj2:{/*#extends super.obj*/ h:&super.obj} ,inc:#include \\\"./com/util/inc.jplus\\\" /**/ }, \" +\n                \"arr:[ #extends 'super.pa', 1, &'this[1]', null, 'fuck' ]/**/ } }\", context ) ;\n\n        Debug.echo( obj.toJSONStringI(4) );\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestUnits.testUniScopeMap();\n\n\n            //TestTemplate.test_UTL();\n 
           TestTemplate.test_JPlus();\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/inc.jplus",
    "content": "{\n  #extends 'super.obj',\n  so : &super.obj,\n\n  parentScope:{\n    keykey: \"satan\",\n  },\n\n  \"key\": 'self->key',\n  \"utl\": #\"this->key: '${this.key}' ${ __scope__[ parentScope.keykey ] } fucks ${jesus} and ${key} so super harder | ${__root__}\",\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/json/Parasite.java",
    "content": "package com.util.json;\n\nimport com.pinecone.framework.util.json.homotype.DirectJSONInjector;\n\npublic class Parasite {\n    public String    name  ;\n    public long      length;\n    public int       emnus;\n\n    public Parasite() {\n\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public long getLength() {\n        return this.length;\n    }\n\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    public void setLength( long length ) {\n        this.length = length;\n    }\n\n    public String toJSONString() {\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n\n    public String toString(){\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/json/Slave.java",
    "content": "package com.util.json;\n\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.json.homotype.StructJSONEncoder;\n\npublic class Slave {\n    public String    name  ;\n    public long      length;\n    public int       emnus;\n    public Parasite  parasite;\n    public Map       atts;\n    public Object[]  li;\n\n    //public Slave     child;\n    public List<Slave>    children;\n    //public Slave[]    children2;\n    //public Object[]    children;\n    //public Map[]    children;\n    //public List<JSONObject>    children;\n    //public List<Map>    children;\n    //public List<TreeMap>    children;\n    //public List<Object>    children;\n\n\n    public Map<String, Slave > ms;\n\n    public Slave() {\n\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public long getLength() {\n        return this.length;\n    }\n\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    public void setLength( long length ) {\n        this.length = length;\n    }\n\n    public void setParasite2( Parasite parasite ) {\n        this.parasite = parasite;\n    }\n\n//    public void setChildren( List<Slave> slaves ) {\n//        this.children = slaves;\n//    }\n\n//    public List<Slave> getChildren() {\n//        return this.children;\n//    }\n\n    public String toJSONString() {\n        return StructJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    public String toString(){\n        return StructJSONEncoder.BasicEncoder.encode( this );\n    }\n}"
  },
  {
    "path": "Pinecones/Pinecone/src/test/java/com/util/json/TestJSON.java",
    "content": "package com.util.json;\n\nimport java.util.List;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.prototype.ObjectiveBean;\nimport com.pinecone.framework.system.prototype.ObjectiveClass;\nimport com.pinecone.framework.system.prototype.ObjectiveEvaluator;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.*;\nimport com.pinecone.framework.util.json.homotype.*;\n\nclass Dick {\n    @JSONGet( \"name\" )\n    public String    mName  ;\n    @MapStructure\n    public long      length;\n    public int       emnus;\n\n    public Dick() {\n\n    }\n\n    public Dick( long      length ) {\n        this.mName = \"dick\";\n        this.length = length;\n    }\n\n    public String toJSONString() {\n        return AnnotatedJSONInjector.instance().inject( this ).toString();\n    }\n\n    public String toString(){\n        return AnnotatedJSONInjector.instance().inject( this ).toString();\n    }\n}\n\nclass Shit{\n    @JSONGet\n    public  String    mName ;\n    public int        length;\n    @JSONGet\n    Dick[] array;\n    //public JSONArray  array;\n    @JSONGet\n    public Dick       dick = new Dick();\n\n    public Shit(  ){\n\n    }\n\n//    public Object getName() {\n//        return this.mName;\n//    }\n\n    public Object test( int i, Integer c, String sz ) {\n        return sz + i + c;\n    }\n\n    public Object trial( Object... 
arg ){\n        return arg;\n    }\n\n\n/*    public String toJSONString() {\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }*/\n\n/*    public String toString(){\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }*/\n\n}\n\nclass Vagina {\n    public String    name  ;\n    public long      length;\n    public int       emnus;\n\n    public Vagina() {\n\n    }\n\n    public String getName() {\n        return this.name;\n    }\n\n    public long getLength() {\n        return this.length;\n    }\n\n    public void setName( String name ) {\n        this.name = name;\n    }\n\n    public void setLength( long length ) {\n        this.length = length;\n    }\n\n    public String toJSONString() {\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n\n    public String toString(){\n        return DirectJSONInjector.instance().inject( this ).toString();\n    }\n}\n\npublic class TestJSON {\n    public static void testDirectlyInjector() {\n        Shit shit = new Shit();\n        JSONObject jsonShit = new JSONMaptron(\"{ name:'shit',  'fuck':7, 'length': 1, 'array':[{name:'shit',length:1998}] }\");\n        Debug.trace( jsonShit );\n        shit = (Shit) ( new DirectObjectInjector(  true, Shit.class ) ).inject( jsonShit );\n        Debug.trace(shit);\n\n        Debug.echo( JSON.marshal( shit ) );\n\n    }\n\n    public static void testAnnotatedInjector() {\n//        Dick dick = new Dick();\n//        JSONObject jsonShit = new JSONMaptron(\"{ name:'shit', 'length': 1, 'array':[{name:'shit',length:1998}] }\");\n//        Debug.trace( jsonShit );\n//        dick = (Dick) ( new AnnotatedObjectInjector( Dick.class ) ).inject( jsonShit );\n//        Debug.trace( dick );\n    }\n\n    public static void testObjectom() {\n        Dick dick = new Dick();\n        JSONObject jsonShit = new JSONMaptron(\"{ name:'shit', 'length': 1, 'array':[{name:'shit',length:1998}] }\");\n        Debug.trace( 
jsonShit );\n        dick = (Dick) ( new AnnotatedObjectInjector( Dick.class ) ).inject( jsonShit );\n\n        ObjectiveClass objectom = new ObjectiveClass( dick );\n        Debug.echo( objectom.toJSONString(), objectom.get( \"length\" ), objectom.get( \"mName\" ) );\n    }\n\n    public static void testObjectiveBean() {\n        Vagina vagina = new Vagina();\n        JSONObject jsonShit = new JSONMaptron(\"{ name:'shit', 'length': 1 }\");\n        Debug.trace( jsonShit );\n        vagina = (Vagina) ( new DirectObjectInjector( Vagina.class ) ).inject( jsonShit );\n\n//        JSONObject o = new JSONMaptron( vagina );\n//        Debug.echo( o.toJSONString() );\n\n\n        ObjectiveBean bean = new ObjectiveBean( vagina );\n        Debug.echo( bean.toJSONString() );\n        bean.set( \"name\", \"fuck\" );\n        Debug.echo( bean.toJSONString() );\n        //bean.set( \"key\", \"fuck\" );\n\n        Debug.trace( bean.keys() );\n\n\n        Vagina na = new Vagina();\n        ObjectiveBean naBean = new ObjectiveBean( na );\n        for ( String key : bean.keys() ) {\n            naBean.set( key, bean.get( key ) );\n        }\n        Debug.trace( vagina, na );\n\n        Debug.trace( ObjectiveEvaluator.MapStructures.get( na, \"name\" ) );\n        ObjectiveEvaluator.MapStructures.set( na, \"name\", \"test2\" );\n        Debug.trace( ObjectiveEvaluator.MapStructures.get( na, \"name\" ) );\n        Debug.trace( ObjectiveEvaluator.MapStructures.get( na, \"emnus\" ) );\n        ObjectiveEvaluator.MapStructures.set( na, \"emnus\", 124 );\n        Debug.trace( ObjectiveEvaluator.MapStructures.get( na, \"emnus\" ) );\n    }\n\n    public static void testStringfiy() {\n        JSONMaptron jo = new JSONMaptron( \"{ k1:v1, k2:v2, k3:{ k3_1: v3_1, k3_2:[ 0, 1, true, false, undefined, [[[[],[],[],{}]]], [{ k3_a_1: v3_a_1, k3_a_2: 3.1415926 }]  ] } }\" );\n        Debug.trace( jo );\n\n        Object[] arr = new Object[] { \"v1\", 1, 3.1415926, null, false, \"v_end\" };\n        
Debug.trace( arr );\n\n    }\n\n    public static void testMarshal() {\n        Slave j = JSON.unmarshal( \"{ /*name:Slave, length:1234, parasite:{ name: parasite, length:20241102 }, atts: { key:val }, li:[1,2,3, 'ssss'],*/\" +\n                \"children: [{ name:Slave, length:1234, parasite:{ name: parasitec, length:20241117 }  } ],\" +\n                \"ms: { fi: { name:Slave, length:1234, parasite:{ name: parasitec, length:20241117 }  } }\" +\n                \" }\", Slave.class );\n        Debug.fmp( 2, j );\n\n        List<String> l = JSON.unmarshal( \"['fuck', 'me']\", new TypeReference<>() {} );\n        Debug.fmp( 2, l );\n    }\n\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestJSON.testDirectlyInjector();\n            //TestJSON.testAnnotatedInjector();\n            //TestJSON.testObjectom();\n            //TestJSON.testObjectiveBean();\n            //TestJSON.testStringfiy();\n            TestJSON.testMarshal();\n\n\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>pinecones</artifactId>\n        <groupId>com.pinecones</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>9</source>\n                    <target>9</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.slime</groupId>\n    <artifactId>slime</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/CacheConstants.java",
    "content": "package com.pinecone.slime.cache;\n\npublic final class CacheConstants {\n    public static final int DefaultCachePageLocalCapacity  = 100;\n    public static final int DefaultCachePageCapacity       = 1000;\n    public static final int DefaultCachePageMegaCapacity   = 10000;\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/ArchConcurrentCountDictCache.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport java.util.concurrent.atomic.AtomicLong;\n\npublic abstract class ArchConcurrentCountDictCache<V > implements UniformCountDictCache<V > {\n    protected AtomicLong mnMisses;\n    protected AtomicLong mnAccesses;\n\n    protected ArchConcurrentCountDictCache(){\n        this.mnMisses   = new AtomicLong( 0 );\n        this.mnAccesses = new AtomicLong( 0 );\n    }\n\n    protected void afterKeyVisited( Object key ) {\n        this.recordAccess();\n    }\n\n    protected abstract V missKey( Object key ) ;\n\n    protected void recordMiss() {\n        this.mnMisses.incrementAndGet();\n    }\n\n    protected void recordAccess() {\n        this.mnAccesses.incrementAndGet();\n    }\n\n    @Override\n    public long getMisses() {\n        return this.mnMisses.get();\n    }\n\n    @Override\n    public long getAccesses() {\n        return this.mnAccesses.get();\n    }\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/ArchCountDictCache.java",
    "content": "package com.pinecone.slime.cache.query;\n\npublic abstract class ArchCountDictCache<V > implements UniformCountDictCache<V > {\n    protected long mnMisses;\n    protected long mnAccesses;\n\n    protected ArchCountDictCache(){\n        this.mnMisses   = 0;\n        this.mnAccesses = 0;\n    }\n\n    protected void afterKeyVisited( Object key ) {\n        this.recordAccess();\n    }\n\n    protected abstract V missKey( Object key ) ;\n\n    protected void recordMiss() {\n        ++this.mnMisses;\n    }\n\n    protected void recordAccess() {\n        ++this.mnAccesses;\n    }\n\n    @Override\n    public long getMisses() {\n        return this.mnMisses;\n    }\n\n    @Override\n    public long getAccesses() {\n        return this.mnAccesses;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/ArchLocalDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport com.pinecone.framework.unit.Dictium;\n\nimport java.util.Collection;\nimport java.util.Set;\n\npublic abstract class ArchLocalDictCachePage<V > extends ArchCountDictCache<V > implements LocalDictCachePage<V >, IterableDictCachePage<V > {\n    private long                  mnId;\n    private final int             mnCapacity;\n    private final Dictium<V >     mCache;\n\n    protected ArchLocalDictCachePage( long id, int capacity, Dictium<V > cache ) {\n        super();\n        this.mnId       = id;\n        this.mnCapacity = capacity;\n        this.mCache     = cache;\n    }\n\n    @Override\n    public long getId() {\n        return this.mnId;\n    }\n\n    @Override\n    public void setId( long id ) {\n        this.mnId = id;\n    }\n\n    @Override\n    public Dictium<V > getDictium() {\n        return this.mCache;\n    }\n\n    @Override\n    public long capacity() {\n        return this.mnCapacity;\n    }\n\n    @Override\n    public long size() {\n        return this.mCache.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mCache.isEmpty();\n    }\n\n    @Override\n    public V get( Object key ) {\n        V v = this.mCache.get( key );\n        if( v == null ) {\n            v = this.missKey( key );\n        }\n        this.afterKeyVisited( key );\n        return v;\n    }\n\n    @Override\n    public V erase( Object key ) {\n        V v = this.mCache.erase( key );\n        this.afterKeyVisited( key );\n        return v;\n    }\n\n    @Override\n    public boolean existsKey( Object key ) {\n        boolean b = this.mCache.containsKey( key );\n        this.afterKeyVisited( key );\n        return b;\n    }\n\n    @Override\n    public void clear() {\n        this.mCache.clear();\n    }\n\n    @Override\n    public long elementSize() {\n        return this.size();\n    }\n\n    @Override\n    public Set<? 
> entrySet() {\n        return this.getDictium().entrySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return this.getDictium().values();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/ConcurrentMergeLRUDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport java.util.Collection;\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.concurrent.locks.ReadWriteLock;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\nimport com.pinecone.framework.unit.Dictium;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.unit.MapDictium;\nimport com.pinecone.slime.cache.CacheConstants;\n\n/**\n *  Pinecone Ursus For Java [ ConcurrentMergeDictCachePage ]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Three-level caching strategy:\n *  L1 Cache: A thread-local cache, achieving the highest performance with lock-free access but is not shareable.\n *  L2 Cache: When a key is evicted from L1, it automatically degrades to L2. With read-write locks,\n *            resulting in a slight performance drop [Lazy merging upwards].\n *  L3 Cache: It will be eliminated, if a key is evicted from L2. 
No automatic replenishment occurs,\n *            and subsequent access will query external caching services.\n *  This design aims to minimize performance loss caused by locking and accessing external services.\n *  *****************************************************************************************\n *  采用三级缓存设计\n *  其中L1缓存是线程局部缓存（无锁化，性能最高，分治全局不可共享）\n *  当L1中键被淘汰，自动降级到L2，此时使用读写锁，性能有下降但不多 [向上懒汉式归并]\n *  L2缓存再次被淘汰，非升即走，不再自动补充，后面需要访问会访问外部缓存服务\n *  该设计旨在尽可能避免加锁和访问外部服务带来的性能损失。\n *  *****************************************************************************************\n */\npublic class ConcurrentMergeLRUDictCachePage<V > extends ArchConcurrentCountDictCache<V > implements LocalDictCachePage<V >, IterableDictCachePage<V >, UniformCountSelfLoadingDictCache<V >  {\n    private long                                  mnId;\n    private final int                             mnCapacity;\n    private final Dictium<V >                     mMegaCache;\n    private final ReadWriteLock                   mMegaLock;\n    private SourceRetriever<V >                   mSourceRetriever;\n    private boolean                               mbEnableL2DirectLoad;\n\n    private final ThreadLocal<Map<Object, V > >   mLocalPage;\n\n    protected boolean degradeLocalKey( int size, int capacity, Map.Entry<Object, V > eldest ) {\n        boolean bElimination = size > capacity;\n        if ( bElimination ) {\n            this.mMegaLock.writeLock().lock();\n            try{\n                // Degrading local-key and merging into mega-L2-cache if the key is ancient enough. (PS, L2-Cache is rw-lock-based then slower)\n                // 如果线程独占高速缓存中的键被淘汰，降级并入L2. 
(二缓有锁，慢)\n                this.mMegaCache.insert( eldest.getKey(), eldest.getValue() );\n            }\n            finally {\n                this.mMegaLock.writeLock().unlock();\n            }\n        }\n        return bElimination;\n    }\n\n    public ConcurrentMergeLRUDictCachePage( long id, int capacity, int localCap, boolean bUsingTree, boolean bEnableL2DirectLoad, Map<Object, V > initData, SourceRetriever<V > retriever ) {\n        super();\n        this.mnId                  = id;\n        this.mnCapacity            = capacity;\n        this.mMegaCache            = new MapDictium<>( LocalFixedLRUDictCachePage.newMap( bUsingTree, capacity, initData ) ) ;\n        this.mSourceRetriever      = retriever;\n        this.mMegaLock             = new ReentrantReadWriteLock();\n        this.mbEnableL2DirectLoad  = bEnableL2DirectLoad;\n\n        this.mLocalPage            = ThreadLocal.withInitial(() -> {\n            Map<Object, V > neo;\n\n            if ( bUsingTree ) {\n                neo = new LinkedTreeMap<>( true ){\n                    @Override\n                    protected boolean removeEldestEntry( Map.Entry<Object, V > eldest ) {\n                        return ConcurrentMergeLRUDictCachePage.this.degradeLocalKey( this.size(), localCap, eldest );\n                    }\n                };\n            }\n            else {\n                neo = new LinkedHashMap<>( capacity, 0.75f, true ){\n                    @Override\n                    protected boolean removeEldestEntry( Map.Entry<Object, V > eldest ) {\n                        return ConcurrentMergeLRUDictCachePage.this.degradeLocalKey( this.size(), localCap, eldest );\n                    }\n                };\n            }\n            return neo;\n        });\n    }\n\n    public ConcurrentMergeLRUDictCachePage( long id, int capacity, boolean bUsingTree, boolean bEnableL2DirectLoad, Map<Object, V > initData, SourceRetriever<V > retriever ){\n        this( id, capacity, 
CacheConstants.DefaultCachePageLocalCapacity, bUsingTree, bEnableL2DirectLoad, initData, retriever );\n    }\n\n    public ConcurrentMergeLRUDictCachePage( long id, int capacity, int localCap, boolean bUsingTree, Map<Object, V > initData, SourceRetriever<V > retriever ){\n        this( id, capacity, localCap, bUsingTree, true, initData, retriever );\n    }\n\n    public ConcurrentMergeLRUDictCachePage( long id, int capacity, boolean bUsingTree, Map<Object, V > initData, SourceRetriever<V > retriever ){\n        this( id, capacity, CacheConstants.DefaultCachePageLocalCapacity, bUsingTree, true, initData, retriever );\n    }\n\n    public ConcurrentMergeLRUDictCachePage( long id, int capacity, SourceRetriever<V > retriever ){\n        this( id, capacity, CacheConstants.DefaultCachePageLocalCapacity, false, true, null, retriever );\n    }\n\n    public ConcurrentMergeLRUDictCachePage( int capacity, SourceRetriever<V > retriever ){\n        this( -1, capacity, retriever );\n    }\n\n\n\n    public void setEnableL2DirectLoad( boolean bEnableL2DirectLoad ) {\n        this.mbEnableL2DirectLoad = bEnableL2DirectLoad;\n    }\n\n    @Override\n    public long getId() {\n        return this.mnId;\n    }\n\n    @Override\n    public void setId( long id ) {\n        this.mnId = id;\n    }\n\n    @Override\n    public Dictium<V > getDictium() {\n        return this.mMegaCache;\n    }\n\n    @Override\n    public long capacity() {\n        return this.mnCapacity;\n    }\n\n    @Override\n    public long size() {\n        this.mMegaLock.readLock().lock();\n        try{\n            return this.mMegaCache.size();\n        }\n        finally {\n            this.mMegaLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    public boolean isEmpty() {\n        this.mMegaLock.readLock().lock();\n        try{\n            return this.mMegaCache.isEmpty();\n        }\n        finally {\n            this.mMegaLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    public 
V get( Object key ) {\n        V v = this.mLocalPage.get().get( key );\n        if ( v == null ) {\n            this.mMegaLock.readLock().lock();\n            try{\n                v = this.mMegaCache.get( key ); // Stage2, try Level-2 Cache retrieving. [Single page is thread-unsafe]\n            }\n            finally {\n                this.mMegaLock.readLock().unlock();\n            }\n\n            if( v == null ) {\n                // Stage3, try L3 Cache retrieving => L1 . [From superior thread-safe source, e.g. `Redis`, `RDB`]\n                //         OR L3 => [ L1, L2 ]\n                v = this.missKey( key );\n            }\n            else {\n                this.mLocalPage.get().put( key, v ); // L2 => L1\n            }\n        }\n\n        this.afterKeyVisited( key );\n        return v;\n    }\n\n    @Override\n    public V erase( Object key ) {\n        V v  = this.mLocalPage.get().remove( key );\n        V v1;\n\n        this.mMegaLock.writeLock().lock();\n        try{\n            v1 = this.mMegaCache.erase( key );\n        }\n        finally {\n            this.mMegaLock.writeLock().unlock();\n        }\n\n        this.afterKeyVisited( key );\n\n        if ( v == null ) {\n            return v1;\n        }\n        return v;\n    }\n\n    @Override\n    public boolean existsKey( Object key ) {\n        boolean b;\n        this.mMegaLock.readLock().lock();\n        try{\n            b = this.mMegaCache.containsKey( key );\n        }\n        finally {\n            this.mMegaLock.readLock().unlock();\n        }\n\n        if ( !b ) {\n            b = this.mSourceRetriever.countsKey( key ) > 0;\n            if ( b ) {\n                // Trigger cache-loading to ensure coherency.\n                b = this.get( key ) != null;\n            }\n        }\n\n        this.afterKeyVisited( key );\n        return b;\n    }\n\n    @Override\n    public boolean implicatesKey( Object key ) {\n        return this.existsKey( key );\n    }\n\n    @Override\n   
 public SourceRetriever<V> getSourceRetriever() {\n        return this.mSourceRetriever;\n    }\n\n    @Override\n    public void clear() {\n        this.mLocalPage.get().clear();\n\n        this.mMegaLock.writeLock().lock();\n        try {\n            this.mMegaCache.clear();\n        }\n        finally {\n            this.mMegaLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public long elementSize() {\n        return this.size();\n    }\n\n    @Override\n    public Set<? > entrySet() {\n        return this.getDictium().entrySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return this.getDictium().values();\n    }\n\n    @Override\n    protected V missKey( Object key ) {\n        this.recordMiss();\n        V v = this.mSourceRetriever.retrieve( key );\n        if( v != null ) {\n            // L3 => L1\n            this.mLocalPage.get().put( key, v );\n\n            if ( this.mbEnableL2DirectLoad ) {\n                // L3 => L2\n                this.mMegaLock.writeLock().lock();\n                try{\n                    this.mMegaCache.insert( key, v );\n                }\n                finally {\n                    this.mMegaLock.writeLock().unlock();\n                }\n            }\n        }\n        return v;\n    }\n\n\n    public static <V> ConcurrentMergeLRUDictCachePage<V> builder( long id, int capacity, SourceRetriever<V> retriever ) {\n        Builder<V > builder = new Builder<>( id, capacity, retriever );\n        return builder.build();\n    }\n\n    public static <V> ConcurrentMergeLRUDictCachePage<V> builder( int capacity, SourceRetriever<V> retriever ) {\n        Builder<V > builder = new Builder<>( capacity, retriever );\n        return builder.build();\n    }\n\n    public static class Builder<V > {\n        private final long                  id;\n        private final int                   capacity;\n        private final SourceRetriever<V >   retriever;\n\n        private int               
          localCap           = CacheConstants.DefaultCachePageLocalCapacity;\n        private boolean                     usingTree          = false;\n        private boolean                     enableL2DirectLoad = true;\n        private Map<Object, V >             initData           = null;\n\n\n        public Builder( long id, int capacity, SourceRetriever<V> retriever ) {\n            this.id        = id;\n            this.capacity  = capacity;\n            this.retriever = retriever;\n        }\n\n        public Builder( int capacity, SourceRetriever<V> retriever ) {\n            this( -1, capacity, retriever );\n        }\n\n\n        public Builder<V> localCap( int localCap ) {\n            this.localCap = localCap;\n            return this;\n        }\n\n        public Builder<V> usingTree( boolean bUsingTree ) {\n            this.usingTree = bUsingTree;\n            return this;\n        }\n\n        public Builder<V> enableL2DirectLoad( boolean bEnableL2DirectLoad ) {\n            this.enableL2DirectLoad = bEnableL2DirectLoad;\n            return this;\n        }\n\n        public Builder<V> initData( Map<Object, V> initData ) {\n            this.initData = initData;\n            return this;\n        }\n\n        public ConcurrentMergeLRUDictCachePage<V> build() {\n            return new ConcurrentMergeLRUDictCachePage<>(\n                    this.id,\n                    this.capacity,\n                    this.localCap,\n                    this.usingTree,\n                    this.enableL2DirectLoad,\n                    this.initData,\n                    this.retriever\n            );\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/CountDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\npublic interface CountDictCachePage<V > extends DictCachePage<V >, UniformCountDictCache<V > {\n    @Override\n    default long size(){\n        return this.elementSize();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/DictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport com.pinecone.slime.chunk.Page;\n\npublic interface DictCachePage<V > extends Page, UniformDictCache<V > {\n    @Override\n    default long size(){\n        return this.elementSize();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/DirectlySourceAccessCacheAdapter.java",
    "content": "package com.pinecone.slime.cache.query;\n\npublic class DirectlySourceAccessCacheAdapter<V > extends ArchCountDictCache<V > implements UniformCountSelfLoadingDictCache<V > {\n    private SourceRetriever<V > mSourceRetriever;\n\n    public DirectlySourceAccessCacheAdapter( SourceRetriever<V > retriever ) {\n        this.mSourceRetriever = retriever;\n    }\n\n    @Override\n    protected V missKey( Object key ) {\n        this.recordMiss();\n        return this.mSourceRetriever.retrieve( key );\n    }\n\n    @Override\n    public boolean implicatesKey( Object key ) {\n        return this.mSourceRetriever.countsKey( key ) > 0;\n    }\n\n    @Override\n    public long capacity() {\n        return 0;\n    }\n\n    @Override\n    public long size() {\n        return 0;\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return false;\n    }\n\n    @Override\n    public V get( Object key ) {\n        this.recordAccess();\n        return this.missKey( key );\n    }\n\n    @Override\n    public V erase( Object key ) {\n        return null; // Do nothing.\n    }\n\n    @Override\n    public boolean existsKey( Object key ) {\n        return this.implicatesKey( key );\n    }\n\n    @Override\n    public void clear() {\n        // Do nothing.\n    }\n\n    @Override\n    public SourceRetriever<V > getSourceRetriever() {\n        return this.mSourceRetriever;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/IterableDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.Set;\n\npublic interface IterableDictCachePage<V > extends CountDictCachePage<V >, Iterable {\n    default Iterator<? > iterator() {\n        return this.entrySet().iterator();\n    }\n\n    Set<? > entrySet();\n\n    Collection<V > values();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/LocalBufferedDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport com.pinecone.framework.unit.Dictium;\n\n/**\n * LocalBufferedDictCachePage\n * Only buffered, not self-loading\n * @param <V>\n */\npublic class LocalBufferedDictCachePage<V > extends ArchLocalDictCachePage<V > {\n    public LocalBufferedDictCachePage( long id, int capacity, Dictium<V > cache ) {\n        super( id, capacity, cache );\n    }\n\n    public LocalBufferedDictCachePage( int capacity, Dictium<V > cache ) {\n        this( -1, capacity, cache );\n    }\n\n    public LocalBufferedDictCachePage( Dictium<V > cache ) {\n        this( cache.size(), cache );\n    }\n\n    @Override\n    protected V missKey( Object key ) {\n        this.recordMiss();\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/LocalDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport com.pinecone.framework.unit.Dictium;\n\npublic interface LocalDictCachePage<V > extends CountDictCachePage<V > {\n    Dictium<V > getDictium();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/LocalFixedLRUDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.unit.MapDictium;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\npublic class LocalFixedLRUDictCachePage<V > extends ArchLocalDictCachePage<V > implements UniformCountSelfLoadingDictCache<V > {\n    protected static <V > Map<Object, V > newMap( boolean bUsingTree, int capacity, Map<Object, V > initData ) {\n        Map<Object, V > neo;\n\n        if( bUsingTree ) {\n            neo = new LinkedTreeMap<>( true ){\n                @Override\n                protected boolean removeEldestEntry( Map.Entry<Object, V > eldest ) {\n                    return this.size() > capacity;\n                }\n            };\n        }\n        else {\n            neo = new LinkedHashMap<>( capacity, 0.75f, true ){\n                @Override\n                protected boolean removeEldestEntry( Map.Entry<Object, V > eldest ) {\n                    return this.size() > capacity;\n                }\n            };\n        }\n\n        if( initData != null ) {\n            if( initData.size() > capacity ) {\n                throw new IllegalArgumentException( String.format( \"The initialization size[%d] exceeds the capacity[%d].\", initData.size(), capacity ) );\n            }\n            neo.putAll( initData );\n        }\n        return neo;\n    }\n\n    private SourceRetriever<V > mSourceRetriever;\n\n    public LocalFixedLRUDictCachePage( long id, int capacity, boolean bUsingTree, Map<Object, V > initData, SourceRetriever<V > retriever ) {\n        super( id, capacity, new MapDictium<>( LocalFixedLRUDictCachePage.newMap( bUsingTree, capacity, initData ) ) );\n\n        this.mSourceRetriever = retriever;\n    }\n\n    public LocalFixedLRUDictCachePage( int capacity, Map<Object, V > initData, SourceRetriever<V > retriever ) {\n        this( -1, capacity, false, initData, retriever );\n    }\n\n    public 
LocalFixedLRUDictCachePage( int capacity, SourceRetriever<V > retriever ) {\n        this(  capacity, null, retriever );\n    }\n\n    @Override\n    protected void afterKeyVisited( Object key ) {\n        super.afterKeyVisited( key );\n        // Since we used the `accessOrder`, the newest accessed key will auto moving to the top.\n    }\n\n    @Override\n    protected V missKey( Object key ) {\n        this.recordMiss();\n        V v = this.mSourceRetriever.retrieve( key );\n        if( v != null ) {\n            this.getDictium().insert( key, v );\n        }\n        return v;\n    }\n\n    @Override\n    public boolean implicatesKey( Object key ) {\n        return this.get( key ) != null;\n    }\n\n    @Override\n    public SourceRetriever<V > getSourceRetriever() {\n        return this.mSourceRetriever;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/RangedDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport com.pinecone.slime.unitization.PartialRange;\n\npublic interface RangedDictCachePage<V > extends CountDictCachePage<V > {\n    <T extends Comparable<T > > PartialRange<T > getRange();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/SourceRetriever.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface SourceRetriever<V > extends Pinenut {\n    V retrieve( Object key );\n\n    long countsKey( Object key );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/UniformCountDictCache.java",
    "content": "package com.pinecone.slime.cache.query;\n\npublic interface UniformCountDictCache<V > extends UniformDictCache<V > {\n    long getMisses();\n\n    long getAccesses();\n\n    default double getHitRate() {\n        double acc = (double)this.getAccesses();\n        return 1 - (double) this.getMisses() / acc;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/UniformCountSelfLoadingDictCache.java",
    "content": "package com.pinecone.slime.cache.query;\n\npublic interface UniformCountSelfLoadingDictCache<V > extends UniformCountDictCache<V >, UniformSelfLoadingDictCache<V > {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/UniformDictCache.java",
    "content": "package com.pinecone.slime.cache.query;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UniformDictCache<V > extends Pinenut {\n    long capacity();\n\n    long size();\n\n    boolean isEmpty();\n\n    V get( Object key );\n\n    boolean existsKey( Object key );\n\n    V erase( Object key );\n\n    void clear();\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/UniformSelfLoadingDictCache.java",
    "content": "package com.pinecone.slime.cache.query;\n\npublic interface UniformSelfLoadingDictCache<V > extends UniformDictCache<V > {\n    // Searches for the key in both the cache and the data-source.\n    // If the key is missed in the cache, self-loading from the data-source is triggered.\n    boolean implicatesKey( Object key );\n\n    SourceRetriever<V > getSourceRetriever();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/BatchPageSourceRetriever.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\nimport com.pinecone.slime.cache.query.RangedDictCachePage;\nimport com.pinecone.slime.cache.query.SourceRetriever;\nimport com.pinecone.slime.unitization.PartialRange;\n\npublic interface BatchPageSourceRetriever<V > extends SourceRetriever<V > {\n    String getRangeKey();\n\n    RangedDictCachePage<V > retrieves( Object key );\n\n    <T extends Comparable<T > > RangedDictCachePage<V > retrieves( Object key, PartialRange<T > range );\n\n    <T extends Comparable<T > > PartialRange<T > queryRangeOnly( Object key );\n\n    <T extends Comparable<T > > long counts( PartialRange<T > range );\n\n    long getBatchSize();\n\n    <T extends Comparable<T > > T nextRangeMax( T key );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/CountSelfPooledPageDictCache.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\nimport com.pinecone.slime.cache.query.UniformCountSelfLoadingDictCache;\n\npublic interface CountSelfPooledPageDictCache<V > extends UniformCountSelfLoadingDictCache<V >, PooledPageDictCache<V > {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/LocalHotspotPooledDictCache.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.unit.Mapnut;\nimport com.pinecone.framework.unit.top.LinkedMultiTreeToptron;\nimport com.pinecone.slime.cache.query.ArchCountDictCache;\nimport com.pinecone.slime.cache.query.RangedDictCachePage;\nimport com.pinecone.slime.unitization.PartialRange;\n\nimport java.util.Map;\n\npublic class LocalHotspotPooledDictCache<IK extends Comparable<IK >, V > extends ArchCountDictCache<V > implements CountSelfPooledPageDictCache<V > {\n    private   final int                                                            mnPagesCapacity;\n    private   final int                                                            mnPageCapacity;\n    private   final int                                                            mnTemperaturesCapacity;\n    protected Mapnut<PartialRange<IK >, RangedDictCachePage<V > >                  mPageQueuePool;        // Interval range search with O(log( SUM( Pages ) / Each ))\n    protected Mapnut<PartialRange<IK >, Long >                                     mTemperaturesRecord;   // Interval range search with O(log( SUM( Pages ) / Each ))\n    //protected Topper<Map.Entry<Long, RangedDictCachePage<V > > >                   mTopNTemperatures;     // Heap method to find top-N with O(log(N))\n    protected LinkedMultiTreeToptron<Long, PartialRange<IK >  >                    mTopNTemperatures;     // Tree method to find top-N with O(log(N))\n    protected final BatchPageSourceRetriever<V >                                   mSourceRetriever;\n\n    public LocalHotspotPooledDictCache( int nPageEachCapacity, int nPagesCapacity, int nTemperaturesCapacity, BatchPageSourceRetriever<V > retriever ) {\n        super();\n        this.mnPageCapacity   = nPageEachCapacity;\n        this.mnPagesCapacity  = nPagesCapacity;\n        this.mSourceRetriever = retriever;\n        if( nTemperaturesCapacity < this.mnPagesCapacity ) 
{\n            throw new IllegalArgumentException( \"TemperaturesRecordCapacity can`t below the PagesCapacity.\" );\n        }\n\n        this.mnTemperaturesCapacity   = nTemperaturesCapacity;\n        this.mPageQueuePool           = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator, true ); // With deque sequence access order.\n        this.mTemperaturesRecord      = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator, true ); // With deque sequence access order.\n//        this.mTopNTemperatures        = new HeapTopper<>( this.mnPagesCapacity, new Comparator<>() {\n//            @Override\n//            public int compare( Map.Entry<Long, RangedDictCachePage<V > > o1, Map.Entry<Long, RangedDictCachePage<V > > o2 ) {\n//                return o1.getKey().compareTo( o2.getKey() );\n//            }\n//        });\n\n        this.mTopNTemperatures        = new LinkedMultiTreeToptron< >( this.mnPagesCapacity, true );\n        // Select Top-Pages::Temperature as activated caches.\n    }\n\n    public LocalHotspotPooledDictCache( int nPageEachCapacity, int nPagesCapacity, BatchPageSourceRetriever<V > retriever ) {\n        this( nPageEachCapacity, nPagesCapacity, nPagesCapacity * 4, retriever );\n    }\n\n    protected void updateTopNTemperatures( TemperatureInfo info ) {\n        this.mTopNTemperatures.clear();\n        info.nLowestTemp  = Long.MAX_VALUE;\n        info.nHighestTemp = Long.MIN_VALUE;\n        for( Map.Entry<PartialRange<IK >, Long > kv : this.mTemperaturesRecord.entrySet() ) {\n            Long tp = kv.getValue();\n            if ( tp < info.nLowestTemp ) {\n                info.nLowestTemp = tp;\n                info.lowestEntry = kv;\n            }\n            if ( tp > info.nHighestTemp ) {\n                info.nHighestTemp = tp;\n                info.highestEntry = kv;\n            }\n            this.mTopNTemperatures.add( tp, kv.getKey() );\n            //this.mTopNTemperatures.add( new KeyValue<>( tp, 
this.mPageQueuePool.get( kv.getKey() ) ));\n        }\n\n        Mapnut<PartialRange<IK >, RangedDictCachePage<V > >   neoPool = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator, true );\n        //Collection<Map.Entry<Long, RangedDictCachePage<V > > > chosen = this.mTopNTemperatures.topmost();\n        info.nPooledLowestTemp  = Long.MAX_VALUE;\n        info.nPooledHighestTemp = Long.MIN_VALUE;\n        for( Map.Entry<Long, PartialRange<IK > > kv : this.mTopNTemperatures.collection()/*chosen*/ ) {\n            Long tp = kv.getKey();\n            PartialRange<IK > range = kv.getValue();\n            RangedDictCachePage<V > legacy = this.mPageQueuePool.get( range );\n            if( legacy == null ) {\n                // Restoring from history range.\n                RangedDictCachePage<V > recover = this.mSourceRetriever.retrieves( range );\n                neoPool.put( range, recover );\n            }\n            else {\n                neoPool.put( range, legacy  );\n            }\n\n            if ( tp < info.nPooledLowestTemp ) {\n                info.nPooledLowestTemp     = tp;\n                info.lowestPooledTopEntry  = kv;\n            }\n            if ( tp > info.nPooledHighestTemp ) {\n                info.nPooledHighestTemp    = tp;\n                info.highestPooledTopEntry = kv;\n            }\n        }\n\n        this.mPageQueuePool = neoPool;\n    }\n\n    protected void updateCacheTemperature( Object key ) {\n        Map.Entry<PartialRange<IK >, Long > tempInfo = this.mTemperaturesRecord.getEntryByKey( key );\n        if( tempInfo != null ) {\n            Long temperature = tempInfo.getValue();\n            ++temperature;\n            tempInfo.setValue( temperature );\n        }\n    }\n\n    @Override\n    protected void afterKeyVisited( Object key ) {\n        super.afterKeyVisited( key );\n    }\n\n    @Override\n    protected V missKey( Object key ) {\n        this.recordMiss();\n        TemperatureInfo info = new 
TemperatureInfo();\n        this.updateTopNTemperatures( info );\n\n        PartialRange<IK > range = this.mSourceRetriever.queryRangeOnly( key );\n        if( range != null ) {\n            Long temperature = this.mTemperaturesRecord.get( range );\n            if( temperature != null ) {\n                ++temperature;\n                this.mTemperaturesRecord.put( range, temperature );\n                if( temperature >= info.nPooledLowestTemp ) {\n                    RangedDictCachePage<V > page = this.mSourceRetriever.retrieves( key );\n                    if( this.mPageQueuePool.size() >= this.mnPagesCapacity ) {\n                        Map.Entry<Long, PartialRange<IK > > elimination = this.mTopNTemperatures.nextEviction();\n                        this.mPageQueuePool.remove( elimination.getValue() );\n                    }\n                    this.mTopNTemperatures.add( temperature, page.getRange() );  // Updating TopNTemperatures, and substitutes lowest LRU page.\n                    this.mPageQueuePool.put( range, page );\n                    return page.get( key );\n                }\n\n                return this.mSourceRetriever.retrieve( key ); // Don`t using cache.\n            }\n            else {\n                temperature = 1L;\n                if( this.mPageQueuePool.size() < this.mnPagesCapacity ) {\n                    RangedDictCachePage<V > page = this.mSourceRetriever.retrieves( key );\n                    this.mTopNTemperatures.add( temperature, page.getRange() );\n                    this.mPageQueuePool.put( range, page );\n                    this.mTemperaturesRecord.put( range, temperature ); // TemperaturesRecord.size should beyond PageQueuePool.size\n                    return page.get( key );\n                }\n                else {\n                    if( this.mTemperaturesRecord.size() >= this.mnTemperaturesCapacity ) {\n                        this.mTemperaturesRecord.remove( info.lowestEntry.getKey() );// Substituting lowest 
one in record.\n                        temperature = info.lowestEntry.getValue();\n                    }\n                    this.mTemperaturesRecord.put( range, temperature );\n                    return this.mSourceRetriever.retrieve( key ); // Don`t using cache.\n                }\n            }\n        }\n\n        return null;\n    }\n\n    @Override\n    public long capacity() {\n        return this.mnPageCapacity * this.mnPagesCapacity;\n    }\n\n    @Override\n    public long getPooledPagesCapacity() {\n        return this.mnPagesCapacity;\n    }\n\n    @Override\n    public long size() {\n        return PoolCaches.countPoolSize( this.mPageQueuePool );\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mPageQueuePool.isEmpty();\n    }\n\n    protected V getFromCache( Object key ) {\n        for( Map.Entry<PartialRange<IK >, RangedDictCachePage<V > > kv : this.mPageQueuePool.entrySet() ) {\n            V v = kv.getValue().get( key );\n            if( v != null ) {\n                return v;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public V erase( Object key ) {\n        for( Map.Entry<PartialRange<IK >, RangedDictCachePage<V > > kv : this.mPageQueuePool.entrySet() ) {\n            V v = kv.getValue().erase( key );\n            if( v != null ) {\n                return v;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public V get( Object key ) {\n        V v = this.getFromCache( key );\n        if( v == null ) {\n            //Debug.trace( key );\n            v = this.missKey( key ); // Update miss temperature\n        }\n        else {\n            this.updateCacheTemperature( key ); // Update cache temperature\n        }\n        this.afterKeyVisited( key );\n        return v;\n    }\n\n    @Override\n    public boolean existsKey( Object key ) {\n        boolean b = this.getFromCache( key ) != null;\n        this.afterKeyVisited( key );\n        return b;\n    
}\n\n    @Override\n    public boolean implicatesKey( Object key ) {\n        return this.get( key ) != null;\n    }\n\n    @Override\n    public void clear() {\n        this.mPageQueuePool.clear();\n    }\n\n    @Override\n    public BatchPageSourceRetriever<V > getSourceRetriever() {\n        return this.mSourceRetriever;\n    }\n\n    class TemperatureInfo {\n        long nLowestTemp        = Long.MAX_VALUE;\n        long nHighestTemp       = Long.MIN_VALUE;\n        long nPooledLowestTemp  = Long.MAX_VALUE;\n        long nPooledHighestTemp = Long.MIN_VALUE;\n\n        Map.Entry<PartialRange<IK >, Long > lowestEntry;\n        Map.Entry<PartialRange<IK >, Long > highestEntry;\n        Map.Entry<Long, PartialRange<IK > > lowestPooledTopEntry; // In top-N unit.\n        Map.Entry<Long, PartialRange<IK > > highestPooledTopEntry; // In top-N unit.\n    }\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/LocalLRUPooledDictCache.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\nimport com.pinecone.framework.unit.LinkedTreeMap;\n\nimport com.pinecone.slime.cache.query.ArchCountDictCache;\nimport com.pinecone.slime.cache.query.RangedDictCachePage;\nimport com.pinecone.slime.unitization.PartialRange;\n\nimport java.util.Map;\n\npublic class LocalLRUPooledDictCache<IK extends Comparable<IK >, V > extends ArchCountDictCache<V > implements CountSelfPooledPageDictCache<V >  {\n    private   final int                                                 mnPagesCapacity;\n    private   final int                                                 mnPageCapacity;\n    protected final Map<PartialRange<IK >, RangedDictCachePage<V >>     mPageQueuePool;\n    protected final BatchPageSourceRetriever<V >                        mSourceRetriever;\n\n    public LocalLRUPooledDictCache( int nPageEachCapacity, int nPagesCapacity, BatchPageSourceRetriever<V > retriever ) {\n        super();\n        this.mnPageCapacity   = nPageEachCapacity;\n        this.mnPagesCapacity  = nPagesCapacity;\n        this.mSourceRetriever = retriever;\n        this.mPageQueuePool   = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator ) {\n            @Override\n            protected boolean removeEldestEntry( Map.Entry<PartialRange<IK >, RangedDictCachePage<V > > eldest ) {\n                return this.size() > LocalLRUPooledDictCache.this.mnPagesCapacity;\n            }\n        };\n    }\n\n    @Override\n    protected void afterKeyVisited( Object key ) {\n        super.afterKeyVisited( key );\n        // Since we used the `accessOrder`, the newest accessed key will auto moving to the top.\n    }\n\n    @Override\n    protected V missKey( Object key ) {\n        this.recordMiss();\n        RangedDictCachePage<V > page = this.mSourceRetriever.retrieves( key );\n        if( page != null ) {\n            this.mPageQueuePool.put( page.getRange(), page );\n            return page.get( key );\n        }\n        
return null;\n    }\n\n    @Override\n    public long capacity() {\n        return this.mnPageCapacity * this.mnPagesCapacity;\n    }\n\n    @Override\n    public long getPooledPagesCapacity() {\n        return this.mnPagesCapacity;\n    }\n\n    @Override\n    public long size() {\n        return PoolCaches.countPoolSize( this.mPageQueuePool );\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mPageQueuePool.isEmpty();\n    }\n\n    protected V getFromCache( Object key ) {\n        for( Map.Entry<PartialRange<IK >, RangedDictCachePage<V > > kv : this.mPageQueuePool.entrySet() ) {\n            V v = kv.getValue().get( key );\n            if( v != null ) {\n                return v;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public V get( Object key ) {\n        V v = this.getFromCache( key );\n        if( v == null ) {\n            v = this.missKey( key );\n        }\n        this.afterKeyVisited( key );\n        return v;\n    }\n\n    @Override\n    public V erase( Object key ) {\n        for( Map.Entry<PartialRange<IK >, RangedDictCachePage<V > > kv : this.mPageQueuePool.entrySet() ) {\n            V v = kv.getValue().erase( key );\n            if( v != null ) {\n                return v;\n            }\n        }\n        return null;\n    }\n\n    @Override\n    public boolean existsKey( Object key ) {\n        boolean b = this.getFromCache( key ) != null;\n        this.afterKeyVisited( key );\n        return b;\n    }\n\n    @Override\n    public boolean implicatesKey( Object key ) {\n        return this.get( key ) != null;\n    }\n\n    @Override\n    public void clear() {\n        this.mPageQueuePool.clear();\n    }\n\n    @Override\n    public BatchPageSourceRetriever<V > getSourceRetriever() {\n        return this.mSourceRetriever;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/LocalLRUPrimaryPooledDictCache.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\nimport com.pinecone.slime.cache.query.RangedDictCachePage;\n\n/**\n * For the case where the index-key is the same as the dict-cache-key\n * e.g. RDB::id( Auto-Increment ) as the Range-Key and the Cache-Key\n *    [000-100] => [object { 0 => key0, 1 => key1, ..., 100 => key100 }]\n *    [100-200] => [object { 100 => key100, 101 => key101, ..., 200 => key200 }]\n * In this example: Find( key ) => O(log(pages))(TreeMap) + O(1)(HashMap)\n */\npublic class LocalLRUPrimaryPooledDictCache<IK extends Comparable<IK >, V > extends LocalLRUPooledDictCache<IK, V > {\n    public LocalLRUPrimaryPooledDictCache( int nPageEachCapacity, int nPagesCapacity, BatchPageSourceRetriever<V > retriever ) {\n        super( nPageEachCapacity, nPagesCapacity, retriever );\n    }\n\n    @Override\n    protected V getFromCache( Object key ) {\n        RangedDictCachePage<V > page = this.mPageQueuePool.get( key );\n        if( page != null ) {\n            return page.get( key );\n        }\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/LocalRangedDictCachePage.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\nimport com.pinecone.framework.unit.Dictium;\nimport com.pinecone.slime.cache.query.LocalBufferedDictCachePage;\nimport com.pinecone.slime.cache.query.RangedDictCachePage;\nimport com.pinecone.slime.unitization.PartialRange;\n\npublic class LocalRangedDictCachePage<V > extends LocalBufferedDictCachePage<V > implements RangedDictCachePage<V > {\n    protected PartialRange   mRange;\n\n    public <T extends Comparable<T > > LocalRangedDictCachePage( long id, int capacity, Dictium<V > cache, PartialRange<T > range ) {\n        super( id, capacity, cache );\n\n        this.mRange        = range;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public <T extends Comparable<T > > PartialRange<T > getRange() {\n        return (PartialRange<T >) this.mRange;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/PoolCaches.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\nimport com.pinecone.slime.cache.query.RangedDictCachePage;\nimport com.pinecone.slime.unitization.PartialRange;\n\nimport java.util.Map;\n\npublic final class PoolCaches {\n    public static <IK extends Comparable<IK >, V > long countPoolSize( Map<PartialRange<IK >, RangedDictCachePage<V >> pool ) {\n        long n = 0;\n        for( Map.Entry<PartialRange<IK >, RangedDictCachePage<V >> kv : pool.entrySet() ) {\n            n += kv.getValue().size();\n        }\n        return n;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/PooledPageDictCache.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\nimport com.pinecone.slime.cache.query.UniformDictCache;\n\npublic interface PooledPageDictCache<V > extends UniformDictCache<V > {\n    long getPooledPagesCapacity();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cache/query/pool/PrimaryPooledDictCache.java",
    "content": "package com.pinecone.slime.cache.query.pool;\n\npublic interface PrimaryPooledDictCache {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/ArchPatriarchalChunk.java",
    "content": "package com.pinecone.slime.chunk;\n\npublic abstract class ArchPatriarchalChunk implements PatriarchalChunk {\n    protected ArchPatriarchalChunk mParent;\n\n    protected ArchPatriarchalChunk(){\n\n    }\n\n    protected ArchPatriarchalChunk( ArchPatriarchalChunk parent ) {\n        this.mParent = parent;\n    }\n\n    @Override\n    public PatriarchalChunk parent() {\n        return this.mParent;\n    }\n\n\n    @Override\n    public void setParent( PatriarchalChunk parent ){\n        this.mParent = (ArchPatriarchalChunk) parent;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Chunk.java",
    "content": "package com.pinecone.slime.chunk;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Chunk extends Pinenut {\n    long getId();\n\n    void setId( long id );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/ContiguousPage.java",
    "content": "package com.pinecone.slime.chunk;\n\npublic interface ContiguousPage extends Page, Continunk {\n    void apply( Object... args );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Continunk.java",
    "content": "package com.pinecone.slime.chunk;\n\nimport com.pinecone.slime.unitization.Range;\nimport com.pinecone.slime.unitization.Precision;\n\n/**\n * Continuum Chunk\n */\npublic interface Continunk extends Chunk {\n    Range getRange();\n\n    Precision size();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/DiscreteChunk.java",
    "content": "package com.pinecone.slime.chunk;\n\nimport com.pinecone.slime.unitization.Precision;\n\npublic interface DiscreteChunk extends Chunk {\n    Precision size();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/DivisibleChunk.java",
    "content": "package com.pinecone.slime.chunk;\n\n/**\n * Divisible Chunk [Slime]\n */\npublic interface DivisibleChunk extends Chunk {\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Frame.java",
    "content": "package com.pinecone.slime.chunk;\n\npublic interface Frame extends Minimunk {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Minimunk.java",
    "content": "package com.pinecone.slime.chunk;\n\n/**\n * Minimum Chunk\n */\npublic interface Minimunk extends Chunk {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Page.java",
    "content": "package com.pinecone.slime.chunk;\n\npublic interface Page extends Chunk {\n    long elementSize();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/PatriarchalChunk.java",
    "content": "package com.pinecone.slime.chunk;\n\npublic interface PatriarchalChunk extends Chunk {\n    PatriarchalChunk parent();\n\n    default PatriarchalChunk root() {\n        PatriarchalChunk p = this.parent();\n        if( p == null ) {\n            return this;\n        }\n\n        return p.root();\n    }\n\n    void setParent( PatriarchalChunk parent );\n\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/RangedChunk64.java",
    "content": "package com.pinecone.slime.chunk;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport com.pinecone.slime.unitization.MinMaxRange64;\nimport com.pinecone.slime.unitization.Precision64;\n\npublic abstract class RangedChunk64 extends ArchPatriarchalChunk implements Splitunk {\n    protected long             mnId;\n    protected MinMaxRange64    mRange;\n    protected Precision64      mChunkSize;\n\n    protected RangedChunk64(){\n        super();\n    }\n\n    public RangedChunk64( long nStart, long nEnd, long id, RangedChunk64 parent ) {\n        super( parent );\n        this.applyMembers( nStart, nEnd, id, parent );\n    }\n\n    public RangedChunk64( long nStart, long nEnd, long id ) {\n        this( nStart, nEnd, id, null );\n    }\n\n    protected void applyMembers( long nStart, long nEnd, long id, RangedChunk64 parent ) {\n        this.mParent    = parent;\n        this.mnId   = id;\n        this.mRange     = new MinMaxRange64( nStart, nEnd );\n        this.mChunkSize = new Precision64( (long)this.mRange.span() );\n    }\n\n    @Override\n    public long getId() {\n        return this.mnId;\n    }\n\n    @Override\n    public void setId( long id ) {\n        this.mnId = id;\n    }\n\n    @Override\n    public MinMaxRange64 getRange() {\n        return this.mRange;\n    }\n\n    @Override\n    public Precision64 size(){\n        return this.mChunkSize;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"class\", this.className() ),\n                new KeyValue<>( \"min\", this.getRange().getMin() ),\n                new KeyValue<>( \"max\", this.getRange().getMax() )\n        } );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/RangedPage.java",
    "content": "package com.pinecone.slime.chunk;\n\nimport com.pinecone.slime.unitization.NumPrecision;\n\npublic interface RangedPage extends ContiguousPage {\n    @Override\n    NumPrecision size();\n\n    @Override\n    default long elementSize() {\n        return this.size().longValue();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/RangedPage64.java",
    "content": "package com.pinecone.slime.chunk;\n\npublic class RangedPage64 extends RangedChunk64 implements RangedPage {\n    public RangedPage64(){\n        super();\n    }\n\n    public RangedPage64( long nStart, long nEnd, long id, RangedChunk64 parent ) {\n        super( nStart, nEnd, id, parent );\n    }\n\n    public RangedPage64( long nStart, long nEnd, long id ) {\n        super( nStart, nEnd, id );\n    }\n\n    public void apply( long nStart, long nEnd, long id, RangedChunk64 parent ) {\n        this.applyMembers( nStart, nEnd, id, parent );\n    }\n\n    @Override\n    public void apply( Object... args ) {\n        if( args.length == 0 ) {\n            return;\n        }\n        else if( args.length >= 3 ) {\n            long nStart          = ((Number) args[0]).longValue();\n            long nEnd            = ((Number) args[1]).longValue();\n            long id              = ((Number) args[2]).longValue();\n            RangedChunk64 parent  = null;\n            if( args.length >= 4 ){\n                parent = (RangedChunk64) args[3];\n            }\n\n            this.applyMembers( nStart, nEnd, id, parent );\n            return;\n        }\n\n        throw new IllegalArgumentException( \"RangedPage64 only be applied with 0, 3 and 4 arguments.\" );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/Splitunk.java",
    "content": "package com.pinecone.slime.chunk;\n\npublic interface Splitunk extends Continunk, DivisibleChunk {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/ArchMasterSplitunkPartitioner64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.Splitunk;\n\npublic abstract class ArchMasterSplitunkPartitioner64 implements ChunkPartitioner {\n    protected Splitunk mMasterChunk;\n\n    protected ArchMasterSplitunkPartitioner64( Splitunk masterChunk ) {\n        this.mMasterChunk = masterChunk;\n    }\n\n    @Override\n    public Splitunk getMasterChunk(){\n        return this.mMasterChunk;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/BuddyPrepPartitionDividerStrategy64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.cluster.SequentialChunkGroup;\nimport com.pinecone.slime.chunk.Chunk;\n\nimport java.util.List;\n\npublic class BuddyPrepPartitionDividerStrategy64 implements PartitionDividerStrategy {\n    protected long mnMaxPerPage;\n    protected int  mnBootstrapDivFactor;\n    protected long mnMinThresholdPerPage;\n\n    public BuddyPrepPartitionDividerStrategy64( long nMaxPerPage, int nBootstrapDivFactor, long nMinThresholdPerPage ) {\n        this.mnMaxPerPage          = nMaxPerPage;\n        this.mnBootstrapDivFactor  = nBootstrapDivFactor;\n        this.mnMinThresholdPerPage = nMinThresholdPerPage;\n    }\n\n    public BuddyPrepPartitionDividerStrategy64( long nMaxPerPage, long nMinThresholdPerPage ) {\n        this( nMaxPerPage, 2, nMinThresholdPerPage );\n    }\n\n    @Override\n    public SequentialChunkGroup assignment(SequentialChunkGroup group ) {\n        List<Chunk > chunks = (List<Chunk >) group.getSequentialChunks();\n        long each = this.mnMaxPerPage;\n        for ( int i = 0; i < chunks.size(); ++i ) {\n            ( (PreparedPageDividerPartition64)chunks.get( i ) ).setEachPerPage( each );\n\n            each = Math.max( this.mnMinThresholdPerPage , each / this.mnBootstrapDivFactor );\n        }\n        return group;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/ChunkPartitioner.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.slime.chunk.DivisibleChunk;\n\npublic interface ChunkPartitioner extends Pinenut {\n    DivisibleChunk getMasterChunk();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/EvenSeqChunkPartitioner64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.RangedPage64;\nimport com.pinecone.slime.chunk.scheduler.DirectPagePool;\nimport com.pinecone.slime.chunk.scheduler.FixedPageDivider64;\nimport com.pinecone.slime.cluster.Cluster;\nimport com.pinecone.slime.cluster.SequentialChunkGroup;\nimport com.pinecone.slime.unitization.NumPrecision;\nimport com.pinecone.slime.chunk.Chunk;\nimport com.pinecone.slime.chunk.Splitunk;\n\npublic abstract class EvenSeqChunkPartitioner64 extends ArchMasterSplitunkPartitioner64 {\n    protected long                   mnGroups;\n    protected long                   mnEach;\n    protected FixedPageDivider64 mDivider;\n\n    protected EvenSeqChunkPartitioner64( Splitunk masterChunk, long nGroups ) {\n        super( masterChunk );\n\n        this.mnGroups = nGroups;\n        this.mnEach   = ((NumPrecision)this.getMasterChunk().size()).longValue() / this.mnGroups;\n        this.mDivider = new FixedPageDivider64( this.getMasterChunk(), new DirectPagePool( RangedPage64.class ), this.mnEach );\n    }\n\n    protected abstract Cluster newCluster(long nMin, long nMax, long id );\n\n    protected abstract SequentialChunkGroup newGroup();\n\n    public SequentialChunkGroup partition() {\n        SequentialChunkGroup group = this.newGroup();\n\n        for ( long i = 0; i < this.mnGroups; i++ ) {\n            Chunk c         = this.mDivider.allocate();\n            RangedPage64 tp = (RangedPage64) c;\n\n            group.add( this.newCluster( tp.getRange().getMin(), tp.getRange().getMax(), i ) );\n        }\n\n        return group;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PageCluster.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\nimport com.pinecone.slime.cluster.RangedCluster;\n\npublic interface PageCluster extends RangedCluster {\n    boolean hasOwnPage( ContiguousPage that );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PageDividerPartition64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\n\npublic interface PageDividerPartition64 extends PagePartition {\n    long pagesSize();\n\n    long eachPerPage();\n\n    void inheritRange( ContiguousPage that );\n\n    void setEachPerPage( long eachPerPage );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PageGroup.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.cluster.ChunkGroup;\nimport com.pinecone.slime.chunk.PatriarchalChunk;\n\npublic interface PageGroup extends PatriarchalChunk, ChunkGroup {\n    boolean hasOwnPartition( PagePartition that );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PagePartition.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.PatriarchalChunk;\nimport com.pinecone.slime.chunk.Splitunk;\n\npublic interface PagePartition extends PageCluster, PatriarchalChunk, Splitunk {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PagePartitionGroup.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\npublic interface PagePartitionGroup extends PageGroup {\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PagePartitioner.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\npublic interface PagePartitioner extends ChunkPartitioner {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PartitionDividerStrategy.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.cluster.SequentialChunkGroup;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface PartitionDividerStrategy extends Pinenut {\n    SequentialChunkGroup assignment(SequentialChunkGroup group );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PartitionableChunkDivider64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.scheduler.ArchMasterSplitunkDivider64;\nimport com.pinecone.slime.chunk.scheduler.BadAllocateException;\nimport com.pinecone.slime.chunk.Chunk;\nimport com.pinecone.slime.chunk.Splitunk;\nimport com.pinecone.slime.chunk.scheduler.ChunkDivider;\nimport com.pinecone.slime.chunk.scheduler.FixedChunkDivider64;\nimport com.pinecone.slime.cluster.SequentialChunkGroup;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic abstract class PartitionableChunkDivider64 extends ArchMasterSplitunkDivider64 implements ChunkDivider {\n    protected SequentialChunkGroup         mChunkGroup;\n    protected List<FixedChunkDivider64>   mPartitionsOwnedDivider;\n    protected int                          mnCurrentPartDivider;\n\n    protected long                         mnCurrentEpoch;\n    protected long                         mnMaxAllocations;\n\n    public PartitionableChunkDivider64( Splitunk masterChunk, SequentialChunkGroup chunkGroup ) {\n        super( masterChunk );\n\n        this.mnCurrentEpoch          = 0;\n        this.mChunkGroup             = chunkGroup;\n        this.mnCurrentPartDivider    = 0;\n    }\n\n    protected abstract FixedChunkDivider64 newDivider( Splitunk masterChunk, long each );\n\n    protected void preparePartitionsOwnedDivider() {\n        this.mPartitionsOwnedDivider = new ArrayList<>();\n        List<Chunk> chunks = (List<Chunk >) this.mChunkGroup.getSequentialChunks();\n        for ( int i = 0; i < chunks.size(); ++i ) {\n            PreparedPageDividerPartition64 partition = (PreparedPageDividerPartition64)chunks.get( i );\n            FixedChunkDivider64 divider = this.newDivider(\n                    partition, partition.eachPerPage()\n            );\n\n            this.mPartitionsOwnedDivider.add( divider );\n            this.mnMaxAllocations += divider.getMaxAllocations();\n        }\n    }\n\n    @Override\n    public long 
getMaxAllocations() {\n        return this.mnMaxAllocations;\n    }\n\n    @Override\n    public long remainAllocatable(){\n        return this.mnMaxAllocations - this.mnCurrentEpoch;\n    }\n\n    @Override\n    public Chunk allocate() throws BadAllocateException {\n        if( this.mnCurrentEpoch < this.getMaxAllocations() ) {\n            FixedChunkDivider64 divider = this.mPartitionsOwnedDivider.get( this.mnCurrentPartDivider );\n            if( divider.remainAllocatable() == 0 ){\n                ++this.mnCurrentPartDivider;\n                divider = this.mPartitionsOwnedDivider.get( this.mnCurrentPartDivider );\n            }\n\n            Chunk chunk = divider.allocate();\n            chunk.setId( this.mnCurrentEpoch );\n\n            ++this.mnCurrentEpoch;\n\n            return chunk;\n        }\n\n        throw new BadAllocateException();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PartitionablePageDivider64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.scheduler.FixedPageDivider64;\nimport com.pinecone.slime.chunk.scheduler.PageDivider;\nimport com.pinecone.slime.cluster.SequentialChunkGroup;\nimport com.pinecone.slime.chunk.Chunk;\nimport com.pinecone.slime.chunk.Splitunk;\nimport com.pinecone.slime.chunk.scheduler.FixedChunkDivider64;\nimport com.pinecone.slime.chunk.scheduler.PagePool;\n\npublic class PartitionablePageDivider64 extends PartitionableChunkDivider64 implements PageDivider {\n    protected PagePool                  mPagePool;\n    protected long                      mnPageIdOffset;\n\n    public PartitionablePageDivider64(Splitunk masterChunk, PagePool pagePool, SequentialChunkGroup chunkGroup, long pageIdOffset ) {\n        super( masterChunk, chunkGroup );\n        this.mPagePool        = pagePool;\n        this.mnPageIdOffset   = pageIdOffset;\n\n        this.preparePartitionsOwnedDivider();\n    }\n\n    public PartitionablePageDivider64( Splitunk masterChunk, PagePool pagePool, SequentialChunkGroup chunkGroup ) {\n        this( masterChunk, pagePool, chunkGroup, 0 );\n    }\n\n    @Override\n    protected Chunk newChunk( long start, long end, long epoch ) {\n        return this.mPagePool.allocate( start, end, this.mnPageIdOffset + this.mnCurrentEpoch, this.mMasterChunk );\n    }\n\n    @Override\n    protected FixedChunkDivider64 newDivider( Splitunk masterChunk, long each ) {\n        return new FixedPageDivider64( masterChunk, this.getPagePool(), each );\n    }\n\n    @Override\n    public PagePool getPagePool() {\n        return this.mPagePool;\n    }\n\n    @Override\n    public long getPageIdOffset() {\n        return this.mnPageIdOffset;\n    }\n\n    @Override\n    public void setPageIdOffset( long offset ) {\n        this.mnPageIdOffset = offset;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PreparedEvenSeqPagePartitioner64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.RangedChunk64;\nimport com.pinecone.slime.chunk.Splitunk;\n\npublic class PreparedEvenSeqPagePartitioner64 extends EvenSeqChunkPartitioner64 {\n    public PreparedEvenSeqPagePartitioner64( Splitunk masterChunk, long nGroups ) {\n        super( masterChunk, nGroups );\n    }\n\n    @Override\n    protected PreparedPageDividerPartition64 newCluster( long nMin, long nMax, long id ) {\n        return new PreparedPageDividerPartition64( nMin, nMax, id, 1, (RangedChunk64) this.getMasterChunk() );\n    }\n\n    @Override\n    protected SequentialPagePartitionGroup64 newGroup() {\n        return new SequentialPagePartitionGroup64();\n    }\n\n    @Override\n    public SequentialPagePartitionGroup64 partition() {\n        return (SequentialPagePartitionGroup64)super.partition();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/PreparedPageDividerPartition64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.chunk.RangedChunk64;\nimport com.pinecone.slime.chunk.RangedPage;\nimport com.pinecone.slime.unitization.MinMaxRange;\nimport com.pinecone.slime.chunk.ContiguousPage;\n\npublic class PreparedPageDividerPartition64 extends RangedChunk64 implements PageDividerPartition64 {\n    protected long     mnPagesSize;\n    protected long     mnEachPerPage;\n\n    public PreparedPageDividerPartition64( long nStart, long nEnd, long id, long each, RangedChunk64 parent ) {\n        super( nStart, nEnd, id, parent );\n\n        this.mnEachPerPage = each;\n        this.update_page_size();\n    }\n\n    public PreparedPageDividerPartition64(ContiguousPage inheritedIntegratedPage, long id, long each, RangedChunk64 parent ) {\n        this( 0, 0, id, each, parent );\n\n        this.inheritRange( inheritedIntegratedPage );\n        this.update_page_size();\n    }\n\n    public PreparedPageDividerPartition64(ContiguousPage inheritedIntegratedPage, long id, long each ) {\n        this( inheritedIntegratedPage, id, each, null );\n    }\n\n    protected void update_page_size() {\n        this.mnPagesSize   = (this.getRange().span() + this.mnEachPerPage - 1) / this.mnEachPerPage;\n    }\n\n    @Override\n    public long pagesSize() {\n        return this.mnPagesSize;\n    }\n\n    @Override\n    public long eachPerPage() {\n        return this.mnEachPerPage;\n    }\n\n    @Override\n    public void setEachPerPage( long eachPerPage ) {\n        this.mnEachPerPage = eachPerPage;\n    }\n\n    @Override\n    public void inheritRange( ContiguousPage that ) {\n        MinMaxRange range      = (MinMaxRange) that.getRange();\n        this.mRange.setRange( range.getMin(), range.getMax() );\n    }\n\n    @Override\n    public PageDividerPartition64 parent() {\n        return (PageDividerPartition64)this.mParent;\n    }\n\n    @Override\n    public boolean hasOwnPage( ContiguousPage that ) {\n        RangedPage 
rangedPage = (RangedPage) that;\n        return this.getRange().contains( rangedPage.getRange() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/SequentialPagePartitionGroup.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.cluster.SequentialChunkGroup;\n\npublic interface SequentialPagePartitionGroup extends SequentialChunkGroup, PagePartitionGroup {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/marshaling/SequentialPagePartitionGroup64.java",
    "content": "package com.pinecone.slime.chunk.marshaling;\n\nimport com.pinecone.slime.cluster.ArchSequentialChunkGroup;\nimport com.pinecone.slime.chunk.ArchPatriarchalChunk;\nimport com.pinecone.slime.chunk.PatriarchalChunk;\n\n\npublic class SequentialPagePartitionGroup64 extends ArchSequentialChunkGroup implements PatriarchalChunk, SequentialPagePartitionGroup {\n    protected ArchPatriarchalChunk mParent;\n\n    public SequentialPagePartitionGroup64() {\n        super();\n    }\n\n    @Override\n    public PatriarchalChunk parent() {\n        return this.mParent;\n    }\n\n    @Override\n    public void setParent( PatriarchalChunk parent ){\n        this.mParent = (ArchPatriarchalChunk) parent;\n    }\n\n    @Override\n    public PagePartitionGroup getFirstChunkById( long id ){\n        return (PagePartitionGroup) super.getFirstChunkById( id );\n    }\n\n    @Override\n    public boolean hasOwnPartition( PagePartition that ) {\n        return this.mChunkRegister.containsKey( that.getId() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ActivePageScheduler.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\n\npublic interface ActivePageScheduler extends RangedPageScheduler {\n    ContiguousPage activate();\n\n    void activate( ContiguousPage that );\n\n    void deactivate( ContiguousPage that );\n\n    void deactivate( ContiguousPage[] those );\n\n    long getActivatedSize();\n\n    ContiguousPage getPageById(long id );\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ActivePageScheduler64.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\nimport com.pinecone.slime.chunk.RangedPage;\n\nimport java.util.ArrayList;\nimport java.util.Map;\n\npublic abstract class ActivePageScheduler64 extends RangedPageScheduler64 implements ActivePageScheduler {\n    protected ChunkRegister<Long, ContiguousPage> mChunkRegister;\n\n    protected ActivePageScheduler64( RangedPage masterPage, PagePool pagePool, PageDivider divider, long autoIncrementId ) {\n        super( masterPage, pagePool, divider, autoIncrementId );\n    }\n\n    protected ActivePageScheduler64( PageDivider divider, long autoIncrementId ) {\n        super( divider, autoIncrementId );\n    }\n\n    @Override\n    protected void beforeActivatePage() {\n        if( this.mRecycleStrategy == null ) {\n            return;\n        }\n\n        ArrayList<ContiguousPage> badPages = null;\n        for ( Map.Entry kv : this.mChunkRegister.entrySet() ) {\n            if( this.mRecycleStrategy.qualified( (ContiguousPage)kv.getValue() )  ) {\n                if( badPages == null ) {\n                    badPages = new ArrayList<>();\n                }\n\n                badPages.add( (ContiguousPage)kv.getValue() );\n            }\n        }\n\n        if( badPages != null ) {\n            for ( ContiguousPage p : badPages ) {\n                this.deactivate( p );\n            }\n        }\n    }\n\n    @Override\n    public ContiguousPage activate() {\n        this.beforeActivatePage();\n\n        ContiguousPage page = (ContiguousPage) this.getDivider().allocate();\n        this.activate( page );\n        return page;\n    }\n\n    @Override\n    public void activate( ContiguousPage that ) {\n        this.mChunkRegister.put( that.getId(), that );\n    }\n\n    @Override\n    public void deactivate( ContiguousPage that ) {\n        this.mChunkRegister.remove( that.getId() );\n        this.mPagePool.deallocate( that );\n    }\n\n    @Override\n    
public void deactivate( ContiguousPage[] those ){\n        for ( ContiguousPage p : those ) {\n            this.deactivate( p );\n        }\n    }\n\n    @Override\n    public long getActivatedSize() {\n        return this.mChunkRegister.size();\n    }\n\n    @Override\n    public ContiguousPage getPageById(long id ){\n        return this.mChunkRegister.get( id );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ArchMasterSplitunkDivider64.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.Chunk;\nimport com.pinecone.slime.chunk.Splitunk;\n\npublic abstract class ArchMasterSplitunkDivider64 implements ChunkDivider {\n    protected Splitunk      mMasterChunk;\n\n    protected ArchMasterSplitunkDivider64( Splitunk masterChunk ) {\n        this.mMasterChunk = masterChunk;\n    }\n\n    protected abstract Chunk newChunk( long start, long end, long epoch );\n\n    @Override\n    public Splitunk getMasterChunk(){\n        return this.mMasterChunk;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/BadAllocateException.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class BadAllocateException extends PineRuntimeException {\n    public BadAllocateException    () {\n        super();\n    }\n\n    public BadAllocateException    ( String message ) {\n        super(message);\n    }\n\n    public BadAllocateException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public BadAllocateException    ( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/BatchActivePageScheduler.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\n\npublic interface BatchActivePageScheduler extends ActivePageScheduler {\n    long batchSize();\n\n    long getBatchEpoch();\n\n    ContiguousPage[] activates(long batch );\n\n    default ContiguousPage[] activates() {\n        return this.activates( this.batchSize() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/BatchActivePageScheduler64.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.RangedPage;\n\npublic abstract class BatchActivePageScheduler64 extends ActivePageScheduler64 implements BatchActivePageScheduler {\n    protected long                 mnBatchSize;\n    protected long                 mnBatchEpoch;\n\n    protected BatchActivePageScheduler64(RangedPage masterPage, PagePool pagePool, PageDivider divider, long autoIncrementId, long batchSize ) {\n        super( masterPage, pagePool, divider, autoIncrementId );\n        this.mnBatchSize           = batchSize;\n        this.mnBatchEpoch          = 0;\n    }\n\n    protected BatchActivePageScheduler64( PageDivider divider, long autoIncrementId, long batchSize ) {\n        super( divider, autoIncrementId );\n        this.mnBatchSize           = batchSize;\n        this.mnBatchEpoch          = 0;\n    }\n\n    @Override\n    public long batchSize(){\n        return this.mnBatchSize;\n    }\n\n    @Override\n    public long getBatchEpoch(){\n        return this.mnBatchEpoch;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ChunkDivider.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.slime.chunk.Chunk;\nimport com.pinecone.slime.chunk.DivisibleChunk;\n\npublic interface ChunkDivider extends Pinenut {\n    Chunk allocate() throws BadAllocateException;\n\n    DivisibleChunk getMasterChunk();\n\n    long getMaxAllocations();\n\n    long remainAllocatable();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/ChunkRegister.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Map;\n\npublic interface ChunkRegister<K, V > extends Pinenut, Map<K, V> {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/DefaultPageRecycleStrategy.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\n\npublic class DefaultPageRecycleStrategy implements PageRecycleStrategy {\n    protected PageScheduler mPageScheduler;\n\n    public DefaultPageRecycleStrategy( PageScheduler parent ){\n        this.mPageScheduler = parent;\n    }\n\n    @Override\n    public PageScheduler parentScheduler(){\n        return this.mPageScheduler;\n    }\n\n    @Override\n    public boolean qualified( ContiguousPage that ){\n        return false;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/DirectPagePool.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.framework.system.PineRuntimeException;\nimport com.pinecone.slime.chunk.ContiguousPage;\n\nimport java.lang.reflect.InvocationTargetException;\n\npublic class DirectPagePool implements PagePool {\n    protected Class<? extends ContiguousPage> stereotype;\n\n    public DirectPagePool( Class<? extends ContiguousPage> stereotype ){\n        this.stereotype = stereotype;\n    }\n\n    @Override\n    public int size(){\n        return Integer.MAX_VALUE - 2;\n    }\n\n    @Override\n    public ContiguousPage allocate( Object... args ){\n        ContiguousPage page;\n\n        try {\n            page = this.stereotype.getDeclaredConstructor().newInstance();\n        }\n        catch ( InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e ) {\n            throw new PineRuntimeException( \"Failed to allocate a new page.\", e );\n        }\n\n        page.apply( args );\n\n        return page;\n    }\n\n    @Override\n    public void deallocate( ContiguousPage that ){\n\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/FixedChunkDivider64.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.Splitunk;\nimport com.pinecone.slime.unitization.MinMaxRange;\nimport com.pinecone.slime.chunk.Chunk;\n\npublic abstract class FixedChunkDivider64 extends ArchMasterSplitunkDivider64 implements ChunkDivider {\n    protected long          mnEach;\n    protected long          mnStartOffset;\n    protected long          mnChunkMin;\n    protected long          mnChunkMax;\n    protected long          mnChunkElements;\n\n    protected long          mnCurrentEpoch;\n    protected long          mnMaxAllocations;\n\n\n    public FixedChunkDivider64(Splitunk masterChunk, long each ) {\n        super( masterChunk );\n\n        this.mnEach            = each;\n\n        this.mnChunkMin        = ( (MinMaxRange)this.mMasterChunk.getRange() ).getMin().longValue();\n        this.mnChunkMax        = ( (MinMaxRange)this.mMasterChunk.getRange() ).getMax().longValue();\n        this.mnStartOffset     = this.mnChunkMin;\n        this.mnChunkElements   = this.mnChunkMax - this.mnChunkMin;\n\n        this.mnCurrentEpoch    = 0;\n        this.mnMaxAllocations  = (this.mnChunkElements + this.mnEach - 1) / this.mnEach;\n    }\n\n    protected long nextRange( long to ) {\n        if( to + this.mnEach > this.mnChunkElements ) {\n            return this.mnChunkElements;\n        }\n        return to + this.mnEach;\n    }\n\n    @Override\n    public long getMaxAllocations() {\n        return this.mnMaxAllocations;\n    }\n\n    @Override\n    public long remainAllocatable(){\n        return this.mnMaxAllocations - this.mnCurrentEpoch;\n    }\n\n    public long getEach() {\n        return this.mnEach;\n    }\n\n    @Override\n    public Chunk allocate() throws BadAllocateException {\n        if( this.mnCurrentEpoch < this.getMaxAllocations() ) {\n            long start  = this.nextRange( (this.mnCurrentEpoch - 1) * this.mnEach ) + this.mnStartOffset;\n            long end    = this.nextRange( 
this.mnCurrentEpoch       * this.mnEach ) + this.mnStartOffset;\n\n            Chunk chunk = this.newChunk( start, end, this.mnCurrentEpoch );\n\n            ++this.mnCurrentEpoch;\n\n            return chunk;\n        }\n\n        throw new BadAllocateException();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/FixedPageDivider64.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.Chunk;\nimport com.pinecone.slime.chunk.Splitunk;\nimport com.pinecone.slime.chunk.marshaling.PageDividerPartition64;\n\npublic class FixedPageDivider64 extends FixedChunkDivider64 implements PageDivider {\n    protected PagePool                  mPagePool;\n    protected long                      mnPageIdOffset;\n\n    public FixedPageDivider64( Splitunk masterChunk, PagePool pagePool, long each, long pageIdOffset ) {\n        super( masterChunk, each );\n        this.mPagePool        = pagePool;\n        this.mnPageIdOffset   = pageIdOffset;\n    }\n\n    public FixedPageDivider64( Splitunk masterChunk, PagePool pagePool, long each ) {\n        this( masterChunk, pagePool, each, 0 );\n    }\n\n    public FixedPageDivider64( PageDividerPartition64 partition, PagePool pagePool, long pageIdOffset ) {\n        this( partition, pagePool, partition.eachPerPage(), pageIdOffset );\n    }\n\n    public FixedPageDivider64( PageDividerPartition64 partition, PagePool pagePool ) {\n        this( partition, pagePool, 0 );\n    }\n\n    @Override\n    protected Chunk newChunk( long start, long end, long epoch ) {\n        return this.mPagePool.allocate( start, end, this.mnPageIdOffset + this.mnCurrentEpoch, this.mMasterChunk );\n    }\n\n    @Override\n    public PagePool getPagePool() {\n        return this.mPagePool;\n    }\n\n    @Override\n    public long getPageIdOffset() {\n        return this.mnPageIdOffset;\n    }\n\n    @Override\n    public void setPageIdOffset( long offset ) {\n        this.mnPageIdOffset = offset;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/LocalBatchActivePageScheduler64.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\n\npublic class LocalBatchActivePageScheduler64 extends BatchActivePageScheduler64 {\n    public LocalBatchActivePageScheduler64( PageDivider divider, long autoIncrementId, long batchSize ) {\n        super( divider, autoIncrementId, batchSize );\n        this.mChunkRegister = new LocalMapChunkRegister<>();\n    }\n\n    @Override\n    public ContiguousPage[] activates(long batch ) {\n        long nActivated   = this.getActivatedSize();\n        long nAllocations = batch - nActivated;\n        long nRemains     = this.getDivider().remainAllocatable();\n        if( nRemains < nAllocations ) {\n            nAllocations = nRemains;\n        }\n\n        int iAlloc = (int) nAllocations;\n\n\n        ContiguousPage[] pages = new ContiguousPage[ iAlloc ];\n        for ( int i = 0; i < iAlloc; i++ ) {\n            pages[i] = this.activate();\n        }\n\n        ++this.mnBatchEpoch;\n        return pages;\n    }\n\n    @Override\n    public ContiguousPage[] activates() {\n        return this.activates( this.batchSize() );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/LocalMapChunkRegister.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\n\nimport com.pinecone.framework.util.json.JSON;\n\nimport java.util.Map;\nimport java.util.LinkedHashMap;\nimport java.util.Set;\nimport java.util.Collection;\n\npublic class LocalMapChunkRegister<K, V > implements ChunkRegister<K, V > {\n    private final Map<K, V > targetMap;\n\n    public LocalMapChunkRegister() {\n        this.targetMap = new LinkedHashMap<>();\n    }\n\n    public LocalMapChunkRegister( Map<K, V > otherMap ) {\n        this.targetMap = otherMap;\n    }\n\n    @Override\n    public int size() {\n        return this.targetMap.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.targetMap.isEmpty();\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.targetMap.containsKey(key);\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.targetMap.containsValue(value);\n    }\n\n    @Override\n    public V get( Object key ) {\n        return this.targetMap.get(key);\n    }\n\n    @Override\n    public V put( K key, V value ) {\n        return this.targetMap.put(key, value);\n    }\n\n    @Override\n    public V remove( Object key ) {\n        return this.targetMap.remove(key);\n    }\n\n    @Override\n    public void putAll( Map<? extends K, ? 
extends V> m ) {\n        this.targetMap.putAll(m);\n    }\n\n    @Override\n    public void clear() {\n        this.targetMap.clear();\n    }\n\n    @Override\n    public Set<K> keySet() {\n        return this.targetMap.keySet();\n    }\n\n    @Override\n    public Collection<V> values() {\n        return this.targetMap.values();\n    }\n\n    @Override\n    public Set<Map.Entry<K, V> > entrySet() {\n        return this.targetMap.entrySet();\n    }\n\n    @Override\n    public boolean equals( Object o ){\n        return this.targetMap.equals(o);\n    }\n\n    @Override\n    public int hashCode(){\n        return this.targetMap.hashCode();\n    }\n\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/PageDivider.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\npublic interface PageDivider extends ChunkDivider {\n    PagePool getPagePool();\n\n    long getPageIdOffset();\n\n    void setPageIdOffset( long offset );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/PagePool.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface PagePool extends Pinenut {\n    int size();\n\n    ContiguousPage allocate(Object... args );\n\n    void deallocate( ContiguousPage that );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/PageRecycleStrategy.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.slime.chunk.ContiguousPage;\n\npublic interface PageRecycleStrategy extends Pinenut {\n    PageScheduler parentScheduler();\n\n    boolean qualified( ContiguousPage that );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/PageScheduler.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface PageScheduler extends Pinenut {\n    PageScheduler setPageRecycleStrategy( PageRecycleStrategy strategy );\n\n    PageRecycleStrategy getPageRecycleStrategy();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/RangedPageScheduler.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\n\npublic interface RangedPageScheduler extends PageScheduler {\n    long getAutoIncrementId();\n\n    PageDivider getDivider();\n\n    ContiguousPage getMasterPage();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/chunk/scheduler/RangedPageScheduler64.java",
    "content": "package com.pinecone.slime.chunk.scheduler;\n\nimport com.pinecone.slime.chunk.ContiguousPage;\nimport com.pinecone.slime.chunk.RangedPage;\n\npublic abstract class RangedPageScheduler64 implements RangedPageScheduler {\n    protected long                 mnAutoIncrementId;\n    protected RangedPage           mMasterPage;\n    protected PagePool             mPagePool;\n    protected PageDivider          mDivider;\n\n    protected PageRecycleStrategy  mRecycleStrategy;\n\n    protected RangedPageScheduler64( RangedPage masterPage, PagePool pagePool, PageDivider divider, long autoIncrementId ) {\n        this.mMasterPage           = masterPage;\n        this.mPagePool             = pagePool;\n        this.mDivider              = divider;\n        this.mnAutoIncrementId     = autoIncrementId;\n\n        this.mRecycleStrategy      = null;\n    }\n\n    protected RangedPageScheduler64( PageDivider divider, long autoIncrementId ) {\n        this( (RangedPage) divider.getMasterChunk(), divider.getPagePool(), divider, autoIncrementId );\n        this.mDivider.setPageIdOffset( autoIncrementId );\n    }\n\n    protected abstract void beforeActivatePage() ;\n\n    @Override\n    public PageScheduler setPageRecycleStrategy( PageRecycleStrategy strategy ) {\n        this.mRecycleStrategy = strategy;\n        return this;\n    }\n\n    @Override\n    public PageRecycleStrategy getPageRecycleStrategy() {\n        return this.mRecycleStrategy;\n    }\n\n    @Override\n    public long getAutoIncrementId() {\n        return this.mnAutoIncrementId;\n    }\n\n    @Override\n    public PageDivider getDivider() {\n        return this.mDivider;\n    }\n\n    @Override\n    public ContiguousPage getMasterPage() {\n        return this.mMasterPage;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/ArchSequentialChunkGroup.java",
    "content": "package com.pinecone.slime.cluster;\n\nimport com.pinecone.framework.unit.LinkedMultiValueMap;\nimport com.pinecone.framework.unit.MultiValueMap;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.slime.unitization.Precision;\nimport com.pinecone.slime.unitization.Precision64;\nimport com.pinecone.slime.chunk.Chunk;\n\nimport java.util.ArrayList;\nimport java.util.Iterator;\nimport java.util.List;\n\npublic abstract class ArchSequentialChunkGroup implements SequentialChunkGroup {\n    protected long                           mnId;\n    protected List<Chunk >                   mChunkList;\n    protected MultiValueMap<Long, Chunk >    mChunkRegister;\n\n    protected ArchSequentialChunkGroup() {\n        this.mChunkList     = new ArrayList<>();\n        this.mChunkRegister = new LinkedMultiValueMap<> ();\n    }\n\n    @Override\n    public long getId() {\n        return this.mnId;\n    }\n\n    @Override\n    public void setId( long id ) {\n        this.mnId = id;\n    }\n\n    @Override\n    public Precision size() {\n        return new Precision64( this.getSequentialChunks().size() );\n    }\n\n    @Override\n    public void add( Chunk that ) {\n        this.mChunkList.add( that );\n        this.mChunkRegister.add( that.getId(), that );\n    }\n\n    @Override\n    public List<Chunk > getChunksById( long id ){\n        return this.mChunkRegister.get( id );\n    }\n\n    @Override\n    public Chunk getFirstChunkById( long id ){\n        return this.mChunkRegister.getFirst( id );\n    }\n\n    @Override\n    public void remove( Chunk that ) {\n        List<Chunk > chunks = this.getChunksById( that.getId() );\n        if( chunks.size() > 1 ) {\n            chunks.remove( that );\n        }\n        else {\n            this.mChunkRegister.remove( that.getId() );\n        }\n\n        this.mChunkList.remove( that );\n    }\n\n    @Override\n    public void remove( long id ) {\n        List<Chunk > chunks = this.getChunksById( id );\n     
   this.mChunkRegister.remove( id );\n\n        for ( Chunk c : chunks  ) {\n            this.mChunkList.remove( c );\n        }\n    }\n\n    @Override\n    public List<Chunk > getSequentialChunks() {\n        return this.mChunkList;\n    }\n\n    @Override\n    public Iterator<Chunk > begin() {\n        return this.getSequentialChunks().iterator();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this.mChunkList );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/ChunkGroup.java",
    "content": "package com.pinecone.slime.cluster;\n\nimport com.pinecone.slime.chunk.DiscreteChunk;\nimport com.pinecone.slime.chunk.PatriarchalChunk;\n\npublic interface ChunkGroup extends Cluster, PatriarchalChunk, DiscreteChunk {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/Cluster.java",
    "content": "package com.pinecone.slime.cluster;\n\nimport com.pinecone.slime.chunk.Chunk;\n\npublic interface Cluster extends Chunk {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/RangedCluster.java",
    "content": "package com.pinecone.slime.cluster;\n\nimport com.pinecone.slime.chunk.Continunk;\n\npublic interface RangedCluster extends Cluster, Continunk {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/cluster/SequentialChunkGroup.java",
    "content": "package com.pinecone.slime.cluster;\n\nimport com.pinecone.slime.chunk.Chunk;\n\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.List;\n\npublic interface SequentialChunkGroup extends ChunkGroup {\n    void add( Chunk that );\n\n    List<Chunk > getChunksById( long id );\n\n    Chunk getFirstChunkById( long id );\n\n    void remove( Chunk that );\n\n    void remove( long id );\n\n    Collection<Chunk > getSequentialChunks();\n\n    Iterator<Chunk > begin();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/entity/ArchEnumIndexableEntity.java",
    "content": "package com.pinecone.slime.entity;\n\npublic abstract class ArchEnumIndexableEntity implements EnumIndexableEntity {\n    protected long   mnEnumId;\n\n    protected ArchEnumIndexableEntity() {\n\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.mnEnumId;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/entity/EnumIndexableEntity.java",
    "content": "package com.pinecone.slime.entity;\n\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.id.Int64ID;\n\npublic interface EnumIndexableEntity extends ObjectiveEntity {\n    @Override\n    default Identification getId() {\n        return new Int64ID( this.getEnumId() );\n    }\n\n    long getEnumId();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/entity/ObjectiveEntity.java",
    "content": "package com.pinecone.slime.entity;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.Identification;\n\npublic interface ObjectiveEntity extends Pinenut {\n    Identification getId();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/AlterableCacher.java",
    "content": "package com.pinecone.slime.map;\n\nimport java.util.concurrent.TimeUnit;\n\npublic interface AlterableCacher<V> extends AlterableQuerier<V > {\n    V insert( Object key, V value, long expire, TimeUnit unit ) ;\n\n    V insert( Object key, V value, long expireMill );\n\n    V insertIfAbsent( Object key, V value, long expireMill );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/AlterableQuerier.java",
    "content": "package com.pinecone.slime.map;\n\npublic interface AlterableQuerier<V > extends Querier<V > {\n    void clear();\n\n    V insert( Object key, V value );\n\n    V insertIfAbsent( Object key, V value );\n\n    V erase( Object key );\n\n    // No need to retrieve value.\n    void expunge( Object key );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/LocalMapQuerier.java",
    "content": "package com.pinecone.slime.map;\n\nimport com.pinecone.framework.unit.Dictium;\nimport com.pinecone.framework.unit.ListDictium;\nimport com.pinecone.framework.unit.MapDictium;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\n\npublic class LocalMapQuerier<V > implements AlterableQuerier<V > {\n    protected Dictium<V > mTarget;\n\n    public LocalMapQuerier( Dictium<V > dictium ) {\n        this.mTarget = dictium;\n    }\n\n    public LocalMapQuerier( boolean bUsingList ) {\n        if( bUsingList ) {\n            this.mTarget = new ListDictium<>();\n        }\n        else {\n            this.mTarget = new MapDictium<>();\n        }\n    }\n\n    public LocalMapQuerier() {\n        this( false );\n    }\n\n    @Override\n    public long size() {\n        return this.mTarget.size();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.mTarget.isEmpty();\n    }\n\n    @Override\n    public void clear() {\n        this.mTarget.clear();\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.mTarget.containsKey( key );\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.mTarget.containsValue( value );\n    }\n\n    @Override\n    public V get( Object key ) {\n        return this.mTarget.get( key );\n    }\n\n    @Override\n    public V insert( Object key, V value ) {\n        return this.mTarget.insert( key, value );\n    }\n\n    @Override\n    public V insertIfAbsent( Object key, V value ) {\n        return this.mTarget.insertIfAbsent( key, value );\n    }\n\n    @Override\n    public V erase( Object key ) {\n        return this.mTarget.erase( key );\n    }\n\n    @Override\n    public void expunge( Object key ) {\n        this.erase( key );\n    }\n\n    @Override\n    public Set<? 
> entrySet() {\n        return this.mTarget.entrySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return this.mTarget.values();\n    }\n\n    @Override\n    public Map<?, V > toMap() {\n        return this.mTarget.toMap();\n    }\n\n    @Override\n    public List<V > toList() {\n        return this.mTarget.toList();\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object elm ) {\n        return this.mTarget.hasOwnProperty( elm );\n    }\n\n    @Override\n    public String toJSONString() {\n        return this.mTarget.toJSONString();\n    }\n\n    @Override\n    public String toString() {\n        return this.mTarget.toString();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/Mapper.java",
    "content": "package com.pinecone.slime.map;\n\nimport com.pinecone.framework.system.prototype.PineUnit;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic interface Mapper<V > extends PineUnit {\n    long size();\n\n    boolean isEmpty();\n\n    @Override\n    boolean containsKey( Object key );\n\n    boolean containsValue( Object value );\n\n    V get( Object key );\n\n    Set<? > entrySet();\n\n    Collection<V > values();\n\n    Map<?, V > toMap();\n\n    List<V > toList();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/MonoKeyQueryRange.java",
    "content": "package com.pinecone.slime.map;\n\nimport com.pinecone.slime.unitization.PartialOrderRange;\n\npublic class MonoKeyQueryRange<T extends Comparable<T > > extends PartialOrderRange<T > implements QueryRange<T > {\n    protected String mszRangeKey;\n\n    public MonoKeyQueryRange( T min, T max, String szRangeKey ) {\n        super( min, max );\n        this.mszRangeKey = szRangeKey;\n    }\n\n    @Override\n    public String getRangeKey() {\n        return this.mszRangeKey;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/Querier.java",
    "content": "package com.pinecone.slime.map;\n\nimport java.util.List;\n\npublic interface Querier<V > extends Mapper<V > {\n    default List<? > query( Object statement ) {\n        return List.of( this.get( statement ) );\n    }\n\n    default List<V > queryVal( Object statement ) {\n        return List.of( this.get( statement ) );\n    }\n\n    default V queryValFirst( Object statement ) {\n        List<V > l = this.queryVal( statement );\n\n        if( l != null && !l.isEmpty() ) {\n            return l.get( 0 );\n        }\n        return null;\n    }\n\n    @Override\n    default boolean hasOwnProperty( Object elm ) {\n        return this.containsKey( elm );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/QueryRange.java",
    "content": "package com.pinecone.slime.map;\n\nimport com.pinecone.slime.unitization.PartialRange;\n\npublic interface QueryRange<T extends Comparable<T > > extends PartialRange<T > {\n    String getRangeKey();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/indexable/IndexableCachedMap.java",
    "content": "package com.pinecone.slime.map.indexable;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.unit.Mapnut;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.slime.cache.query.UniformCountSelfLoadingDictCache;\nimport com.pinecone.slime.source.indexable.IndexableDataManipulator;\nimport com.pinecone.slime.source.indexable.IndexableIterableManipulator;\nimport com.pinecone.slime.source.indexable.IndexableTargetScopeMeta;\n\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.AbstractSet;\nimport java.util.AbstractCollection;\nimport java.util.Iterator;\n\npublic class IndexableCachedMap<K, V > implements Mapnut<K, V > {\n    private final IndexableMapQuerier<K, V >                mQuerier;\n    private final IndexableIterableManipulator<K, V >       mManipulator;\n    protected UniformCountSelfLoadingDictCache<V >          mCache;\n    protected IndexableTargetScopeMeta                      mIndexMeta;\n\n    public IndexableCachedMap( IndexableTargetScopeMeta indexMeta, UniformCountSelfLoadingDictCache<V > cache, IndexableMapQuerier<K, V > querier ) {\n        IndexableDataManipulator<K, V > manipulator = (IndexableDataManipulator<K, V >) indexMeta.<K, V >getDataManipulator();\n        if( ! 
( manipulator instanceof IndexableIterableManipulator ) ) {\n            throw new IllegalArgumentException( \"Manipulator should be IterableManipulator.\" );\n        }\n\n        this.mManipulator = (IndexableIterableManipulator<K, V >) manipulator;\n        this.mIndexMeta   = indexMeta;\n        this.mCache       = cache;\n        this.mQuerier     = querier;\n    }\n\n    public IndexableCachedMap( IndexableTargetScopeMeta indexMeta, UniformCountSelfLoadingDictCache<V > cache ) {\n        this( indexMeta, cache, new IndexableMapQuerier<>( indexMeta, cache ) );\n    }\n\n    @Override\n    public long megaSize() {\n        return this.mQuerier.size();\n    }\n\n    @Override\n    public int size() {\n        return (int)this.megaSize();\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.size() == 0;\n    }\n\n    @Override\n    public void clear() {\n        this.mQuerier.clear();\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.mQuerier.containsKey( key );\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object key ) {\n        return this.containsKey( key );\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.mQuerier.containsValue( value );\n    }\n\n    @Override\n    public V get( Object key ) {\n        return this.mQuerier.get( key );\n    }\n\n    @Override\n    public Entry<K, V> getEntryByKey( Object compatibleKey ) {\n        return this.getEntryCopyByKey( compatibleKey );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Entry<K, V> getEntryCopyByKey( Object compatibleKey ) {\n        return new KeyValue<>( (K)compatibleKey, this.get( compatibleKey ) );\n    }\n\n    @Override\n    public V put( K key, V value ) {\n        return this.mQuerier.insert( key, value );\n    }\n\n    @Override\n    public V putIfAbsent( K key, V value ) {\n        return this.mQuerier.insertIfAbsent( key, value );\n    
}\n\n    @Override\n    public void putAll( Map<? extends K, ? extends V> m ) {\n        for( Map.Entry<? extends K, ? extends V> kv : m.entrySet() ){\n            this.put( kv.getKey(), kv.getValue() );\n        }\n    }\n\n    @Override\n    public V remove( Object key ) {\n        return this.mQuerier.erase( key );\n    }\n\n    @Override\n    public Set<K > keySet() {\n        return new IndexableKeySet();\n    }\n\n    @Override\n    public Set<Entry<K, V > > entrySet() {\n        return new IndexableEntrySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return new IndexableValCollection();\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n\n\n    protected final class IndexableValueIterator implements Iterator<V > {\n        Iterator<Map.Entry<K, V > > entryIterator;\n\n        IndexableValueIterator() {\n            this.entryIterator = IndexableCachedMap.this.mManipulator.iterator( IndexableCachedMap.this.mIndexMeta );\n        }\n\n        @Override\n        public final boolean hasNext() {\n            return this.entryIterator.hasNext();\n        }\n\n        public final V next() { return this.entryIterator.next().getValue(); }\n    }\n\n    protected class IndexableEntrySet extends AbstractSet<Map.Entry<K, V > > {\n        public final int size()                 { return IndexableCachedMap.this.size(); }\n\n        public final void clear()               { IndexableCachedMap.this.clear(); }\n\n        public final Iterator<Map.Entry<K, V > > iterator() {\n            return IndexableCachedMap.this.mManipulator.iterator( IndexableCachedMap.this.mIndexMeta );\n        }\n\n        public final boolean contains( Object o ) {\n            if ( !(o instanceof Map.Entry) ) {\n                return false;\n            }\n            Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n         
   Object key = e.getKey();\n\n            Object v = IndexableCachedMap.this.get(key);\n            return v != null && v.equals(e.getValue());\n        }\n\n        public final boolean remove( Object o ) {\n            if ( this.contains(o) ) {\n                Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n                Object key = e.getKey();\n\n                return IndexableCachedMap.this.remove(key) != null ;\n            }\n            return false;\n        }\n    }\n\n    protected class IndexableKeySet extends AbstractSet<K > {\n        public final int size()                 { return IndexableCachedMap.this.size(); }\n\n        public final void clear()               { IndexableCachedMap.this.clear(); }\n\n        public final Iterator<K > iterator() {\n            return IndexableCachedMap.this.mManipulator.keysIterator( IndexableCachedMap.this.mIndexMeta );\n        }\n\n        public final boolean contains( Object o ) {\n            return IndexableCachedMap.this.containsKey( o );\n        }\n\n        public final boolean remove( Object o ) {\n            return IndexableCachedMap.this.remove( o ) != null;\n        }\n    }\n\n    protected class IndexableValCollection extends AbstractCollection<V > {\n        public final int size()                 { return IndexableCachedMap.this.size(); }\n\n        public final void clear()               { IndexableCachedMap.this.clear(); }\n\n        public final Iterator<V > iterator() {\n            return new IndexableValueIterator();\n        }\n    }\n\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/indexable/IndexableMapQuerier.java",
    "content": "package com.pinecone.slime.map.indexable;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.slime.cache.CacheConstants;\nimport com.pinecone.slime.cache.query.ConcurrentMergeLRUDictCachePage;\nimport com.pinecone.slime.cache.query.LocalFixedLRUDictCachePage;\nimport com.pinecone.slime.cache.query.UniformCountSelfLoadingDictCache;\nimport com.pinecone.slime.map.AlterableCacher;\nimport com.pinecone.slime.source.indexable.GenericIndexKeySourceRetriever;\nimport com.pinecone.slime.source.indexable.IndexableDataManipulator;\nimport com.pinecone.slime.source.indexable.IndexableIterableManipulator;\nimport com.pinecone.slime.source.indexable.IndexableTargetScopeMeta;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.ArrayList;\nimport java.util.concurrent.TimeUnit;\n\npublic class IndexableMapQuerier<K, V > implements AlterableCacher<V > {\n    private final IndexableDataManipulator<K, V >  mManipulator;\n    protected UniformCountSelfLoadingDictCache<V > mCache;\n    protected IndexableTargetScopeMeta             mIndexMeta;\n\n    protected static <V > UniformCountSelfLoadingDictCache<V > newCache( IndexableTargetScopeMeta meta, boolean bConcurrent ) {\n        if ( bConcurrent ) {\n            return new ConcurrentMergeLRUDictCachePage<>( CacheConstants.DefaultCachePageMegaCapacity,\n                    new GenericIndexKeySourceRetriever<>( meta )\n            );\n        }\n        else {\n            return new LocalFixedLRUDictCachePage<>( CacheConstants.DefaultCachePageMegaCapacity,\n                    new GenericIndexKeySourceRetriever<>( meta )\n            );\n        }\n    }\n\n    public IndexableMapQuerier( IndexableTargetScopeMeta meta, UniformCountSelfLoadingDictCache<V > cache ) {\n        this.mManipulator  = (IndexableDataManipulator<K, V >) meta.<K, V >getDataManipulator();\n        this.mIndexMeta    = meta;\n        this.mCache  
      = cache;\n    }\n\n    public IndexableMapQuerier( IndexableTargetScopeMeta meta, boolean bConcurrent ) {\n        this( meta, IndexableMapQuerier.newCache( meta, bConcurrent ) );\n    }\n\n    public IndexableMapQuerier( IndexableTargetScopeMeta meta ) {\n        this( meta, true );\n    }\n\n\n    @Override\n    public long size() {\n        return this.mManipulator.counts( this.mIndexMeta, null );\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.size() == 0;\n    }\n\n    @Override\n    public void clear() {\n        this.mCache.clear();\n        this.mManipulator.purge( this.mIndexMeta );\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.mCache.implicatesKey( key );\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        Object values = this.mManipulator.selectAllByNS( this.mIndexMeta, null, null );\n        if( values instanceof Collection) {\n            return ((Collection) values).contains( value );\n        }\n        else if( values instanceof Map) {\n            return ((Map) values).values().contains( value );\n        }\n        return false;\n    }\n\n    @Override\n    public V get( Object key ) {\n        return this.mCache.get( key );\n    }\n\n    @Override\n    public V insert( Object key, V value ) {\n        this.mManipulator.insert( this.mIndexMeta, (K)key, value );\n        return value;\n    }\n\n    @Override\n    public V insert( Object key, V value, long expireMill ) {\n        this.mManipulator.insert( this.mIndexMeta, (K)key, value, expireMill );\n        return value;\n    }\n\n    @Override\n    public V insert( Object key, V value, long expire, TimeUnit unit ) {\n        this.insert( key, value, unit.toMillis( expire ) );\n        return value;\n    }\n\n    @Override\n    public V insertIfAbsent( Object key, V value ) {\n        if ( !this.containsKey( key ) ) {\n            return this.insert( key, value );\n        }\n        
return null;\n    }\n\n    @Override\n    public V insertIfAbsent( Object key, V value, long expireMill ) {\n        if ( !this.containsKey( key ) ) {\n            return this.insert( key, value, expireMill );\n        }\n        return null;\n    }\n\n    @Override\n    public V erase( Object key ) {\n        V value = this.get( key );\n        this.expunge( key );\n        return value;\n    }\n\n    @Override\n    public void expunge( Object key ) {\n        this.mCache.erase( key );\n        this.mManipulator.deleteByKey( this.mIndexMeta, key );\n    }\n\n    @Override\n    public Set<? extends Map.Entry<?, V>> entrySet() {\n        Map<K, V> map = this.toMap();\n        return map.entrySet();\n    }\n\n    @Override\n    public Collection<V> values() {\n        return this.toMap().values();\n    }\n\n    @Override\n    public Map<K, V > toMap() {\n        if( this.mManipulator instanceof IndexableIterableManipulator ) {\n            IndexableIterableManipulator<K, V > manipulator = (IndexableIterableManipulator<K, V >)this.mManipulator;\n\n            return new IndexableCachedMap<>( this.mIndexMeta, this.mCache, this );\n        }\n        throw new NotImplementedException( \"Manipulator should be IterableManipulator.\" );\n    }\n\n    @Override\n    public List<V> toList() {\n        return new ArrayList<>( this.values() );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/rdb/RDBMapQuerier.java",
    "content": "package com.pinecone.slime.map.rdb;\n\nimport com.pinecone.framework.system.NotImplementedException;\nimport com.pinecone.slime.cache.CacheConstants;\nimport com.pinecone.slime.cache.query.SourceRetriever;\nimport com.pinecone.slime.cache.query.UniformCountSelfLoadingDictCache;\nimport com.pinecone.slime.cache.query.pool.BatchPageSourceRetriever;\nimport com.pinecone.slime.cache.query.pool.CountSelfPooledPageDictCache;\nimport com.pinecone.slime.cache.query.pool.LocalLRUPrimaryPooledDictCache;\nimport com.pinecone.slime.map.AlterableQuerier;\nimport com.pinecone.slime.map.MonoKeyQueryRange;\nimport com.pinecone.slime.source.rdb.ContiguousNumIndexBatchPageSourceRetriever;\nimport com.pinecone.slime.source.rdb.RDBQuerierDataManipulator;\nimport com.pinecone.slime.source.rdb.RDBTargetTableMeta;\nimport com.pinecone.slime.source.rdb.RangedRDBQuerierDataManipulator;\n\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic class RDBMapQuerier<K, V > implements AlterableQuerier<V > {\n    private final RDBQuerierDataManipulator<K, V >       mDataMapper;\n    protected UniformCountSelfLoadingDictCache<V >       mCache;\n    protected RDBTargetTableMeta                         mTableMeta;\n\n    public RDBMapQuerier( RDBTargetTableMeta tableMeta, UniformCountSelfLoadingDictCache<V > cache ) {\n        this.mDataMapper = (RDBQuerierDataManipulator<K, V >) tableMeta.<K, V >getDataManipulator();\n        this.mTableMeta  = tableMeta;\n        this.mCache      = cache;\n    }\n\n    public RDBMapQuerier( RDBTargetTableMeta tableMeta, String szRangeKey ) {\n        this( tableMeta, new LocalLRUPrimaryPooledDictCache<>( CacheConstants.DefaultCachePageCapacity, 5,\n                new ContiguousNumIndexBatchPageSourceRetriever<>( tableMeta, CacheConstants.DefaultCachePageCapacity, szRangeKey )\n        ) );\n    }\n\n    public RDBMapQuerier( RDBTargetTableMeta tableMeta ) {\n        this( tableMeta, 
tableMeta.getPrimaryKey() );\n    }\n\n    @Override\n    public long size() {\n        return this.mDataMapper.counts( this.mTableMeta,\"\" );\n    }\n\n    @Override\n    public boolean isEmpty() {\n        return this.size() == 0;\n    }\n\n    @Override\n    public void clear() {\n        this.mCache.clear();\n        this.mDataMapper.truncate( this.mTableMeta );\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.mCache.implicatesKey( key );\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        throw new NotImplementedException( \"Querier::containsValue is not implemented.\" );\n        //return this.dataMapper.selectListByColumn( this.tableMeta,  )\n    }\n\n    @Override\n    public V get( Object key ) {\n        return this.mCache.get( key );\n    }\n\n    @Override\n    public List<V > queryVal( Object statement ) {\n        if( statement instanceof String ) {\n            return this.mDataMapper.queryVal( this.mTableMeta, (String) statement );\n        }\n        return this.mDataMapper.queryVal( this.mTableMeta, statement.toString() );\n    }\n\n    public List<? 
> query( Object statement ) {\n        if( statement instanceof String ) {\n            return this.mDataMapper.query( this.mTableMeta, (String) statement );\n        }\n        return this.mDataMapper.query( this.mTableMeta, statement.toString() );\n    }\n\n    @Override\n    public V insert( Object key, V value ) {\n        try{\n            this.mDataMapper.insert( this.mTableMeta, (K)key, value );\n        }\n        catch ( Exception e ) {\n            this.mDataMapper.update( this.mTableMeta, (K)key, value );\n            if( this.mCache.existsKey( key ) ) {\n                this.mCache.erase( key );\n            }\n        }\n        return value;\n    }\n\n    @Override\n    public V insertIfAbsent( Object key, V value ) {\n        if( !this.containsKey( key ) ) {\n            return this.insert( key, value );\n        }\n        return null;\n    }\n\n    @Override\n    public V erase( Object key ) {\n        V v = this.get( key );\n        this.expunge( key );\n        return v;\n    }\n\n    @Override\n    public void expunge( Object key ) {\n        this.mCache.erase( key );\n        this.mDataMapper.deleteByKey( this.mTableMeta, key );\n    }\n\n    @Override\n    public Set<? 
extends Map.Entry<?, V > > entrySet() {\n        return this.toMap().entrySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        try{\n            Map<?, V > map = this.toMap();\n            return map.values();\n        }\n        catch ( NotImplementedException e ) {\n            return this.toList();\n        }\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Map<?, V > toMap() {\n        if( this.mDataMapper instanceof RangedRDBQuerierDataManipulator ) {\n            RangedRDBQuerierDataManipulator<K, V > manipulator = (RangedRDBQuerierDataManipulator<K, V >)this.mDataMapper;\n            SourceRetriever<V > retriever = this.mCache.getSourceRetriever();\n            String szRangeKey = this.mTableMeta.getIndexKey();\n\n\n            if( this.mCache instanceof CountSelfPooledPageDictCache && retriever instanceof BatchPageSourceRetriever ) {\n                return new RangedRDBCachedMap<>( this.mTableMeta, (CountSelfPooledPageDictCache)this.mCache, this );\n            }\n            else {\n                if( retriever instanceof BatchPageSourceRetriever ) {\n                    szRangeKey = ((BatchPageSourceRetriever<V >) retriever).getRangeKey();\n                }\n                Object max = manipulator.getMaximumRangeVal( this.mTableMeta, szRangeKey );\n                Object min = manipulator.getMinimumRangeVal( this.mTableMeta, szRangeKey );\n\n                if( max instanceof Comparable ) {\n                    return (Map<?, V >)manipulator.selectMappedByRange( this.mTableMeta, new MonoKeyQueryRange<>( (Comparable)min, (Comparable)max, szRangeKey ) );\n                }\n            }\n        }\n        throw new NotImplementedException( \"Manipulator should be has `Ranged`, and max/min should be `Comparable`.\" );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public List<V > toList() {\n        return ( List<V > ) this.mDataMapper.selectList( this.mTableMeta, \"\" );\n    
}\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/map/rdb/RangedRDBCachedMap.java",
    "content": "package com.pinecone.slime.map.rdb;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.unit.Mapnut;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.slime.cache.query.IterableDictCachePage;\nimport com.pinecone.slime.cache.query.RangedDictCachePage;\nimport com.pinecone.slime.cache.query.pool.BatchPageSourceRetriever;\nimport com.pinecone.slime.cache.query.pool.CountSelfPooledPageDictCache;\nimport com.pinecone.slime.map.MonoKeyQueryRange;\nimport com.pinecone.slime.source.rdb.RDBTargetTableMeta;\nimport com.pinecone.slime.source.rdb.RangedRDBQuerierDataManipulator;\n\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.Collection;\nimport java.util.Iterator;\nimport java.util.AbstractSet;\nimport java.util.AbstractCollection;\nimport java.util.NoSuchElementException;\n\n\npublic class RangedRDBCachedMap<K, V > implements Mapnut<K, V > {\n    private final RDBMapQuerier<K, V >                      mQuerier;\n    private final RangedRDBQuerierDataManipulator<K, V >    mDataMapper;\n    protected CountSelfPooledPageDictCache<V >              mCache;\n    protected RDBTargetTableMeta                            mTableMeta;\n\n    public RangedRDBCachedMap( RDBTargetTableMeta tableMeta, CountSelfPooledPageDictCache<V > cache, RDBMapQuerier<K, V > querier ) {\n        this.mDataMapper = (RangedRDBQuerierDataManipulator<K, V >) tableMeta.<K, V >getDataManipulator();\n        this.mTableMeta  = tableMeta;\n        this.mCache      = cache;\n        this.mQuerier    = querier;\n    }\n\n    public RangedRDBCachedMap( RDBTargetTableMeta tableMeta, CountSelfPooledPageDictCache<V > cache ) {\n        this( tableMeta, cache, new RDBMapQuerier<>( tableMeta, cache ) );\n    }\n\n    @Override\n    public long megaSize() {\n        return this.mQuerier.size();\n    }\n\n    @Override\n    public int size() {\n        return (int)this.megaSize();\n    }\n\n    @Override\n    public boolean isEmpty() {\n       
 return this.size() == 0;\n    }\n\n    @Override\n    public void clear() {\n        this.mQuerier.clear();\n    }\n\n    @Override\n    public boolean containsKey( Object key ) {\n        return this.mQuerier.containsKey( key );\n    }\n\n    @Override\n    public boolean hasOwnProperty( Object key ) {\n        return this.containsKey( key );\n    }\n\n    @Override\n    public boolean containsValue( Object value ) {\n        return this.mQuerier.containsValue( value );\n    }\n\n    @Override\n    public V get( Object key ) {\n        return this.mQuerier.get( key );\n    }\n\n    @Override\n    public Entry<K, V> getEntryByKey( Object compatibleKey ) {\n        return this.getEntryCopyByKey( compatibleKey );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Entry<K, V> getEntryCopyByKey( Object compatibleKey ) {\n        return new KeyValue<>( (K)compatibleKey, this.get( compatibleKey ) );\n    }\n\n    @Override\n    public V put( K key, V value ) {\n        return this.mQuerier.insert( key, value );\n    }\n\n    @Override\n    public V putIfAbsent( K key, V value ) {\n        return this.mQuerier.insertIfAbsent( key, value );\n    }\n\n    @Override\n    public void putAll( Map<? extends K, ? extends V> m ) {\n        for( Map.Entry<? extends K, ? 
extends V> kv : m.entrySet() ){\n            this.put( kv.getKey(), kv.getValue() );\n        }\n    }\n\n    @Override\n    public V remove( Object key ) {\n        return this.mQuerier.erase( key );\n    }\n\n    @Override\n    public Set<K > keySet() {\n        return new BufferedRDBKeySet();\n    }\n\n    @Override\n    public Set<Entry<K, V > > entrySet() {\n        return new BufferedRDBEntrySet();\n    }\n\n    @Override\n    public Collection<V > values() {\n        return new BufferedRDBValCollection();\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSON.stringify( this );\n    }\n\n    protected abstract class BufferedRDBIterator {\n        protected Comparable                                mMax;\n        protected Comparable                                mMin;\n        protected RangedRDBQuerierDataManipulator<K, V >    mDataMapper;\n        protected RDBTargetTableMeta                        mTableMeta;\n        protected BatchPageSourceRetriever<V >              mRetriever;\n        protected long                                      mnPageCapacity;\n        protected String                                    mszRangeKey;\n        protected RangedDictCachePage<V >                   mCurrentPage;\n        protected Iterator<? 
>                              mCurrentIter;\n\n        @SuppressWarnings( \"unchecked\" )\n        public BufferedRDBIterator() {\n            this.mDataMapper     = RangedRDBCachedMap.this.mDataMapper;\n            this.mTableMeta      = RangedRDBCachedMap.this.mTableMeta;\n            this.mRetriever      = (BatchPageSourceRetriever<V > ) RangedRDBCachedMap.this.mCache.getSourceRetriever(); // Specifically required `BatchPageSourceRetriever`.\n            this.mszRangeKey     = this.mRetriever.getRangeKey();\n            this.mMax            = (Comparable) this.mDataMapper.getMaximumRangeVal( this.mTableMeta, this.mszRangeKey );\n            this.mMin            = (Comparable) this.mDataMapper.getMinimumRangeVal( this.mTableMeta, this.mszRangeKey );\n            this.mnPageCapacity  = this.mRetriever.getBatchSize();\n\n            this.mCurrentPage    = this.mRetriever.retrieves( this.mMin );\n            if( !( this.mCurrentPage instanceof IterableDictCachePage ) ) {\n                throw new IllegalArgumentException( \"DictCachePage is not iterable.\" );\n            }\n\n            IterableDictCachePage<V > page = ( IterableDictCachePage<V > ) this.mCurrentPage;\n            this.mCurrentIter    = page.iterator();\n        }\n\n        @SuppressWarnings( \"unchecked\" )\n        public boolean hasNext() {\n            if( this.mCurrentIter.hasNext() ) {\n                return true;\n            }\n            this.mCurrentPage    = this.mRetriever.retrieves(\n                    this.mMin, new MonoKeyQueryRange( this.mCurrentPage.getRange().getMax(), this.mRetriever.nextRangeMax( this.mCurrentPage.getRange().getMax() ), this.mszRangeKey )\n            );\n            IterableDictCachePage<V > page = ( IterableDictCachePage<V > ) this.mCurrentPage;\n            this.mCurrentIter    = page.iterator();\n            return this.mCurrentIter.hasNext();\n        }\n\n        @SuppressWarnings( \"unchecked\" )\n        protected Map.Entry<K, V > nextNode() {\n    
        if ( !this.hasNext() ) {\n                throw new NoSuchElementException();\n            }\n\n            Object next = this.mCurrentIter.next();\n            if( next instanceof Map.Entry ) {\n                return (Map.Entry<K, V >) next;\n            }\n            throw new IllegalArgumentException( \"Iterable object is not `Map.Entry`.\" );\n        }\n\n        public void remove() {\n            this.mCurrentIter.remove();\n        }\n    }\n\n    protected final class BufferedRDBEntryIterator extends BufferedRDBIterator implements Iterator<Map.Entry<K, V > > {\n        public final Map.Entry<K, V > next() { return this.nextNode(); }\n    }\n\n    protected class BufferedRDBEntrySet extends AbstractSet<Map.Entry<K, V > > {\n        public final int size()                 { return RangedRDBCachedMap.this.size(); }\n\n        public final void clear()               { RangedRDBCachedMap.this.clear(); }\n\n        public final Iterator<Map.Entry<K, V > > iterator() {\n            return new BufferedRDBEntryIterator();\n        }\n\n        public final boolean contains( Object o ) {\n            if ( !(o instanceof Map.Entry) ) {\n                return false;\n            }\n            Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n            Object key = e.getKey();\n\n            Object v = RangedRDBCachedMap.this.get(key);\n            return v != null && v.equals(e.getValue());\n        }\n\n        public final boolean remove( Object o ) {\n            if ( this.contains(o) ) {\n                Map.Entry<?,?> e = (Map.Entry<?,?>) o;\n                Object key = e.getKey();\n\n                return RangedRDBCachedMap.this.remove(key) != null ;\n            }\n            return false;\n        }\n    }\n\n    protected final class BufferedRDBKeyIterator extends BufferedRDBIterator implements Iterator<K > {\n        public final K next() { return this.nextNode().getKey(); }\n    }\n\n    protected class BufferedRDBKeySet extends AbstractSet<K > {\n   
     public final int size()                 { return RangedRDBCachedMap.this.size(); }\n\n        public final void clear()               { RangedRDBCachedMap.this.clear(); }\n\n        public final Iterator<K > iterator() {\n            return new BufferedRDBKeyIterator();\n        }\n\n        public final boolean contains( Object o ) {\n            return RangedRDBCachedMap.this.containsKey( o );\n        }\n\n        public final boolean remove( Object o ) {\n            return RangedRDBCachedMap.this.remove( o ) != null;\n        }\n    }\n\n    protected final class BufferedRDBValueIterator extends BufferedRDBIterator implements Iterator<V > {\n        public final V next() { return this.nextNode().getValue(); }\n    }\n\n    protected class BufferedRDBValCollection extends AbstractCollection<V > {\n        public final int size()                 { return RangedRDBCachedMap.this.size(); }\n\n        public final void clear()               { RangedRDBCachedMap.this.clear(); }\n\n        public final Iterator<V > iterator() {\n            return new BufferedRDBValueIterator();\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/meta/TableIndex64Meta.java",
    "content": "package com.pinecone.slime.meta;\n\npublic class TableIndex64Meta implements TableIndexMeta {\n\n    private long mnMinId;\n    private long mnMaxId;\n\n    public TableIndex64Meta( long nMinId, long nMaxId ) {\n        this.mnMinId = nMinId;\n        this.mnMaxId = nMaxId;\n    }\n\n    @Override\n    public long getMinId() {\n        return this.mnMinId;\n    }\n\n    @Override\n    public long getMaxId() {\n        return this.mnMaxId;\n    }\n\n    public void setMaxId( long nMaxId ) {\n        this.mnMaxId = nMaxId;\n    }\n\n    public void setMinId( long nMinId ) {\n        this.mnMinId = nMinId;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/meta/TableIndexMeta.java",
    "content": "package com.pinecone.slime.meta;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TableIndexMeta extends Pinenut {\n\n    long getMaxId();\n\n    long getMinId();\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/query/GenericPageQuery.java",
    "content": "package com.pinecone.slime.query;\n\nimport com.pinecone.framework.system.Nullable;\n\npublic class GenericPageQuery<E> implements PageQuery<E> {\n    @Nullable\n    private String key;\n\n    private E      value;\n    private long   offset;\n    private long   pageSize;\n\n    public GenericPageQuery( String key, E value, long offset, long pageSize ) {\n        this.key      = key;\n        this.value    = value;\n        this.offset   = offset;\n        this.pageSize = pageSize;\n    }\n\n    public GenericPageQuery( E value, long offset, long pageSize ) {\n        this.value    = value;\n        this.offset   = offset;\n        this.pageSize = pageSize;\n    }\n\n    @Override\n    public String getKey() {\n        return this.key;\n    }\n\n    @Override\n    public void setKey( String key ) {\n        this.key = key;\n    }\n\n    @Override\n    public E getValue() {\n        return this.value;\n    }\n\n    @Override\n    public void setValue( E value ) {\n        this.value = value;\n    }\n\n    @Override\n    public long getOffset() {\n        return this.offset;\n    }\n\n    @Override\n    public void setOffset( long offset ) {\n        this.offset = offset;\n    }\n\n    @Override\n    public long getPageSize() {\n        return pageSize;\n    }\n\n    @Override\n    public void setPageSize( long pageSize ) {\n        this.pageSize = pageSize;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/query/PageQuery.java",
    "content": "package com.pinecone.slime.query;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface PageQuery<E> extends Pinenut {\n\n    String getKey();\n\n    void setKey( String key );\n\n    E getValue();\n\n    void setValue( E value );\n\n    long getOffset();\n\n    void setOffset( long offset );\n\n    long getPageSize();\n\n    void setPageSize( long pageSize );\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/ArchQueryScopeMeta.java",
    "content": "package com.pinecone.slime.source;\n\nimport java.util.Set;\n\npublic abstract class ArchQueryScopeMeta implements UniformQueryScopeMeta {\n    private String                               mszScopeName;\n    private String                               mszPrimaryKey;\n    private String                               mszIndexKey;\n    private Class<? >                            mValueType;\n    private Set<String >                         mValueMetaKeys;\n    private ResultConverter                      mResultConverter;\n\n    protected <K, V > ArchQueryScopeMeta( String namespace, String primaryKey, String indexKey, Class<?> valueType, Set<String > valueMetaKeys ) {\n        this.mszScopeName     = namespace;\n        this.mszPrimaryKey    = primaryKey;\n        this.mszIndexKey      = indexKey;\n        this.mValueType       = valueType;\n        this.mValueMetaKeys   = valueMetaKeys;\n    }\n\n    @Override\n    public String getScopeNS() {\n        return this.mszScopeName;\n    }\n\n    @Override\n    public UniformQueryScopeMeta setScopeNS( String namespace ) {\n        this.mszScopeName = namespace;\n        return this;\n    }\n\n    @Override\n    public String getPrimaryKey() {\n        return this.mszPrimaryKey;\n    }\n\n    @Override\n    public UniformQueryScopeMeta setPrimaryKey( String primaryKey ) {\n        this.mszPrimaryKey = primaryKey;\n        return this;\n    }\n\n    @Override\n    public String getIndexKey() {\n        return this.mszIndexKey;\n    }\n\n    @Override\n    public UniformQueryScopeMeta setIndexKey( String indexKey ) {\n        this.mszIndexKey = indexKey;\n        return this;\n    }\n\n    @Override\n    public Class<?> getValueType() {\n        return this.mValueType;\n    }\n\n    @Override\n    public UniformQueryScopeMeta setValueType( Class<?> valueType ) {\n        this.mValueType = valueType;\n        return this;\n    }\n\n    @Override\n    public Set<String > getValueMetaKeys(){\n        return 
this.mValueMetaKeys;\n    }\n\n    @Override\n    public UniformQueryScopeMeta setValueMetaKeys( Set<String > keys ){\n        this.mValueMetaKeys = keys;\n        return this;\n    }\n\n    @Override\n    public UniformQueryScopeMeta addValueMetaKey( String key ) {\n        this.getValueMetaKeys().add( key );\n        return this;\n    }\n\n    @Override\n    public UniformQueryScopeMeta removeValueMetaKey( String key ) {\n        this.getValueMetaKeys().remove( key );\n        return this;\n    }\n\n    @Override\n    public UniformQueryScopeMeta clone() {\n        try {\n            return (UniformQueryScopeMeta) super.clone();  // Refers inner pointer.\n        }\n        catch ( CloneNotSupportedException e ) {\n            // this shouldn't happen, since we are Cloneable\n            throw new InternalError(e);\n        }\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public <V > ResultConverter<V > getResultConverter() {\n        return this.mResultConverter;\n    }\n\n    @Override\n    public <V > UniformQueryScopeMeta setResultConverter( ResultConverter<V > converter ) {\n        this.mResultConverter = converter;\n        return this;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/DAOScanner.java",
    "content": "package com.pinecone.slime.source;\n\nimport com.pinecone.framework.util.lang.ClassScanner;\n\npublic interface DAOScanner extends ClassScanner {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/DataAccessObject.java",
    "content": "package com.pinecone.slime.source;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport java.lang.annotation.ElementType;\n\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface DataAccessObject {\n    String value() default \"\";\n\n    // Which databases or data-manipulator that affinity to.\n    // For multi databases scenario.\n    String scope() default \"\";\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/GenericResultConverter.java",
    "content": "package com.pinecone.slime.source;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.slime.source.ResultConverter;\n\nimport java.beans.IntrospectionException;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\nimport java.util.Set;\nimport java.util.Map;\nimport java.text.ParseException;\n\npublic class GenericResultConverter<V > implements ResultConverter<V > {\n    private Class<V >       mValueType;\n    private Set<String >    mValueMetaKeys;\n\n    public GenericResultConverter( Class<V > valueType, Set<String > valueMetaKeys ) {\n        this.mValueType     = valueType;\n        this.mValueMetaKeys = valueMetaKeys;\n    }\n\n    @Override\n    @SuppressWarnings(\"unchecked\")\n    public V convert( Object val ) {\n        if ( val instanceof Map ) {\n            Map<String, Object> map = (Map<String, Object>) val;\n\n            // Handling single value scenario for primitive or String\n            if ( this.mValueMetaKeys.size() == 1 ) {\n                Object singleValue = map.get( this.mValueMetaKeys.iterator().next() );\n                if ( ResultConverter.isPrimitiveOrSpecialType( this.mValueType ) ) {\n                    return (V) this.convertToType( singleValue, this.mValueType );\n                }\n            }\n\n            // Handling Map scenarios\n            if ( Map.class.isAssignableFrom( this.mValueType ) ) {\n                if ( val instanceof LinkedTreeMap && this.mValueType.isAssignableFrom( LinkedTreeMap.class ) ) {\n                    return (V) map;\n                }\n                else {\n                    try {\n                        Map<String, Object> targetMap = ( Map<String, Object> ) this.mValueType.getDeclaredConstructor().newInstance();\n                        
targetMap.putAll( map );\n                        return (V) targetMap;\n                    }\n                    catch ( NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException e ) {\n                        if( this.mValueType.isAssignableFrom( val.getClass() ) ) {\n                            return (V) map;\n                        }\n                        throw new ProxyProvokeHandleException( \"Error converting to target Map type.\", e );\n                    }\n                }\n            }\n\n            // Handling Bean scenarios\n            try {\n                Constructor<V >  constructor = this.mValueType.getDeclaredConstructor();\n                constructor.setAccessible( true );  // [NOTICE] Set the constructor accessible\n                V bean = constructor.newInstance();\n\n                for ( Map.Entry<String, Object > entry : map.entrySet() ) {\n                    try{\n                        String property = entry.getKey();\n                        Object value = entry.getValue();\n                        this.setBeanProperty( bean, property, value );\n                    }\n                    catch ( IntrospectionException | InvocationTargetException | IllegalAccessException e ) {\n                        e.printStackTrace();\n                        // continue\n                    }\n                }\n                return bean;\n            }\n            catch ( NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException e ) {\n                throw new ProxyProvokeHandleException(\"Error converting to target Bean type\", e);\n            }\n        }\n        else if( val != null && ResultConverter.isPrimitiveOrSpecialType( val.getClass() )  ){\n            if ( ResultConverter.isPrimitiveOrSpecialType( this.mValueType ) ) {\n                return (V) this.convertToType( val, this.mValueType );\n            }\n            if( 
this.mValueType.equals( Object.class ) ){\n                return (V) val;\n            }\n        }\n        else if( this.mValueType.equals( Object.class ) ){\n            return (V) val;\n        }\n\n        throw new IllegalArgumentException( \"Unsupported conversion from value: \" + val );\n    }\n\n    private Object convertToType( Object value, Class<?> type ) {\n        if ( value == null ) {\n            return null;\n        }\n        if ( type.isInstance( value ) ) {\n            return type.cast( value );\n        }\n\n        if ( type == String.class ) {\n            return value.toString();\n        }\n        else if ( type == int.class || type == Integer.class ) {\n            if (value instanceof Number) {\n                return ((Number) value).intValue();\n            }\n            else {\n                return Integer.parseInt(value.toString());\n            }\n        }\n        else if ( type == long.class || type == Long.class ) {\n            if (value instanceof Number) {\n                return ((Number) value).longValue();\n            }\n            else {\n                return Long.parseLong(value.toString());\n            }\n        }\n        else if ( type == double.class || type == Double.class ) {\n            if (value instanceof Number) {\n                return ((Number) value).doubleValue();\n            } else {\n                return Double.parseDouble(value.toString());\n            }\n        }\n        else if ( type == boolean.class || type == Boolean.class ) {\n            if (value instanceof Boolean) {\n                return value;\n            } else {\n                return Boolean.parseBoolean(value.toString());\n            }\n        }\n        else if ( type == byte.class || type == Byte.class ) {\n            if (value instanceof Number) {\n                return ((Number) value).byteValue();\n            } else {\n                return Byte.parseByte(value.toString());\n            }\n        }\n    
    else if ( type == short.class || type == Short.class ) {\n            if (value instanceof Number) {\n                return ((Number) value).shortValue();\n            } else {\n                return Short.parseShort(value.toString());\n            }\n        }\n        else if ( type == float.class || type == Float.class ) {\n            if (value instanceof Number) {\n                return ((Number) value).floatValue();\n            } else {\n                return Float.parseFloat(value.toString());\n            }\n        }\n        else if ( type == char.class || type == Character.class ) {\n            return value.toString().charAt(0);\n        }\n        else if ( type == Date.class ) {\n            if ( value instanceof Date ) {\n                return value;\n            }\n            else {\n                try {\n                    return new SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss\").parse(value.toString());\n                }\n                catch (ParseException e) {\n                    throw new IllegalArgumentException(\"Cannot parse date: \" + value, e);\n                }\n            }\n        }\n        else if ( type.isEnum() ) {\n            return Enum.valueOf((Class<Enum>) type, value.toString());\n        }\n        else if ( type == byte[].class ) {\n            if ( value instanceof byte[] ) {\n                return value;\n            }\n            else {\n                return value.toString().getBytes();\n            }\n        }\n        else {\n            throw new IllegalArgumentException( \"Cannot convert value to type: \" + type );\n        }\n    }\n\n    private void setBeanProperty( Object bean, String property, Object value ) throws IntrospectionException, InvocationTargetException, IllegalAccessException {\n        java.beans.PropertyDescriptor propertyDescriptor = new java.beans.PropertyDescriptor( property, bean.getClass() );\n        Method writeMethod = propertyDescriptor.getWriteMethod();\n        if ( 
writeMethod != null ) {\n            writeMethod.setAccessible( true );\n            writeMethod.invoke( bean, value );\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/ResultConverter.java",
    "content": "package com.pinecone.slime.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Date;\n\npublic interface ResultConverter<V > extends Pinenut {\n    V convert( Object val );\n\n    static boolean isPrimitiveOrSpecialType(Class<?> type) {\n        return type.isPrimitive() ||\n                type == String.class ||\n                Number.class.isAssignableFrom(type) ||\n                type == Boolean.class ||\n                type == Character.class ||\n                type == Date.class ||\n                type.isEnum() ||\n                type == byte[].class;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/UniformQueryScopeMeta.java",
    "content": "package com.pinecone.slime.source;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Set;\n\npublic interface UniformQueryScopeMeta extends Pinenut, Cloneable {\n    String getScopeNS();\n    UniformQueryScopeMeta setScopeNS( String namespace );\n\n    String getPrimaryKey();\n    UniformQueryScopeMeta setPrimaryKey( String primaryKey );\n\n    String getIndexKey();\n    UniformQueryScopeMeta setIndexKey( String indexKey );\n\n    Class<?> getValueType();\n    UniformQueryScopeMeta setValueType( Class<?> valueType );\n\n\n    Set<String > getValueMetaKeys();\n    UniformQueryScopeMeta setValueMetaKeys( Set<String > keys );\n    UniformQueryScopeMeta addValueMetaKey( String key );\n    UniformQueryScopeMeta removeValueMetaKey( String key );\n\n    <V > ResultConverter<V > getResultConverter();\n    <V > UniformQueryScopeMeta setResultConverter( ResultConverter<V > converter );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/XMLResourceScanner.java",
    "content": "package com.pinecone.slime.source;\n\nimport com.pinecone.framework.util.lang.ObjectScanner;\n\npublic interface XMLResourceScanner extends ObjectScanner {\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/GenericIndexKeySourceRetriever.java",
    "content": "package com.pinecone.slime.source.indexable;\n\nimport com.pinecone.slime.cache.query.SourceRetriever;\n\npublic class GenericIndexKeySourceRetriever<K, V >  implements SourceRetriever<V > {\n    private   IndexableDataManipulator<K, V >   mManipulator;\n    protected IndexableTargetScopeMeta          mIndexMeta;\n\n    public GenericIndexKeySourceRetriever( IndexableTargetScopeMeta meta ) {\n        this.mIndexMeta      = meta;\n        this.mManipulator    = (IndexableDataManipulator<K, V >) meta.<K, V >getDataManipulator();\n    }\n\n    @Override\n    public V retrieve( Object key ) {\n        return this.mManipulator.selectByKey( this.mIndexMeta, key );\n    }\n\n    @Override\n    public long countsKey( Object key ) {\n        return this.mManipulator.countsByNS( this.mIndexMeta, this.mIndexMeta.getIndexKey(), key );\n    }\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/GenericIndexableTargetScopeMeta.java",
    "content": "package com.pinecone.slime.source.indexable;\n\nimport com.pinecone.slime.source.ArchQueryScopeMeta;\nimport com.pinecone.slime.source.ResultConverter;\n\nimport java.util.Set;\nimport java.util.TreeSet;\n\npublic class GenericIndexableTargetScopeMeta extends ArchQueryScopeMeta implements IndexableTargetScopeMeta {\n    private IndexableDataManipulator            mDataManipulator;\n\n    public <K, V > GenericIndexableTargetScopeMeta( String scopeName, String primaryKey, String indexKey, Class<?> valueType, IndexableDataManipulator<K, V > manipulator, Set<String > valueMetaKeys ) {\n        super( scopeName, primaryKey, indexKey, valueType, valueMetaKeys );\n        this.mDataManipulator = manipulator;\n    }\n\n    public <K, V > GenericIndexableTargetScopeMeta( String scopeName, String indexKey, Class<?> valueType, IndexableDataManipulator<K, V > manipulator, Set<String > valueMetaKeys ) {\n        this( scopeName, indexKey, indexKey, valueType, manipulator, valueMetaKeys );\n    }\n\n    public <K, V > GenericIndexableTargetScopeMeta( String scopeName, String indexKey, Class<?> valueType, IndexableDataManipulator<K, V > manipulator ) {\n        this( scopeName, indexKey, valueType, manipulator, new TreeSet<>() );\n    }\n\n    public GenericIndexableTargetScopeMeta( String scopeName, String indexKey, Class<?> valueType ) {\n        this( scopeName, indexKey, valueType, null );\n    }\n\n    @Override\n    public IndexableTargetScopeMeta setScopeNS( String namespace ) {\n        super.setScopeNS( namespace );\n        return this;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public <K, V > IndexableDataManipulator<K, V > getDataManipulator() {\n        return this.mDataManipulator;\n    }\n\n    @Override\n    public <K, V >IndexableTargetScopeMeta setDataManipulator( IndexableDataManipulator<K, V > manipulator ){\n        this.mDataManipulator = manipulator;\n        return this;\n    }\n\n    @Override\n    public 
IndexableTargetScopeMeta setPrimaryKey( String primaryKey ) {\n        super.setPrimaryKey( primaryKey );\n        return this;\n    }\n\n    @Override\n    public IndexableTargetScopeMeta setIndexKey( String indexKey ) {\n        super.setIndexKey( indexKey );\n        return this;\n    }\n\n    @Override\n    public IndexableTargetScopeMeta setValueType( Class<?> valueType ) {\n        super.setValueType( valueType );\n        return this;\n    }\n\n    @Override\n    public IndexableTargetScopeMeta setValueMetaKeys( Set<String > keys ){\n        super.setValueMetaKeys( keys );\n        return this;\n    }\n\n    @Override\n    public IndexableTargetScopeMeta addValueMetaKey( String key ) {\n        super.addValueMetaKey( key );\n        return this;\n    }\n\n    @Override\n    public IndexableTargetScopeMeta removeValueMetaKey( String key ) {\n        super.removeValueMetaKey( key );\n        return this;\n    }\n\n    @Override\n    public <V > IndexableTargetScopeMeta setResultConverter( ResultConverter<V > converter ) {\n        super.setResultConverter( converter );\n        return this;\n    }\n\n    @Override\n    public GenericIndexableTargetScopeMeta clone() {\n        return (GenericIndexableTargetScopeMeta) super.clone();  // Refers inner pointer.\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/IndexableDataManipulator.java",
    "content": "package com.pinecone.slime.source.indexable;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.Collection;\nimport java.util.List;\n\npublic interface IndexableDataManipulator<K, V > extends Pinenut {\n\n    // Counting: szScopeKey\n    long counts                ( IndexableTargetScopeMeta meta, String szScopeKey );\n\n    // Counting: Namespace::key\n    long countsByNS            ( IndexableTargetScopeMeta meta, String szNamespace, Object key );\n\n    // Counting: Namespace\n    long countsNS              ( IndexableTargetScopeMeta meta, String szNamespace );\n\n    List query                 ( IndexableTargetScopeMeta meta, String szStatement );\n\n    List<V > queryVal          ( IndexableTargetScopeMeta meta, String szStatement );\n\n    Object selectAllByNS       ( IndexableTargetScopeMeta meta, String szNamespace, Object key );\n\n    List<V > selectsByNS       ( IndexableTargetScopeMeta meta, String szNamespace, Object key );\n\n    V    selectByNS            ( IndexableTargetScopeMeta meta, String szNamespace, Object key );\n\n    V    selectByKey           ( IndexableTargetScopeMeta meta, Object key );\n\n    void insertByNS            ( IndexableTargetScopeMeta meta, String szNamespace, K key, V entity );\n\n    void insert                ( IndexableTargetScopeMeta meta, K key, V entity );\n\n    void insert                ( IndexableTargetScopeMeta meta, K key, V entity, long expireMill );\n\n    void updateByNS            ( IndexableTargetScopeMeta meta, String szNamespace, K key, V entity );\n\n    void update                ( IndexableTargetScopeMeta meta, K key, V entity );\n\n    void deleteByNS            ( IndexableTargetScopeMeta meta, String szNamespace, Object key );\n\n    void deleteByKey           ( IndexableTargetScopeMeta meta, Object key );\n\n    void purge                 ( IndexableTargetScopeMeta meta );\n\n    void purgeByNS             ( IndexableTargetScopeMeta meta, String szNamespace 
);\n\n    void commit                ();\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/IndexableIterableManipulator.java",
    "content": "package com.pinecone.slime.source.indexable;\n\nimport java.util.Iterator;\nimport java.util.Map;\n\npublic interface IndexableIterableManipulator<K, V > extends IndexableDataManipulator<K, V > {\n    Iterator<K > keysIterator( IndexableTargetScopeMeta meta );\n\n    Iterator<Map.Entry<K, V > >  iterator( IndexableTargetScopeMeta meta );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/indexable/IndexableTargetScopeMeta.java",
    "content": "package com.pinecone.slime.source.indexable;\n\nimport com.pinecone.slime.source.ResultConverter;\nimport com.pinecone.slime.source.UniformQueryScopeMeta;\n\nimport java.util.Set;\n\npublic interface IndexableTargetScopeMeta extends UniformQueryScopeMeta {\n    @Override\n    IndexableTargetScopeMeta setPrimaryKey( String primaryKey );\n\n    @Override\n    IndexableTargetScopeMeta setIndexKey( String indexKey );\n\n    @Override\n    IndexableTargetScopeMeta setValueType( Class<?> valueType );\n\n    <K, V >IndexableDataManipulator<K, V > getDataManipulator();\n    <K, V >IndexableTargetScopeMeta setDataManipulator( IndexableDataManipulator<K, V > manipulator );\n\n    @Override\n    Set<String > getValueMetaKeys();\n    @Override\n    IndexableTargetScopeMeta setValueMetaKeys( Set<String > keys );\n    @Override\n    IndexableTargetScopeMeta addValueMetaKey( String key );\n    @Override\n    IndexableTargetScopeMeta removeValueMetaKey( String key );\n\n    @Override\n    <V > IndexableTargetScopeMeta setResultConverter( ResultConverter<V > converter );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/ArchRelationalDatabase.java",
    "content": "package com.pinecone.slime.source.rdb;\n\nimport com.pinecone.framework.util.json.homotype.MapStructure;\n\nimport java.util.regex.Matcher;\nimport java.util.regex.Pattern;\n\npublic abstract class ArchRelationalDatabase implements RelationalDatabase {\n    @MapStructure( \"host\" )\n    protected String  mHost;\n\n    @MapStructure( \"username\" )\n    protected String  mUsername;\n\n    @MapStructure( \"password\" )\n    protected String  mPassword;\n\n    @MapStructure( \"database\" )\n    protected String  mDatabase;\n\n    @MapStructure( \"port\" )\n    protected int     mPort;\n\n    @MapStructure( \"charset\" )\n    protected String  mCharset = \"utf8\";\n\n    @MapStructure( \"tablePrefix\" )\n    protected String  mTablePrefix;\n\n    @MapStructure( \"dbType\" )\n    protected String  mDBType;\n\n    @MapStructure( \"Enable\" )\n    protected boolean mEnable = true;\n\n\n    @Override\n    public String getHost() {\n        return this.mHost;\n    }\n\n    @Override\n    public void setHost( String host ) {\n        this.mHost = host;\n    }\n\n    @Override\n    public String getUsername() {\n        return this.mUsername;\n    }\n\n    @Override\n    public void setUsername( String username ) {\n        this.mUsername = username;\n    }\n\n    @Override\n    public String getPassword() {\n        return this.mPassword;\n    }\n\n    @Override\n    public void setPassword( String password ) {\n        this.mPassword = password;\n    }\n\n    @Override\n    public String getDatabase() {\n        return this.mDatabase;\n    }\n\n    @Override\n    public void setDatabase( String database ) {\n        this.mDatabase = database;\n    }\n\n    @Override\n    public int getPort() {\n        return this.mPort;\n    }\n\n    @Override\n    public void setPort( int port ) {\n        this.mPort = port;\n    }\n\n    @Override\n    public String getCharset() {\n        return this.mCharset;\n    }\n\n    @Override\n    public void setCharset( String 
charset ) {\n        this.mCharset = charset;\n    }\n\n    @Override\n    public String getTablePrefix() {\n        return this.mTablePrefix;\n    }\n\n    @Override\n    public void setTablePrefix( String tablePrefix ) {\n        this.mTablePrefix = tablePrefix;\n    }\n\n    @Override\n    public boolean isEnabled() {\n        return this.mEnable;\n    }\n\n    @Override\n    public void setEnabled( boolean enabled ) {\n        this.mEnable = enabled;\n    }\n\n    @Override\n    public String getDBType() {\n        return this.mDBType;\n    }\n\n    @Override\n    public void setDBType( String dbType ) {\n        this.mDBType = dbType;\n    }\n\n\n    @Override\n    public String getJDBCURL() {\n        String url = \"jdbc:\" + this.mDBType + \"://\" + this.mHost + \":\" + this.mPort + \"/\" + this.mDatabase;\n        if( this.mCharset.toLowerCase().startsWith( \"utf\" ) )  { // utf-8, utf8, etc...\n            url = url +\"?useUnicode=true&characterEncoding=\" + this.mCharset;\n        }\n        else {\n            url = url +\"?characterEncoding=\" + this.mCharset;\n        }\n\n        return url;\n    }\n\n    @Override\n    public void fromJDBCURL( String jdbcUrl ) {\n        Pattern pattern = Pattern.compile( \"jdbc:(\\\\w+):\\\\/\\\\/(.+):(\\\\d+)\\\\/(.+)\\\\?useUnicode=true&characterEncoding=(\\\\w+)\" );\n        Matcher matcher = pattern.matcher( jdbcUrl );\n        boolean bMatched = false;\n        if ( matcher.matches() ) {\n            bMatched = true;\n        }\n        else {\n            pattern = Pattern.compile( \"jdbc:(\\\\w+):\\\\/\\\\/(.+):(\\\\d+)\\\\/(.+)\\\\?characterEncoding=(\\\\w+)\" );\n            matcher = pattern.matcher( jdbcUrl );\n            bMatched = matcher.matches();\n        }\n\n        if ( bMatched ) {\n            this.mDBType   = matcher.group(1);\n            this.mHost     = matcher.group(2);\n            this.mPort     = Integer.parseInt(matcher.group(3));\n            this.mDatabase = matcher.group(4);\n      
      this.mCharset  = matcher.group(5);\n        }\n        else {\n            throw new IllegalArgumentException( \"Invalid JDBC URL format: \" + jdbcUrl );\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/ContiguousNumIndexBatchPageSourceRetriever.java",
    "content": "package com.pinecone.slime.source.rdb;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.unit.MapDictium;\nimport com.pinecone.slime.cache.query.RangedDictCachePage;\nimport com.pinecone.slime.cache.query.pool.BatchPageSourceRetriever;\nimport com.pinecone.slime.cache.query.pool.LocalRangedDictCachePage;\nimport com.pinecone.slime.map.MonoKeyQueryRange;\nimport com.pinecone.slime.map.QueryRange;\nimport com.pinecone.slime.unitization.PartialRange;\n\nimport java.util.Map;\n\npublic class ContiguousNumIndexBatchPageSourceRetriever<K extends Number & Comparable<K >, V >  implements BatchPageSourceRetriever<V > {\n    private   RangedRDBQuerierDataManipulator<K, V > mDataMapper;\n    protected RDBTargetTableMeta                     mTableMeta;\n    protected int                                    mnPageCapacity;\n    protected String                                 mszRangeKey;\n\n    public ContiguousNumIndexBatchPageSourceRetriever( RDBTargetTableMeta tableMeta, int nPageCapacity, String szRangeKey ) {\n        this.mTableMeta     = tableMeta;\n        this.mDataMapper    = (RangedRDBQuerierDataManipulator<K, V >) tableMeta.<K, V >getDataManipulator();\n        this.mnPageCapacity = nPageCapacity;\n        this.mszRangeKey    = szRangeKey;\n    }\n\n    @Override\n    public long getBatchSize() {\n        return this.mnPageCapacity;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public <T extends Comparable<T > > T nextRangeMax( T key ) {\n        if ( key instanceof Integer ) {\n            return (T)(Integer)( (Integer) key + this.mnPageCapacity );\n        }\n        else if ( key instanceof Long ) {\n            return (T)(Long)( (Long) key + this.mnPageCapacity );\n        }\n        else if ( key instanceof Short ) {\n            return (T)(Short)( (Integer)( (Short) key + this.mnPageCapacity ) ).shortValue();\n        }\n        else if ( key instanceof Double ) {\n            return 
(T)(Double)( (Double) key + this.mnPageCapacity );\n        }\n        else if ( key instanceof Float ) {\n            return (T)(Float)( (Float) key + this.mnPageCapacity );\n        }\n        else if ( key instanceof Byte ) {\n            return (T)(Byte)( (Integer)( (Byte) key + this.mnPageCapacity ) ).byteValue();\n        }\n        else {\n            throw new IllegalArgumentException( \"Unsupported number type.\" );\n        }\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public QueryRange<K > queryRangeOnly( Object key ) {\n        if ( key instanceof Integer ) {\n            return (QueryRange<K >) this.calculateRangeForInteger( (Integer) key );\n        }\n        else if ( key instanceof Long ) {\n            return (QueryRange<K >) this.calculateRangeForLong((Long) key);\n        }\n        else if ( key instanceof Short ) {\n            return (QueryRange<K >) this.calculateRangeForShort((Short) key);\n        }\n        else if ( key instanceof Double ) {\n            return (QueryRange<K >) this.calculateRangeForDouble((Double) key);\n        }\n        else if ( key instanceof Float ) {\n            return (QueryRange<K >) this.calculateRangeForFloat((Float) key);\n        }\n        else if ( key instanceof Byte ) {\n            return (QueryRange<K >) this.calculateRangeForByte((Byte) key);\n        }\n        else if ( key instanceof QueryRange ) {\n            return (QueryRange<K >) key;\n        }\n        else {\n            throw new IllegalArgumentException( \"Unsupported number type.\" );\n        }\n    }\n\n    protected QueryRange<? > calculateRangeForLong( Long key ) {\n        long start = (key / this.mnPageCapacity) * this.mnPageCapacity;\n        long end   = start + this.mnPageCapacity;\n        return new MonoKeyQueryRange<>( start, end, this.mszRangeKey );\n    }\n\n    protected QueryRange<? 
> calculateRangeForInteger(Integer key ) {\n        int start = (key / this.mnPageCapacity) * this.mnPageCapacity;\n        int end   = start + this.mnPageCapacity;\n        return new MonoKeyQueryRange<>( start, end, this.mszRangeKey );\n    }\n\n    protected QueryRange<? > calculateRangeForShort( Short key ) {\n        short start = (short) ((key / this.mnPageCapacity) * this.mnPageCapacity);\n        short end   = (short) (start + this.mnPageCapacity);\n        return new MonoKeyQueryRange<>( start, end, this.mszRangeKey );\n    }\n\n    protected QueryRange<? > calculateRangeForDouble( Double key ) {\n        double start = Math.floor(key / this.mnPageCapacity) * this.mnPageCapacity;\n        double end   = start + this.mnPageCapacity;\n        return new MonoKeyQueryRange<>( start, end, this.mszRangeKey );\n    }\n\n    protected QueryRange<? > calculateRangeForFloat( Float key ) {\n        float start = (float) Math.floor(key / this.mnPageCapacity) * this.mnPageCapacity;\n        float end   = start + this.mnPageCapacity;\n        return new MonoKeyQueryRange<>( start, end, this.mszRangeKey );\n    }\n\n    protected QueryRange<? 
> calculateRangeForByte( Byte key ) {\n        byte start = (byte) ((key / this.mnPageCapacity) * this.mnPageCapacity);\n        byte end   = (byte) (start + this.mnPageCapacity);\n        return new MonoKeyQueryRange<>( start, end, this.mszRangeKey );\n    }\n\n    @Override\n    public String getRangeKey() {\n        return this.mszRangeKey;\n    }\n\n    @Override\n    public V retrieve( Object key ) {\n        return this.mDataMapper.selectByKey( this.mTableMeta, key );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public <T extends Comparable<T > > RangedDictCachePage<V > retrieves( Object key, @Nullable PartialRange<T > range ) {\n        QueryRange queryRange;\n        if( range == null ) {\n            queryRange = ( QueryRange )this.queryRangeOnly( key );\n        }\n        else {\n            queryRange = new MonoKeyQueryRange<>( range.getMin(), range.getMax(), this.mszRangeKey );\n        }\n\n        Map map = this.mDataMapper.selectMappedByRange( this.mTableMeta, queryRange );\n        return new LocalRangedDictCachePage<>( -1, this.mnPageCapacity, new MapDictium<>( map ), (PartialRange<T >)queryRange );\n    }\n\n    @Override\n    public RangedDictCachePage<V > retrieves( Object key ) {\n        return this.retrieves( key, null );\n    }\n\n    @Override\n    public <T extends Comparable<T>> long counts( PartialRange<T> range ) {\n        QueryRange queryRange;\n        if( range instanceof QueryRange ) {\n            queryRange = ( QueryRange ) range;\n        }\n        else {\n            queryRange = new MonoKeyQueryRange<>( range.getMin(), range.getMax(), this.mszRangeKey );\n        }\n        return this.mDataMapper.countsByRange( this.mTableMeta, queryRange );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public long countsKey( Object key ) {\n        if( key instanceof Comparable ) {\n            return this.mDataMapper.countsByRange( this.mTableMeta, new MonoKeyQueryRange<>( (Comparable)key, 
(Comparable)key, this.mszRangeKey ) );\n        }\n        throw new IllegalArgumentException( \"Key should be comparable.\" );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/GenericRDBTargetTableMeta.java",
    "content": "package com.pinecone.slime.source.rdb;\n\nimport com.pinecone.slime.source.ArchQueryScopeMeta;\nimport com.pinecone.slime.source.ResultConverter;\n\nimport java.util.Set;\nimport java.util.TreeSet;\n\npublic class GenericRDBTargetTableMeta extends ArchQueryScopeMeta implements RDBTargetTableMeta {\n    private RDBQuerierDataManipulator            mDataManipulator;\n\n    public <K, V > GenericRDBTargetTableMeta( String tableName, String primaryKey, String indexKey, Class<?> valueType, RDBQuerierDataManipulator<K, V > manipulator, Set<String > valueMetaKeys ) {\n        super( tableName, primaryKey, indexKey, valueType, valueMetaKeys );\n        this.mDataManipulator = manipulator;\n    }\n\n    public <K, V > GenericRDBTargetTableMeta( String tableName, String indexKey, Class<?> valueType, RDBQuerierDataManipulator<K, V > manipulator, Set<String > valueMetaKeys ) {\n        this( tableName, indexKey, indexKey, valueType, manipulator, valueMetaKeys );\n    }\n\n    public <K, V > GenericRDBTargetTableMeta( String tableName, String indexKey, Class<?> valueType, RDBQuerierDataManipulator<K, V > manipulator ) {\n        this( tableName, indexKey, valueType, manipulator, new TreeSet<>() );\n    }\n\n    public GenericRDBTargetTableMeta( String tableName, String indexKey, Class<?> valueType ) {\n        this( tableName, indexKey, valueType, null );\n    }\n\n    @Override\n    public RDBTargetTableMeta setScopeNS( String namespace ) {\n        super.setScopeNS( namespace );\n        return this;\n    }\n\n    @Override\n    public String getTableName() {\n        return this.getScopeNS();\n    }\n\n    @Override\n    public RDBTargetTableMeta setTableName( String tableName ) {\n        return this.setScopeNS( tableName );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public <K, V > RDBQuerierDataManipulator<K, V > getDataManipulator() {\n        return this.mDataManipulator;\n    }\n\n    @Override\n    public <K, V 
>RDBTargetTableMeta setDataManipulator( RDBQuerierDataManipulator<K, V > manipulator ){\n        this.mDataManipulator = manipulator;\n        return this;\n    }\n\n    @Override\n    public RDBTargetTableMeta setPrimaryKey( String primaryKey ) {\n        super.setPrimaryKey( primaryKey );\n        return this;\n    }\n\n    @Override\n    public RDBTargetTableMeta setIndexKey( String indexKey ) {\n        super.setIndexKey( indexKey );\n        return this;\n    }\n\n    @Override\n    public RDBTargetTableMeta setValueType( Class<?> valueType ) {\n        super.setValueType( valueType );\n        return this;\n    }\n\n    @Override\n    public RDBTargetTableMeta setValueMetaKeys( Set<String > keys ){\n        super.setValueMetaKeys( keys );\n        return this;\n    }\n\n    @Override\n    public RDBTargetTableMeta addValueMetaKey( String key ) {\n        super.addValueMetaKey( key );\n        return this;\n    }\n\n    @Override\n    public RDBTargetTableMeta removeValueMetaKey( String key ) {\n        super.removeValueMetaKey( key );\n        return this;\n    }\n\n    @Override\n    public <V > RDBTargetTableMeta setResultConverter( ResultConverter<V > converter ) {\n        super.setResultConverter( converter );\n        return this;\n    }\n\n    @Override\n    public GenericRDBTargetTableMeta clone() {\n        return (GenericRDBTargetTableMeta) super.clone();  // Refers inner pointer.\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/GenericSingleKeySourceRetriever.java",
    "content": "package com.pinecone.slime.source.rdb;\n\nimport com.pinecone.slime.cache.query.SourceRetriever;\n\npublic class GenericSingleKeySourceRetriever<K, V >  implements SourceRetriever<V > {\n    private   RangedRDBQuerierDataManipulator<K, V > mDataMapper;\n    protected RDBTargetTableMeta                     mTableMeta;\n\n    public GenericSingleKeySourceRetriever( RDBTargetTableMeta tableMeta ) {\n        this.mTableMeta     = tableMeta;\n        this.mDataMapper    = (RangedRDBQuerierDataManipulator<K, V >) tableMeta.<K, V >getDataManipulator();\n    }\n\n    @Override\n    public V retrieve( Object key ) {\n        return this.mDataMapper.selectByKey( this.mTableMeta, key );\n    }\n\n    @Override\n    public long countsKey( Object key ) {\n        return this.mDataMapper.countsByColumn( this.mTableMeta, this.mTableMeta.getIndexKey(), key );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RDBClient.java",
    "content": "package com.pinecone.slime.source.rdb;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.slime.source.DAOScanner;\nimport com.pinecone.slime.source.DataAccessObject;\n\nimport java.lang.annotation.Annotation;\nimport java.util.List;\n\npublic interface RDBClient extends Pinenut {\n    String getInstanceName();\n\n    void close();\n\n    boolean isTerminated();\n\n    DAOScanner getDataAccessObjectScanner();\n\n    List<Class<? > > addDataAccessObjectScope( String szPacketName );\n\n    List<Class<? > > addDataAccessObjectScope( String szPacketName, boolean bIgnoreOwnedChecked );\n\n    List<Class<? > > addDataAccessObjectScopeNoneSync( String szPacketName, boolean bIgnoreOwnedChecked );\n\n    default boolean hasOwnDataAccessObject( Class<?> clazz ) {\n        Annotation[] annotations = clazz.getAnnotations();\n        for( Annotation annotation : annotations ) {\n            if( annotation instanceof DataAccessObject ) {\n                String s = ((DataAccessObject) annotation).scope();\n                if( s.isEmpty() || s.equals( this.getInstanceName() ) ){\n                    return true;\n                }\n            }\n        }\n        return false;\n    }\n\n    String getJDBCURL();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RDBQuerierDataManipulator.java",
    "content": "package com.pinecone.slime.source.rdb;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.List;\n\npublic interface RDBQuerierDataManipulator<K, V > extends Pinenut {\n\n    long counts                ( RDBTargetTableMeta meta, String szExSafeSQL );\n\n    long countsByColumn        ( RDBTargetTableMeta meta, String szSpecificColumnKeyName, Object key );\n\n    List selectList            ( RDBTargetTableMeta meta, String szExSafeSQL );\n\n    List query                 ( RDBTargetTableMeta meta, String szStatementSQL );\n\n    List<V > queryVal          ( RDBTargetTableMeta meta, String szStatementSQL );\n\n    List selectListByColumn    ( RDBTargetTableMeta meta, String szSpecificColumnKeyName, Object key );\n\n    V    selectByKey           ( RDBTargetTableMeta meta, Object key );\n\n    void insert                ( RDBTargetTableMeta meta, K key, V entity );\n\n    void update                ( RDBTargetTableMeta meta, K key, V entity );\n\n    void deleteByKey           ( RDBTargetTableMeta meta, Object key );\n\n    void truncate              ( RDBTargetTableMeta meta );\n\n    void commit                ();\n\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RDBTargetTableMeta.java",
    "content": "package com.pinecone.slime.source.rdb;\n\nimport com.pinecone.slime.source.ResultConverter;\nimport com.pinecone.slime.source.UniformQueryScopeMeta;\n\nimport java.util.Set;\n\npublic interface RDBTargetTableMeta extends UniformQueryScopeMeta {\n    @Override\n    default String getScopeNS() {\n        return this.getTableName();\n    }\n\n    @Override\n    default UniformQueryScopeMeta setScopeNS( String namespace ) {\n        return this.setTableName( namespace );\n    }\n\n    String getTableName();\n    RDBTargetTableMeta setTableName( String tableName );\n\n    @Override\n    RDBTargetTableMeta setPrimaryKey( String primaryKey );\n\n    @Override\n    RDBTargetTableMeta setIndexKey( String indexKey );\n\n    @Override\n    RDBTargetTableMeta setValueType( Class<?> valueType );\n\n    <K, V >RDBQuerierDataManipulator<K, V > getDataManipulator();\n    <K, V >RDBTargetTableMeta setDataManipulator( RDBQuerierDataManipulator<K, V > manipulator );\n\n    /**\n     * ValueMetaKeys\n     * if set is empty => SELECT * FROM       => map / bean\n     * if set has one  => SELECT set[0] FROM => map / bean / primitive\n     * if set has more => SELECT ...set FROM => map / bean\n     */\n    @Override\n    Set<String > getValueMetaKeys();\n    @Override\n    RDBTargetTableMeta setValueMetaKeys( Set<String > keys );\n    @Override\n    RDBTargetTableMeta addValueMetaKey( String key );\n    @Override\n    RDBTargetTableMeta removeValueMetaKey( String key );\n\n    @Override\n    <V > RDBTargetTableMeta setResultConverter( ResultConverter<V > converter );\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RangedRDBQuerierDataManipulator.java",
    "content": "package com.pinecone.slime.source.rdb;\n\n\nimport com.pinecone.slime.map.QueryRange;\n\nimport java.util.List;\nimport java.util.Map;\n\npublic interface RangedRDBQuerierDataManipulator<K, V > extends RDBQuerierDataManipulator<K, V > {\n    long countsByRange                 ( RDBTargetTableMeta meta, QueryRange range );\n\n    List selectListByRange             ( RDBTargetTableMeta meta, QueryRange range );\n\n    Map selectMappedByRange            ( RDBTargetTableMeta meta, QueryRange range );\n\n    Object getMaximumRangeVal          ( RDBTargetTableMeta meta, String szRangeKeyName );\n\n    Object getMinimumRangeVal          ( RDBTargetTableMeta meta, String szRangeKeyName );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/source/rdb/RelationalDatabase.java",
    "content": "package com.pinecone.slime.source.rdb;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface RelationalDatabase extends Pinenut {\n    String getHost();\n    void setHost( String host );\n\n    String getUsername();\n    void setUsername( String username );\n\n    String getPassword();\n    void setPassword( String password );\n\n    String getDatabase();\n    void setDatabase( String database );\n\n    int getPort();\n    void setPort( int port );\n\n    String getCharset();\n    void setCharset( String charset );\n\n    String getTablePrefix();\n    void setTablePrefix( String tablePrefix );\n\n    boolean isEnabled();\n    void setEnabled( boolean enabled );\n\n    String getDBType();\n    void setDBType( String dbType );\n\n    String getJDBCURL();\n    void fromJDBCURL( String jdbcUrl );\n}"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/IntervalRangeComparator.java",
    "content": "package com.pinecone.slime.unitization;\n\nimport java.util.Comparator;\n\npublic class IntervalRangeComparator implements Comparator<Object > {\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public int compare( Object o1, Object o2 ) {\n        if( o1 instanceof PartialRange && o2 instanceof PartialRange ) {\n            return ((PartialRange) o1).compareTo( (PartialRange)o2 );\n        }\n        else if ( o1 instanceof PartialRange && o2 instanceof Comparable ) {\n            PartialRange range = (PartialRange) o1;\n            return range.compareTo( (Comparable)o2 );\n        }\n        else if ( o1 instanceof Comparable && o2 instanceof PartialRange ) {\n            return -this.compare( o2, o1 );\n        }\n        else {\n            throw new IllegalArgumentException( \"Objects are not of type PartialRange or Comparable\" );\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/LinerRange.java",
    "content": "package com.pinecone.slime.unitization;\n\npublic interface LinerRange extends Range {\n    Precision getPrimePrecision();\n\n    Number span();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/MinMaxRange.java",
    "content": "package com.pinecone.slime.unitization;\n\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\n\npublic interface MinMaxRange extends LinerRange, Comparable<MinMaxRange > {\n    Number getMin();\n\n    Number getMax();\n\n    void   setRange( Number min, Number max );\n\n    void   setMin  ( Number min );\n\n    void   setMax  ( Number max );\n\n    @Override\n    default String toJSONString() {\n        return String.format(\n                \"{\\\"class\\\":\\\"%s\\\",\\\"min\\\":%s,\\\"max\\\":%s}\", this.className(), this.getMin(), this.getMax()\n        );\n    }\n\n    @Override\n    default int compareTo( MinMaxRange o ) {\n        if ( this == o ) {\n            return 0;\n        }\n        if ( o == null ) {\n            return 1;\n        }\n\n        Number min = this.getMin();\n        if( min instanceof Double || min instanceof Float ) {\n            int minCompare = Double.compare( this.getMin().doubleValue(), o.getMin().doubleValue() );\n            if ( minCompare != 0 ) {\n                return minCompare;\n            }\n\n            return Double.compare( this.getMax().doubleValue(), o.getMax().doubleValue() );\n        }\n        else if( min instanceof Integer || min instanceof Long || min instanceof Short || min instanceof Byte ) {\n            int minCompare = Long.compare( this.getMin().longValue(), o.getMin().longValue() );\n            if ( minCompare != 0 ) {\n                return minCompare;\n            }\n\n            return Long.compare( this.getMax().longValue(), o.getMax().longValue() );\n        }\n        else if( min instanceof BigInteger ) {\n            int minCompare = ( (BigInteger)this.getMin() ).compareTo( (BigInteger)o.getMin() );\n            if ( minCompare != 0 ) {\n                return minCompare;\n            }\n\n            return ( (BigInteger)this.getMax() ).compareTo( (BigInteger)o.getMax() );\n        }\n        else if( min instanceof BigDecimal ) {\n            int minCompare = 
( (BigDecimal)this.getMin() ).compareTo( (BigDecimal)o.getMin() );\n            if ( minCompare != 0 ) {\n                return minCompare;\n            }\n\n            return ( (BigDecimal)this.getMax() ).compareTo( (BigDecimal)o.getMax() );\n        }\n\n        throw new IllegalArgumentException( \"Unknown number to compare.\" );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/MinMaxRange64.java",
    "content": "package com.pinecone.slime.unitization;\n\npublic class MinMaxRange64 implements MinMaxRange {\n    protected long           mnMin;\n    protected long           mnMax;\n    protected Precision      mPrimePrecision;  // Precision(1)=> for the array-like structure.\n\n    public MinMaxRange64( long nMin, long nMax, Precision primePrecision ) {\n        this.mnMin           = nMin;\n        this.mnMax           = nMax;\n        this.mPrimePrecision = primePrecision;\n    }\n\n    public MinMaxRange64( long nMin, long nMax ) {\n        this( nMin, nMax, NumPrecision.PRECISION_64_1 );\n    }\n\n\n    @Override\n    public Long getMin() {\n        return this.mnMin;\n    }\n\n    @Override\n    public Long getMax() {\n        return this.mnMax;\n    }\n\n    @Override\n    public void setRange( Number min, Number max ){\n        this.setMin( min );\n        this.setMax( max );\n    }\n\n    @Override\n    public void setMin  ( Number min ){\n        this.mnMin = min.longValue();\n    }\n\n    @Override\n    public void setMax  ( Number max ) {\n        this.mnMax = max.longValue();\n    }\n\n    @Override\n    public Long span() {\n        return this.mnMax - this.mnMin;\n    }\n\n    @Override\n    public Precision getPrimePrecision() {\n        return this.mPrimePrecision;\n    }\n\n    @Override\n    public boolean contains( Range that ) {\n        MinMaxRange range = (MinMaxRange) that;\n        return this.mnMin <= range.getMin().longValue() && this.mnMax >= range.getMax().longValue();\n    }\n\n    @Override\n    public boolean contains( Object elm ) {\n        long e = ( (Number) elm ).longValue();\n        return this.mnMin <= e && this.mnMax >= e;\n    }\n\n    @Override\n    public int compareTo( MinMaxRange o ) {\n        if ( this == o ) {\n            return 0;\n        }\n        if ( o == null ) {\n            return 1;\n        }\n\n        int minCompare = Long.compare( this.mnMin, o.getMin().longValue() );\n        if ( minCompare != 0 
) {\n            return minCompare;\n        }\n\n        return Long.compare( this.mnMax, o.getMax().longValue() );\n    }\n\n    @Override\n    public boolean equals( Object obj ) {\n        if( super.equals( obj ) ) {\n            return true;\n        }\n\n        if( obj instanceof MinMaxRange ) {\n            return this.getMin().equals( ((MinMaxRange) obj).getMin().longValue() ) && this.getMax().equals( ((MinMaxRange) obj).getMax().longValue() );\n        }\n        return false;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/NumPrecision.java",
    "content": "package com.pinecone.slime.unitization;\n\npublic interface NumPrecision extends Precision {\n    NumPrecision PRECISION_64_1 = new Precision64(1);\n\n    Number numericValue();\n\n    long longValue();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/PartialOrderRange.java",
    "content": "package com.pinecone.slime.unitization;\n\npublic class PartialOrderRange<T extends Comparable<T > > implements PartialRange<T > {\n    protected T mMin;\n    protected T mMax;\n\n    public PartialOrderRange( T min, T max ) {\n        this.mMin           = min;\n        this.mMax           = max;\n    }\n\n    @Override\n    public T getMin(){\n        return this.mMin;\n    }\n\n    @Override\n    public T getMax(){\n        return this.mMax;\n    }\n\n    @Override\n    public void   setRange( T min, T max ){\n        this.mMin = min;\n        this.mMax = max;\n    }\n\n    @Override\n    public void   setMin  ( T min ){\n        this.mMin = min;\n    }\n\n    @Override\n    public void   setMax  ( T max ){\n        this.mMax = max;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public boolean contains( Range that ) {\n        if ( !( that instanceof PartialOrderRange ) ) {\n            throw new ClassCastException(\"Range is not a PartialOrderRange.\");\n        }\n        PartialOrderRange<T> range = (PartialOrderRange<T>) that;\n\n        return ( range.getMin().compareTo( this.mMin ) >= 0) && ( range.getMax().compareTo( this.mMax ) <= 0 );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public boolean equals( Object obj ) {\n        if( super.equals( obj ) ) {\n            return true;\n        }\n\n        if( obj instanceof PartialOrderRange ) {\n            PartialOrderRange<T> range = (PartialOrderRange<T>) obj;\n            return ( range.getMin().compareTo( this.mMin ) == 0) && ( range.getMax().compareTo( this.mMax ) == 0 );\n        }\n        return false;\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/PartialRange.java",
    "content": "package com.pinecone.slime.unitization;\n\npublic interface PartialRange<T extends Comparable<T > > extends Range, Comparable<PartialRange<T > > {\n    T getMin();\n\n    T getMax();\n\n    void   setRange( T min, T max );\n\n    void   setMin  ( T min );\n\n    void   setMax  ( T max );\n\n    @Override\n    default String toJSONString() {\n        return String.format(\n                \"{\\\"class\\\":\\\"%s\\\",\\\"min\\\":%s,\\\"max\\\":%s}\", this.className(), this.getMin(), this.getMax()\n        );\n    }\n\n    @Override\n    default boolean contains( Object elm ) {\n        if ( elm == null ) {\n            return false;\n        }\n\n        if ( !( elm instanceof Comparable ) ) {\n            throw new ClassCastException( \"Element is not comparable.\" );\n        }\n\n        @SuppressWarnings( \"unchecked\" )\n        Comparable<T> comparableElm = (Comparable<T>) elm;\n\n        return ( comparableElm.compareTo((T) this.getMin() ) >= 0 ) && ( comparableElm.compareTo((T) this.getMax()) <= 0 ); // [min, max]\n    }\n\n    @Override\n    default int compareTo( PartialRange<T > o ) {\n        int minCompare = this.getMin().compareTo( o.getMin() );\n        if ( minCompare != 0 ) {\n            return minCompare;\n        }\n        return this.getMax().compareTo( o.getMax() );\n    }\n\n\n    default int compareTo( T that ) {\n        if( this.contains( that ) ) {\n            return 0;\n        }\n        else if ( this.getMin().compareTo( that ) > 0 ) { // this > that\n            return 1;\n        }\n        else if ( this.getMax().compareTo( that ) < 0 ) { // this < that\n            return -1;\n        }\n        else {\n            return 0; // Jesus!\n        }\n    }\n\n\n    IntervalRangeComparator DefaultIntervalRangeComparator = new IntervalRangeComparator();\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/Precision.java",
    "content": "package com.pinecone.slime.unitization;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Precision extends Pinenut {\n\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/Precision64.java",
    "content": "package com.pinecone.slime.unitization;\n\npublic class Precision64 extends Number implements NumPrecision {\n    protected long precision;\n\n    public Precision64( long precision ) {\n        this.precision = precision;\n    }\n\n    @Override\n    public Number numericValue() {\n        return this.precision;\n    }\n\n    @Override\n    public long longValue() {\n        return this.precision;\n    }\n\n    @Override\n    public int intValue() {\n        return (int) this.precision;\n    }\n\n    @Override\n    public float floatValue() {\n        return (float) this.precision;\n    }\n\n    @Override\n    public double doubleValue() {\n        return (double) this.precision;\n    }\n\n    @Override\n    public String toString() {\n        return Long.toString( this.precision );\n    }\n\n    @Override\n    public String toJSONString() {\n        return this.toString();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/main/java/com/pinecone/slime/unitization/Range.java",
    "content": "package com.pinecone.slime.unitization;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface Range extends Pinenut {\n    boolean contains( Range that );\n\n    boolean contains( Object elm );\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/test/java/com/cache/TestCache.java",
    "content": "package com.cache;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.unit.Mapnut;\nimport com.pinecone.framework.unit.MultiValueMapper;\nimport com.pinecone.framework.unit.multi.MultiSetMaptron;\nimport com.pinecone.framework.unit.top.*;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.slime.unitization.PartialOrderRange;\nimport com.pinecone.slime.unitization.PartialRange;\n\nimport java.util.*;\n\npublic class TestCache {\n    public static void testRange() {\n        PartialRange<String > partialOrderRange = new PartialOrderRange<>( \"A\", \"F\" );\n        Debug.trace( partialOrderRange.contains( \"C\" ) );\n        Debug.trace( partialOrderRange.contains( \"G\" ) );\n    }\n\n    public static void testRangeMap() {\n        Map<PartialRange<Long >, Long > map = new LinkedTreeMap<>( PartialRange.DefaultIntervalRangeComparator );\n\n        Debug.trace( (new PartialOrderRange<>(   10L,  20L )).compareTo( 10L ) );\n        Debug.trace( \"8\".compareTo( \"9\" ) );\n\n        map.put( new PartialOrderRange<>(   0L,  10L ),  10L );\n        map.put( new PartialOrderRange<>(  10L,  20L ),  20L );\n        map.put( new PartialOrderRange<>(  20L,  30L ),  30L );\n\n        map.put( new PartialOrderRange<>(  40L,  50L ),  50L );\n\n\n        map.put( new PartialOrderRange<>(  90L, 100L ), 100L );\n        map.put( new PartialOrderRange<>(  60L,  70L ),  70L );\n        map.put( new PartialOrderRange<>(  80L,  90L ),  90L );\n\n\n        Debug.trace( map );\n        Debug.trace( ((LinkedTreeMap<PartialRange<Long>, Long>) map).treeEntrySet() );\n\n        for ( int i = 0; i < 100; i++ ) {\n            Debug.trace( i, map.containsKey( (long)i ) );\n        }\n    }\n\n    public static void testMultiValueEntity(){\n        MultiValueMapper<Integer, Integer > maptron = new MultiSetMaptron<>();\n        //MultiValueMapper<Integer, 
Integer > maptron = new MultiListMaptron<>();\n        maptron.add( 1, 10 );\n        maptron.add( 2, 20 );\n        maptron.add( 2, 22 );\n        maptron.add( 2, 21 );\n        maptron.add( 2, 22 );\n\n        Debug.trace( maptron, maptron.collection(), maptron.collectionValues() );\n    }\n\n    public static void testMultiTreeToptron(){\n        MultiTreeToptron<Integer, Integer > toptron = new MultiTreeToptron<>( 8 );\n        toptron = new LinkedMultiTreeToptron<>( 8 );\n\n        toptron.add( 1, 10 );\n        toptron.add( 1, 11 );\n        toptron.add( 4, 40 );\n        toptron.add( 1, 12 );\n        toptron.add( 2, 20 );\n        toptron.add( 3, 30 );\n        toptron.add( 3, 31 );\n        toptron.add( 5, 50 );\n        toptron.add( 6, 60 );\n\n        Debug.trace( toptron.getMap(), toptron.topEntrySet(), toptron.bottomEntrySet(), toptron.collection() );\n\n        toptron.update( 1, 16, 12 );\n        Debug.trace( toptron.getMap() );\n        //toptron.setTopmostSize( 4 );\n        //Debug.trace( toptron.getMap() );\n    }\n\n    public static void testTreeToptron(){\n        //TreeToptron<Integer, Integer > toptron = new TreeToptron<>( 3 );\n        LinkedTreeToptron<Integer, Integer > toptron = new LinkedTreeToptron<>( 3 );\n\n        toptron.put( 1, 10 );\n        toptron.put( 5, 50 );\n        toptron.put( 1, 11 );\n        toptron.put( 1, 12 );\n        toptron.put( 2, 20 );\n        toptron.put( 3, 30 );\n        toptron.put( 3, 30 );\n        toptron.put( 4, 40 );\n        toptron.put( 6, 60 );\n\n        Debug.trace( toptron.getMap(), toptron.topEntrySet(), toptron.bottomEntrySet() );\n    }\n\n    public static void testTopper(){\n        Topper<KeyValue<Integer, Integer > > heapTopper = new HeapTopper<>(4, new Comparator<KeyValue<Integer, Integer>>() {\n            @Override\n            public int compare( KeyValue<Integer, Integer > o1, KeyValue<Integer, Integer > o2 ) {\n                return o1.getKey().compareTo( o2.getKey() );\n          
  }\n        });\n        heapTopper.add( new KeyValue<>( 1,10 ) );\n        heapTopper.add( new KeyValue<>( 5,50 ) );\n        heapTopper.add( new KeyValue<>( 2,20 ) );\n        heapTopper.add( new KeyValue<>( 9,90 ) );\n        heapTopper.add( new KeyValue<>( 4,40 ) );\n        heapTopper.add( new KeyValue<>( 6,60 ) );\n        heapTopper.add( new KeyValue<>( 3,30 ) );\n\n        Debug.trace( heapTopper, heapTopper.nextEviction() );\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestCache.testRange();\n            //TestCache.testRangeMap();\n            //TestCache.testMultiValueEntity();\n            //TestCache.testMultiTreeToptron();\n            //TestCache.testTreeToptron();\n            TestCache.testTopper();\n\n\n            Mapnut<Integer, Long > map = new LinkedTreeMap<>();\n            map.put( 4, 40L );\n            map.put( 3, 30L );\n            Debug.trace( map );\n            map.getEntryByKey( 4 ).setValue( 41L );\n\n            Debug.trace( map );\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Slime/src/test/java/com/chunk/TestChunk.java",
    "content": "package com.chunk;\n\n\nimport com.pinecone.slime.chunk.marshaling.*;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.slime.chunk.Chunk;\nimport com.pinecone.slime.chunk.ContiguousPage;\nimport com.pinecone.slime.chunk.RangedChunk64;\nimport com.pinecone.slime.chunk.RangedPage64;\nimport com.pinecone.slime.chunk.scheduler.DirectPagePool;\nimport com.pinecone.slime.chunk.scheduler.FixedPageDivider64;\nimport com.pinecone.slime.chunk.scheduler.LocalBatchActivePageScheduler64;\n\nimport java.util.List;\n\n\npublic class TestChunk {\n    public static void testPool() {\n        RangedPage64   page64   = new RangedPage64( 0, 850,0 );\n        DirectPagePool pagePool = new DirectPagePool( RangedPage64.class );\n\n        FixedPageDivider64 divider64 = new FixedPageDivider64( page64, pagePool, 100 );\n\n        Debug.trace( divider64.getMaxAllocations() );\n\n        for ( int i = 0; i < divider64.getMaxAllocations(); i++ ) {\n            RangedPage64 page = (RangedPage64) divider64.allocate();\n            Debug.trace( page.getRange(), ( (RangedPage64)page.parent() ).getRange(), page.getId(), page.size() );\n        }\n    }\n\n    public static void testPartition() {\n        RangedPage64                      page64       = new RangedPage64( 0, 850,0 );\n        DirectPagePool                    pagePool     = new DirectPagePool( RangedPage64.class );\n        PreparedPageDividerPartition64 partition64  = new PreparedPageDividerPartition64( page64, 0, 100 );\n\n\n        FixedPageDivider64 divider64 = new FixedPageDivider64( partition64, pagePool );\n\n        Debug.trace( divider64.getMaxAllocations() );\n\n        for ( int i = 0; i < divider64.getMaxAllocations(); i++ ) {\n            RangedPage64 page = (RangedPage64) divider64.allocate();\n            Debug.trace( page.getRange(), ( (RangedChunk64)page.parent() ).getRange(), page.getId(), page.size() );\n        }\n\n        Debug.trace( partition64 
);\n    }\n\n    public static void testPartitioner() {\n        RangedPage64                      page64        = new RangedPage64( 0, 1000,0 );\n        PreparedEvenSeqPagePartitioner64 partitioner64 = new PreparedEvenSeqPagePartitioner64( page64, 5 );\n\n        SequentialPagePartitionGroup64 group64 = partitioner64.partition();\n        List<Chunk > l = group64.getSequentialChunks();\n        Debug.trace( ( (PreparedPageDividerPartition64)l.get(0)).eachPerPage() );\n\n        BuddyPrepPartitionDividerStrategy64 strategy64 = new BuddyPrepPartitionDividerStrategy64( 100, 2, 1 );\n        strategy64.assignment( group64 );\n\n        for ( int i = 0; i < l.size();++ i ) {\n            Debug.trace( ((PreparedPageDividerPartition64)l.get(i)).eachPerPage() );\n        }\n    }\n\n    public static void testPartitionablePageDivider() {\n        RangedPage64                      page64        = new RangedPage64( 0, 1000,0 );\n        PreparedEvenSeqPagePartitioner64  partitioner64 = new PreparedEvenSeqPagePartitioner64( page64, 6 );\n\n        SequentialPagePartitionGroup64 group64 = partitioner64.partition();\n        List<Chunk > l = group64.getSequentialChunks();\n        Debug.trace( ( (PreparedPageDividerPartition64)l.get(0)).eachPerPage() );\n\n        BuddyPrepPartitionDividerStrategy64 strategy64 = new BuddyPrepPartitionDividerStrategy64( 100, 2, 1 );\n        strategy64.assignment( group64 );\n\n\n        DirectPagePool pagePool = new DirectPagePool( RangedPage64.class );\n        PartitionablePageDivider64 divider64 = new PartitionablePageDivider64( page64, pagePool, group64 );\n\n        Debug.trace( divider64.getMaxAllocations() );\n\n        for ( int i = 0; i < divider64.getMaxAllocations(); i++ ) {\n            RangedPage64 page = (RangedPage64) divider64.allocate();\n            Debug.trace( page.getRange(), page.getId(), page.size() );\n        }\n    }\n\n    public static void testSimpleScheduler() {\n        RangedPage64   page64   = new 
RangedPage64( 0, 850,0 );\n        DirectPagePool pagePool = new DirectPagePool( RangedPage64.class );\n\n        LocalBatchActivePageScheduler64 scheduler64 = new LocalBatchActivePageScheduler64( new FixedPageDivider64( page64, pagePool, 100 ), page64.getId() + 1, 4 );\n        ContiguousPage[] pages = scheduler64.activates();\n\n        Debug.trace( scheduler64.getDivider().getMaxAllocations() );\n        for ( int i = 0; i < pages.length; i++ ) {\n            Debug.trace( pages[i].getRange(), pages[i].getId() );\n        }\n\n        //scheduler64.deactivate( pages[1] );\n        scheduler64.deactivate( pages );\n        Debug.hhf();\n\n        pages = scheduler64.activates();\n        for ( int i = 0; i < pages.length; i++ ) {\n            Debug.trace( pages[i].getRange(), pages[i].getId() );\n        }\n\n        scheduler64.deactivate( pages );\n        Debug.hhf();\n\n        pages = scheduler64.activates();\n        for ( int i = 0; i < pages.length; i++ ) {\n            Debug.trace( pages[i].getRange(), pages[i].getId() );\n        }\n    }\n\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestBasicTransaction.testSequential();\n            //TestBasicTransaction.testParallel();\n            //TestChunk.testPool();\n            TestChunk.testSimpleScheduler();\n            //TestChunk.testPartition();\n            //TestChunk.testPartitioner();\n            //TestChunk.testPartitionablePageDivider();\n\n\n\n//            double factor = 0.2;\n//            int page = 1000;\n//            int stratum = 2;\n//            int start = 0;\n//            int end = 10000;\n//\n//            splitInterval(start, end, factor, page, stratum);\n\n            return 0;\n        }, (Object[]) args );\n    }\n\n\n    public static void splitInterval(int start, int end, double factor, int page, int stratum) {\n      
  int totalRange = end - start;\n        int subIntervalSize = (int) (totalRange * factor);\n        int numSubIntervals = (int) (1 / factor);\n\n        for (int i = 0; i < numSubIntervals; i++) {\n            int subStart = start + i * subIntervalSize;\n            int subEnd = Math.min(subStart + subIntervalSize, end);\n            splitSubInterval(subStart, subEnd, page, stratum);\n            page = Math.max(10, page / stratum); // Update the page size for the next interval\n        }\n    }\n\n    public static void splitSubInterval(int start, int end, int page, int stratum) {\n        int currentStart = start;\n\n        while (currentStart < end) {\n            int currentEnd = Math.min(currentStart + page, end);\n            System.out.println(currentStart + \", \" + currentEnd);\n            currentStart = currentEnd;\n        }\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Springram/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>pinecones</artifactId>\n        <groupId>com.pinecones</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.summer.springram</groupId>\n    <artifactId>springram</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Pinecones/Springram/src/main/java/com/pinecone/summer/spring/SpringKernel.java",
    "content": "package com.pinecone.summer.spring;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.summer.spring.util.ConfigUtils;\nimport org.springframework.boot.SpringApplication;\nimport org.springframework.boot.autoconfigure.SpringBootApplication;\nimport org.springframework.context.ConfigurableApplicationContext;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.functions.Executor;\n\n@SpringBootApplication\npublic class SpringKernel implements Pinenut {\n    private ConfigurableApplicationContext mContext;\n    private SpringApplication              mSpringApplication;\n    private Springram                      mSpringram;\n    private Class<? >                      mPrimarySources = SpringKernel.class;\n    private Executor                       mInitializer;\n\n\n    void setSpringram( Springram springram ) {\n        this.mSpringram = springram;\n    }\n\n    public void setPrimarySources( Class<?> primarySources ) {\n        this.mPrimarySources = primarySources;\n    }\n\n    public void execute( String... 
args ) {\n        this.mSpringApplication = new SpringApplication( this.mPrimarySources );\n\n        PatriarchalConfig jo = this.mSpringram.getConfig();\n        if( jo instanceof JSONConfig ) {\n            Map<String, Object > confs = ConfigUtils.recursionMapToPropertiesMap( (JSONConfig)jo );\n            if( !confs.containsKey( \"spring.config.location\" ) ){\n                confs.put( \"spring.config.location\", \"\" );\n            }\n//            this.mSpringApplication.setDefaultProperties(Map.of(\n//                    //\"spring.config.location\", \"./system/setup/application.yaml\"\n//                    \"spring.config.location\", \"\",\n//                    \"server.port\", \"3912\"\n//            ));\n\n            this.mSpringApplication.setDefaultProperties( confs );\n        } // Otherwise, using default config `application.yaml`.\n\n        if( this.mInitializer != null ) {\n            try{\n                this.mInitializer.execute();\n            }\n            catch ( Exception ignore ) {\n                // Ignore\n            }\n        }\n        this.mContext = this.mSpringApplication.run( args );\n    }\n\n    public void terminate() {\n        if ( this.mContext != null ) {\n            this.mContext.close();\n        }\n    }\n\n    public ConfigurableApplicationContext getContext() {\n        return this.mContext;\n    }\n\n    public SpringApplication getSpringApplication() {\n        return this.mSpringApplication;\n    }\n\n    public void setInitializer( Executor initializer ) {\n        this.mInitializer = initializer;\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Springram/src/main/java/com/pinecone/summer/spring/Springram.java",
    "content": "package com.pinecone.summer.spring;\n\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.hydra.servgram.Servgram;\n\nimport org.springframework.boot.SpringApplication;\nimport org.springframework.context.ConfigurableApplicationContext;\n\npublic interface Springram extends Servgram {\n    void execute() throws Exception;\n\n    void join() throws InterruptedException;\n\n    void join( long millis ) throws InterruptedException;\n\n    ConfigurableApplicationContext getContext();\n\n    Springram setPrimarySources( Class<?> primarySources ) ;\n\n    SpringApplication getSpringApplication();\n\n    void setInitializer( Executor initializer );\n}\n"
  },
  {
    "path": "Pinecones/Springram/src/main/java/com/pinecone/summer/spring/Springron.java",
    "content": "package com.pinecone.summer.spring;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.hydra.servgram.ArchServgramium;\n\nimport org.springframework.boot.SpringApplication;\nimport org.springframework.context.ConfigurableApplicationContext;\n\nimport java.util.concurrent.TimeoutException;\n\npublic class Springron extends ArchServgramium implements Springram {\n    protected String[]                 mSpringbootArgs;\n    protected Thread                   mSpringPrimaryThread;\n    protected SpringKernel             mSpringKernel;\n\n    public Springron( String szName, Processum parent, String[] springbootArgs ) {\n        super( szName, parent );\n        this.mSpringbootArgs      = springbootArgs;\n        this.mSpringKernel        = new SpringKernel();\n        this.mSpringKernel.setSpringram( this );\n\n        this.mSpringPrimaryThread = new Thread(new Runnable() {\n            @Override\n            public void run() {\n                Springron.this.infoLifecycle( \"VitalizingSubsystem\", \"Start\" );\n                Springron.this.mSpringKernel.execute( Springron.this.mSpringbootArgs );\n                Springron.this.infoLifecycle( \"VitalizingSubsystem\", \"Subsystem readied\" );\n                while ( Springron.this.mSpringKernel.getContext().isActive() ) {\n                    try {\n                        Thread.sleep( 100 );\n                    }\n                    catch ( InterruptedException e ) {\n                        Springron.this.mSpringKernel.terminate();\n                        Thread.currentThread().interrupt();\n                        break;\n                    }\n                }\n\n                Springron.this.infoLifecycle( \"SubsystemTermination\", \"Subsystem terminated\" );\n            }\n        });\n\n        this.mSpringPrimaryThread.setName( 
this.getName() + \"Primary\" + this.mSpringPrimaryThread.getName() );\n        this.setThreadAffinity( this.mSpringPrimaryThread );\n    }\n\n    public Springron( String szName, Processum parent ) {\n        this( szName, parent, new String[0] );\n    }\n\n    @Override\n    public Springram setPrimarySources( Class<?> primarySources ) {\n        this.mSpringKernel.setPrimarySources( primarySources );\n        return this;\n    }\n\n    @Override\n    public void join() throws InterruptedException {\n        this.mSpringPrimaryThread.join();\n    }\n\n    @Override\n    public void join( long millis ) throws InterruptedException {\n        this.mSpringPrimaryThread.join( millis );\n    }\n\n    @Override\n    public void execute() throws Exception {\n        this.mSpringPrimaryThread.start();\n    }\n\n    @Override\n    public ConfigurableApplicationContext getContext() {\n        return this.mSpringKernel.getContext();\n    }\n\n    @Override\n    public SpringApplication getSpringApplication() {\n        return this.mSpringKernel.getSpringApplication();\n    }\n\n    @Override\n    public void setInitializer( Executor initializer ) {\n        this.mSpringKernel.setInitializer(initializer);\n    }\n\n    @Override\n    public void terminate() {\n        this.mSpringKernel.terminate();\n\n        long nStart = System.currentTimeMillis();\n        try{\n            while ( this.mSpringKernel.getContext().isActive() ){\n                Thread.sleep( 50 );\n                if( System.currentTimeMillis() - nStart > 5000 ) {\n                    throw new TimeoutException( \"Terminating springboot timeout.\" );\n                }\n            }\n\n            this.mSpringPrimaryThread.join();\n        }\n        catch ( InterruptedException e ) {\n            Thread.currentThread().interrupt();\n        }\n        catch ( TimeoutException e1 ) {\n            throw new ProxyProvokeHandleException( e1 );\n        }\n    }\n\n    @Override\n    public void interrupt() 
{\n        super.interrupt();\n    }\n\n    @Override\n    public void apoptosis() {\n        this.terminate();\n    }\n\n    @Override\n    public void kill() {\n        this.terminate();\n        super.kill();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Springram/src/main/java/com/pinecone/summer/spring/util/ConfigUtils.java",
    "content": "package com.pinecone.summer.spring.util;\n\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.unit.tabulate.FamilyEntryNameEncoder;\nimport com.pinecone.framework.unit.tabulate.GenericNamespaceFamilyEntryNameEncoder;\nimport com.pinecone.framework.unit.tabulate.RecursiveFamilyIterator;\nimport com.pinecone.framework.unit.tabulate.UnitFamilyNode;\n\n\nimport java.util.Collection;\nimport java.util.Map;\n\npublic final class ConfigUtils {\n    /**\n     * Convert JSON formatted or recursion map to spring-properties map.\n     * So spring can using json or json5.\n     * e.g. { server : { port : 1234 } } => { server.port : 1234 }.\n     * @return Spring Properties Map\n     */\n    public static Map<String, Object > recursionMapToPropertiesMap( Map<String, Object > recursionMap ){\n        RecursiveFamilyIterator<Object > iterator = new RecursiveFamilyIterator<>( recursionMap, true );\n        FamilyEntryNameEncoder entryNameEncoder = new GenericNamespaceFamilyEntryNameEncoder( \".\", true );\n        Map<String, Object > neo = Units.spawnExtendParent( recursionMap );\n\n        while( iterator.hasNext() ) {\n            UnitFamilyNode<Object, Object > node = iterator.next();\n\n            String k = entryNameEncoder.encode( node );\n            k = k.substring( 1 ); // Skip '.'\n            neo.put( k, node.getEntry().getValue() );\n        }\n\n        return neo;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>pinecones</artifactId>\n        <groupId>com.pinecones</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.summer</groupId>\n    <artifactId>summer</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>commons-fileupload</groupId>\n            <artifactId>commons-fileupload</artifactId>\n            <version>1.3.1</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/ArchConnectDispatcher.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.summer.http.HttpEntityParser;\nimport com.pinecone.summer.prototype.ConnectDispatcher;\nimport com.pinecone.summer.prototype.Pagesion;\nimport com.pinecone.summer.prototype.Wizard;\nimport com.pinecone.Pinecone;\n\nimport javax.servlet.ServletException;\nimport javax.servlet.http.HttpServletRequest;\nimport java.io.IOException;\nimport java.lang.reflect.InvocationTargetException;\n\n/**\n *  Pinecone For Java SystemDispatcher [ Bean Nuts Pinecone PineconeJava Summer SystemDispatcher ]\n *  Copyright © 2008 - 2024 Bean Nuts Foundation ( DR.Undefined ) All rights reserved. [Mr.A.R.B / WJH]\n *  Tip:\n *  *****************************************************************************************\n *  For Pinecone Net Family, we adopted PHP-Style as function paradigm because for Pinecone CPP\n *  we has been referenced PHP-Style for long history and in this case there is almost no inappropriate\n *  scenarios has found so for each version of Pinecone in other language we still inherited Pinecone CPP.\n *  For following case we defined:\n *   => $_GET      : Parsed query string key-values json object.\n *   => $_POST     : Whichever form or multipart for key-values json object.\n *   => $_GPC      : Using Java parameter map but json object format.\n *   => $_FILES    : Like PHP $_FILES just files map.\n *   => $_REQUEST  : Currently session global http request.\n *   => $_RESPONSE : Currently session global http response.\n *  *****************************************************************************************\n */\npublic class ArchConnectDispatcher implements ConnectDispatcher {\n    protected ArchHostSystem      mArchHostSystem;\n    protected RouterType          mRouterType       = RouterType.QueryString;\n    protected String              mszURI            = \"\";\n    protected String[]      
      mURIParts         = new String[0];\n    protected String              mszDomainHref     = \"\";\n    protected ArchWizardSummoner  mWizardSummoner   = null;\n    protected HttpEntityParser    mHttpEntityParser = null ;\n\n    protected String mszWizardCommand   = null;\n    protected String mszModelCommand    = null;\n    protected String mszControlCommand  = null;\n\n    protected ArchConnection mConnection  = null;\n\n    public ArchConnectDispatcher( ArchHostSystem system, RouterType routerType ){\n        this.mArchHostSystem   = system;\n        this.mHttpEntityParser = this.mArchHostSystem.mHttpEntityParser;\n        this.mRouterType       = routerType;\n    }\n\n    public ArchHostSystem getHostSystem(){\n        return this.mArchHostSystem;\n    }\n\n    public HttpEntityParser getHttpEntityParser(){\n        return this.mHttpEntityParser;\n    }\n\n    public ArchWizardSummoner getWizardSummoner() {\n        return this.mWizardSummoner;\n    }\n\n    public ArchConnection getConnection(){\n        return this.mConnection;\n    }\n\n\n\n    public String getWizardCommand() {\n        return this.mszWizardCommand;\n    }\n\n    public String getModelCommand() {\n        return this.mszModelCommand;\n    }\n\n    public String getControlCommand() {\n        return this.mszControlCommand;\n    }\n\n\n\n    @Override\n    public void afterConnectionAccepted( Connectiom connectiom ) throws ServletException, IOException {\n        connectiom.response.setCharacterEncoding( this.mArchHostSystem.getServerCharset() );\n    }\n\n    /** Http Method Handler **/\n    @Override\n    public void handleGet( Connectiom connectiom ) throws ServletException, IOException {\n        this.afterConnectionAccepted(connectiom);\n        this.mConnection = new GetConnection( this, connectiom );\n        this.invokeDispatchBus();\n    }\n\n    @Override\n    public void handlePost( Connectiom connectiom ) throws ServletException, IOException {\n        
this.afterConnectionAccepted(connectiom);\n        this.mConnection = new PostConnection( this, connectiom );\n        this.invokeDispatchBus();\n    }\n\n    @Override\n    public void handleHead( Connectiom connectiom ) throws ServletException, IOException {\n        this.afterConnectionAccepted(connectiom);\n    }\n\n    @Override\n    public void handleOptions( Connectiom connectiom ) throws ServletException, IOException {\n        this.afterConnectionAccepted(connectiom);\n    }\n\n    @Override\n    public void handlePut( Connectiom connectiom ) throws ServletException, IOException {\n        this.afterConnectionAccepted(connectiom);\n    }\n\n    @Override\n    public void handlePatch( Connectiom connectiom ) throws ServletException, IOException {\n        this.afterConnectionAccepted(connectiom);\n    }\n\n    @Override\n    public void handleDelete( Connectiom connectiom ) throws ServletException, IOException {\n        this.afterConnectionAccepted(connectiom);\n    }\n\n    @Override\n    public void handleTrace( Connectiom connectiom ) throws ServletException, IOException {\n        this.afterConnectionAccepted(connectiom);\n    }\n\n\n    @Override\n    public void stop() throws RuntimeException {\n        throw new TerminateSessionException(\"This session or sequence has been terminated.\");\n    }\n\n    public void jspRenderPage( String szDispatcherPath ) throws IOException, ServletException {\n        this.mConnection.getRequest().getRequestDispatcher( szDispatcherPath ).forward(this.mConnection.getRequest(), this.mConnection.getResponse());\n    }\n\n    public void jspTPLRenderPage( String szTemplatePath ) throws IOException, ServletException {\n        this.mConnection.getRequest().getRequestDispatcher( this.mArchHostSystem.getRealTemplatePath() + szTemplatePath ).forward(this.mConnection.getRequest(), this.mConnection.getResponse());\n    }\n\n\n\n\n    @Override\n    public void traceSystemErrorMsg( String szTitle, String szErrorMsg ) throws 
IOException, ServletException {\n        this.mConnection.getRequest().setAttribute(\"pineVersion\", Pinecone.VERSION);\n        this.mConnection.getRequest().setAttribute(\"pineReleaseDate\", Pinecone.RELEASE_DATE);\n        this.mConnection.getRequest().setAttribute(\"javaVersion\", System.getProperty(\"java.version\"));\n        this.mConnection.getRequest().setAttribute(\"pageTitle\", szTitle);\n        this.mConnection.getRequest().setAttribute(\"errorMsg\", szErrorMsg);\n\n        this.jspRenderPage( this.mArchHostSystem.getDefaultErrorPagePath() );\n    }\n\n    @Override\n    public void traceSystemErrorMsg( int nErrorID, String szTitle, String szErrorMsg ) throws IOException, ServletException {\n        this.mConnection.getResponse().setStatus(nErrorID);\n        this.traceSystemErrorMsg( szTitle,szErrorMsg );\n    }\n\n    @Override\n    public void traceSystem404Error() throws IOException, ServletException {\n        this.traceSystem404Error(\"<h2>You are trying to access an undefined file !</h2>\" );\n    }\n\n    @Override\n    public void traceSystem404Error( String szErrorMsg ) throws IOException, ServletException {\n        this.traceSystemErrorMsg( 404,\"SERVER 404 ERROR\",szErrorMsg );\n    }\n\n    @Override\n    public void traceSystem500Error( String szErrorMsg ) throws IOException, ServletException {\n        this.traceSystemErrorMsg( 500,\"SERVER 500 ERROR\",szErrorMsg );\n    }\n\n    public void echoIndexPage() throws IOException, ServletException {\n        this.traceSystemErrorMsg( \"WELCOME TO PINECONE JAVA\" ,\"<h1>Everything should be fine.</h1>\" );\n    }\n\n\n\n\n    protected void beforeDispath() throws ServletException, IOException {\n    }\n\n    protected void afterDispatch() throws ServletException, IOException {\n\n    }\n\n    @Override\n    public void invokeDispatchBus() throws ServletException, IOException {\n        this.requestReceived();\n        this.dispatch();\n    }\n\n    @Override\n    public void dispatch() 
throws IOException, ServletException {\n        this.beforeDispath();\n        this.profileURL();\n        this.toSummon();\n        this.afterDispatch();\n    }\n\n    @Override\n    public void requestReceived() throws ServletException, IOException {\n        try {\n            this.mszWizardCommand = this.mConnection.$_GET().getString(this.mArchHostSystem.getWizardParameter());\n        } catch (JSONException e){ this.mszWizardCommand = \"\"; }\n        try {\n            this.mszModelCommand = this.mConnection.$_GET().getString(this.mArchHostSystem.getModelParameter());\n        } catch (JSONException e){ this.mszModelCommand = \"\"; }\n        try {\n            this.mszControlCommand = this.mConnection.$_GET().getString(this.mArchHostSystem.getControlParameter());\n        }\n        catch (JSONException e){ this.mszControlCommand = \"\"; }\n\n        this.mWizardSummoner   = SystemSpawner.spawnWizardSummoner( this.mArchHostSystem.getWizardSummonerConfig(), this.mConnection );\n    }\n\n    protected void profileURL() throws ServletException {\n        HttpServletRequest request = this.mConnection.getRequest();\n        StringBuffer  sbRequestURL = request.getRequestURL();\n        String        szRequestURI = request.getRequestURI();\n\n        if( szRequestURI.equals( \"/\" ) ){\n            if( sbRequestURL.charAt( sbRequestURL.length() - 1 ) == '/' ){\n                this.mszDomainHref = sbRequestURL.deleteCharAt( sbRequestURL.length() - 1 ).toString();\n            }\n            else {\n                this.mszDomainHref = sbRequestURL.toString();\n            }\n        }\n        else {\n            String            szRequestURL = sbRequestURL.toString();\n            String[] debris = szRequestURL.split( szRequestURI );\n            if( debris.length >= 1 ) {\n                this.mszDomainHref = debris[0];\n            }\n            else {\n                throw new ServletException( \"Illegal URL given '\" + szRequestURL + \"'.\" );\n            
}\n        }\n\n        this.mszURI    = szRequestURI;\n        this.mURIParts = StringUtils.trimEmptyElement( this.mszURI.split( \"/\" ) );\n        //Debug.trace( this.mURIParts, this.mszURI );\n    }\n\n    protected void summonByQueryString() throws ServletException, IOException {\n        switch ( this.mszWizardCommand ){\n            case \"\":{\n                this.echoIndexPage();\n                break;\n            }\n            default:{\n                this.mWizardSummoner.summonAndExecute( this.mszWizardCommand );\n                break;\n            }\n        }\n    }\n\n    protected void summonByRouterPath() throws ServletException, IOException {\n        Object routum = this.mArchHostSystem.getPrimeRouterDispatcher().queryRoutum( this.mszURI );\n        if( routum != null ) {\n            ArchRouterDispatcher.RouterClass routerClass = null;\n            ArchRouterDispatcher.RouterMethod routerMethod = null;\n            if( routum instanceof ArchRouterDispatcher.RouterClass ) {\n                routerClass = (ArchRouterDispatcher.RouterClass) routum;\n            }\n            else if( routum instanceof ArchRouterDispatcher.RouterMethod ) {\n                routerMethod = (ArchRouterDispatcher.RouterMethod) routum;\n                routerClass  = routerMethod.parent;\n            }\n\n            if( routerClass != null ) {\n\n                this.mszWizardCommand = routerClass.antetype.getSuperclass().getSimpleName();\n                Wizard wizard = this.mWizardSummoner.summonIfExist( this.mszWizardCommand );\n                if( routerMethod != null ) {\n                    Pagesion pagesion = (Pagesion) wizard;\n                    pagesion.setRenderum( routerMethod.antetype );\n                    try{\n                        routerMethod.antetype.invoke( pagesion );\n                    }\n                    catch ( IllegalAccessException | InvocationTargetException e ){\n                        e.printStackTrace();\n                   
 }\n\n                    pagesion.render();\n                }\n            }\n\n        }\n\n        String szClass = \"\";\n        if( this.mURIParts.length > 0 ) {\n            szClass = this.mURIParts[0];\n        }\n\n        switch ( szClass ){\n            case \"\":{\n                this.echoIndexPage();\n                break;\n            }\n            default:{\n                this.mszWizardCommand = szClass;\n                this.mWizardSummoner.summonAndExecute( szClass );\n                break;\n            }\n        }\n    }\n\n    protected void toSummon() throws ServletException, IOException {\n        switch ( this.mRouterType ) {\n            case QueryString:{\n                this.summonByQueryString();\n                break;\n            }\n            case PathRouter:{\n                this.summonByRouterPath();\n                break;\n            }\n            default:{\n                break;\n            }\n        }\n    }\n\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/ArchConnection.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.unit.LinkedMultiValueMap;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.summer.multiparts.MultipartFile;\nimport com.pinecone.summer.multiparts.commons.CommonsMultipartFiles;\nimport com.pinecone.summer.http.HttpEntityParser;\nimport com.pinecone.summer.http.HttpMethod;\nimport com.pinecone.summer.prototype.Connectson;\n\nimport javax.servlet.ServletOutputStream;\nimport javax.servlet.http.Cookie;\nimport javax.servlet.http.HttpServlet;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.util.Map;\nimport java.util.TreeMap;\n\npublic abstract class ArchConnection implements Connectson {\n    protected ArchConnectDispatcher mDispatcher;\n    protected ArchHostSystem        mHostSystem;\n\n    protected Connectiom          mConnectiom;\n    protected HttpServletRequest  mCurrentMultipartRequest;\n\n    protected JSONObject mGlobalParameterContainer  = null; /** GPC  **/\n    protected JSONObject mGETMapContainer           = null; /** GET  **/\n    protected JSONObject mPOSTMapContainer          = null; /** POST **/\n\n    protected HttpMethod                 mCurrentHttpMethod    = HttpMethod.GET;\n    protected CommonsMultipartFiles      mMultipartFilesMaker  = null ;\n    protected HttpEntityParser           mHttpEntityParser     = null ;\n    protected Map<String, MultipartFile> mFilesMapContainer    = new LinkedMultiValueMap() ;\n    protected Map<String, Cookie>        mCookiesContainer     = new TreeMap<>();\n\n\n    public ArchConnection( ArchConnectDispatcher dispatcher ) {\n        this.mDispatcher       = dispatcher;\n        this.mHostSystem       = this.mDispatcher.getHostSystem();\n        this.mHttpEntityParser = this.mDispatcher.getHttpEntityParser();\n    }\n\n    protected ArchConnection(ArchConnectDispatcher dispatcher, Connectiom 
connectiom ) {\n        this( dispatcher );\n        this.apply(connectiom);\n    }\n\n    protected ArchConnection apply( Connectiom connectiom ) {\n        this.mConnectiom = connectiom;\n        this.mMultipartFilesMaker = new CommonsMultipartFiles( this );\n        this.mConnectiom.afterConnectionRipe( this );\n        return this;\n    }\n\n\n    @Override\n    public ArchConnectDispatcher getDispatcher(){\n        return this.mDispatcher;\n    }\n\n    @Override\n    public ArchHostSystem getHostSystem() {\n        return this.mHostSystem;\n    }\n\n\n    @Override\n    public HttpServletRequest getRequest() {\n        return this.mConnectiom.request;\n    }\n\n    @Override\n    public HttpServletResponse getResponse() {\n        return this.mConnectiom.response;\n    }\n\n    @Override\n    public HttpServlet getServlet() {\n        return this.mConnectiom.servlet;\n    }\n\n    @Override\n    public HttpServletRequest getMultipartRequest() {\n        return this.mCurrentMultipartRequest;\n    }\n\n    @Override\n    public boolean isMultipartRequest() {\n        return this.mMultipartFilesMaker.isMultipart();\n    }\n\n    @Override\n    public JSONObject $_GPC(){\n        return this.mGlobalParameterContainer;\n    }\n\n    @Override\n    public JSONObject $_GET(){\n        return this.mGETMapContainer;\n    }\n\n    @Override\n    public JSONObject $_POST(){\n        return this.mPOSTMapContainer;\n    }\n\n    @Override\n    public PrintWriter writer() throws IOException {\n        return this.getResponse().getWriter();\n    }\n\n    @Override\n    public ServletOutputStream out() throws IOException {\n        return this.getResponse().getOutputStream();\n    }\n\n    @Override\n    public HttpServletRequest $_REQUEST(){\n        return this.$_REQUEST( false );\n    }\n\n    @Override\n    public HttpServletRequest $_REQUEST ( boolean bUsingMultipart ){\n        if ( bUsingMultipart && this.isMultipartRequest() ){\n            return 
this.mCurrentMultipartRequest;\n        }\n        return this.mConnectiom.request;\n    }\n\n    @Override\n    public HttpServletResponse $_RESPONSE(){\n        return this.mConnectiom.response;\n    }\n\n    @Override\n    public Map<String, MultipartFile> $_FILES() {\n        return this.mFilesMapContainer;\n    }\n\n    @Override\n    public Map<String, Cookie > $_COOKIE() {\n        return this.mCookiesContainer;\n    }\n\n\n    @Override\n    public HttpMethod currentHttpMethod(){\n        return this.mCurrentHttpMethod;\n    }\n\n\n    public CommonsMultipartFiles getMultipartFilesMaker() {\n        return this.mMultipartFilesMaker;\n    }\n\n    public Connectiom getConnectiom() {\n        return this.mConnectiom;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/ArchHostSystem.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.util.io.FileUtils;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.summer.http.CommonHttpEntityParser;\nimport com.pinecone.summer.http.HttpEntityParser;\nimport com.pinecone.summer.prototype.HostSystem;\n\n\nimport javax.servlet.ServletContext;\nimport javax.servlet.ServletException;\nimport java.io.IOException;\nimport java.nio.charset.Charset;\nimport java.text.SimpleDateFormat;\nimport java.util.Date;\n\npublic class ArchHostSystem implements HostSystem {\n    public static SystemServlet       G_SystemServlet;\n\n    protected SystemSpawner           mSystemSpawner          = null;\n    protected SystemServlet           mSystemServlet          = null;\n\n\n    protected JSONObject              mGlobalConfig           = null;\n    protected JSONObject              mSystemConfig           = null;\n    protected JSONObject              mPublicWizardConfig     = null;\n    protected ServletContext          mServletContext         = null   ;\n\n    protected String                  mszModelParameter       = \"action\"  ;\n    protected String                  mszControlParameter     = \"control\" ;\n    protected String                  mszModelClassSuffix     = \"Model\"   ;\n    protected String                  mszControlClassSuffix   = \"Control\" ;\n    protected String                  mszWizardParameter      = \"do\"      ;\n    protected String                  mszServerCharset        = \"UTF-8\"   ;\n\n    protected String                  mszTemplatePath                  ;\n    protected String                  mszRealTemplatePath              ;\n    protected String                  mszConfigPath                    ;\n    protected String                  mszSimpResourcesPath             ;\n    protected String                  mszJavaWebInfoSuffix             ;\n    protected String                  
mszRootClassPath                 ;\n    protected String                  mszSystemPath          = null    ;\n    protected String                  mszResourcesPath       = null    ;\n\n    protected HttpEntityParser        mHttpEntityParser = null ;\n    protected ArchRouterDispatcher    mRouterDispatcher = null ;\n\n    /** Class Function **/\n    public String readFileContentAll( String szPath ) throws IOException {\n        return FileUtils.readAll( szPath, Charset.forName( this.getServerCharset() ) );\n    }\n\n    private void parseConfig()  throws IOException {\n        this.mGlobalConfig = new JSONMaptron( this.readFileContentAll( this.mszConfigPath )  );\n    }\n\n    private void construct() throws IOException {\n        if( this.mServletContext != null ){\n            this.mszServerCharset        = this.mServletContext.getInitParameter(\"encoding\");\n        }\n\n        this.parseConfig();\n        if( this.mGlobalConfig != null ){\n            this.mSystemConfig           = this.mGlobalConfig.getJSONObject(\"SummerSystem\");\n            this.mszWizardParameter      = this.mSystemConfig.getString(\"WizardParameter\");\n            this.mszModelParameter       = this.mSystemConfig.getString(\"ModelParameter\");\n            this.mszControlParameter     = this.mSystemConfig.getString(\"ControlParameter\");\n            this.mPublicWizardConfig     = this.mSystemConfig.getJSONObject(\"PublicWizardConfig\");\n            this.mszTemplatePath         = this.mSystemConfig.getString(\"TemplatePath\");\n            this.mszSimpResourcesPath    = this.mSystemConfig.getString(\"ResourcesPath\");\n            this.mszJavaWebInfoSuffix    = this.mSystemConfig.getString(\"JavaWebInfoSuffix\");\n            this.mszRealTemplatePath     = this.mszJavaWebInfoSuffix + this.mszTemplatePath;\n            this.mszModelClassSuffix     = this.mSystemConfig.getString(\"ModelClassSuffix\");\n            this.mszControlClassSuffix   = 
this.mSystemConfig.getString(\"ControlClassSuffix\");\n        }\n\n        this.mSystemSpawner    = new SystemSpawner();\n\n        if ( this.mSystemServlet != null ) {\n            this.registerRootClassPath( this.mSystemServlet.getClassPath() );\n        }\n\n        this.mHttpEntityParser    = new CommonHttpEntityParser( this.getServerCharset() );\n        this.mRouterDispatcher    = new ArchRouterDispatcher( this );\n    }\n\n    public ArchHostSystem( String szResourcesPath, String szConfigFileName ) throws IOException {\n        this.mszResourcesPath = szResourcesPath;\n        this.mszConfigPath    = szResourcesPath + szConfigFileName;\n        this.construct();\n    }\n\n    public ArchHostSystem( SystemServlet servlet ) throws IOException {\n        this.mSystemServlet  = servlet;\n        this.mServletContext = this.mSystemServlet.getServletContext();\n        this.mszConfigPath   = ArchHostSystem.getSystemConfigPath(\n                this.mSystemServlet.getClassPath(), this.getServletContext().getInitParameter(\"IlluminationConfigLocation\")\n        );\n\n        this.construct();\n    }\n\n\n    public HttpEntityParser getHttpEntityParser(){\n        return this.mHttpEntityParser;\n    }\n\n    public JSONObject getGlobalConfig() {\n        return this.mGlobalConfig;\n    }\n\n    public JSONObject getSystemConfig() {\n        return this.mSystemConfig;\n    }\n\n\n    public JSONObject getPublicWizardConfig() {\n        return this.mPublicWizardConfig;\n    }\n\n    public String getControlParameter() {\n        return this.mszControlParameter;\n    }\n\n    public String getWizardParameter() {\n        return this.mszWizardParameter;\n    }\n\n    public String getModelParameter() {\n        return this.mszModelParameter;\n    }\n\n    public String getModelClassSuffix() { return this.mszModelClassSuffix; }\n\n    public String getControlClassSuffix() { return this.mszControlClassSuffix; }\n\n    public String getTemplatePath() {\n        return 
this.mszTemplatePath;\n    }\n\n    public String getRealTemplatePath() {\n        return this.mszRealTemplatePath;\n    }\n\n    public String getServerCharset() {\n        return this.mszServerCharset;\n    }\n\n    public ServletContext getServletContext() {\n        return this.mServletContext;\n    }\n\n    public String getDefaultErrorPagePath() {\n        return this.mSystemConfig.getString(\"DefaultErrorPageTpl\");\n    }\n\n    public JSONObject getHosts() {\n        return this.mSystemConfig.getJSONObject(\"Hosts\");\n    }\n\n    public String getResourcesPath() {\n        if( this.mszResourcesPath == null ){\n            this.mszResourcesPath = this.mSystemServlet.getClassPath() + this.mszSimpResourcesPath + \"/\";\n        }\n        return this.mszResourcesPath;\n    }\n\n    public void savageSetResourcesPath( String szUncheckedResourcesPath ){\n        this.mszResourcesPath = szUncheckedResourcesPath;\n    }\n\n\n    /** Upload Function **/\n    public JSONObject getUploadConfig() {\n        return this.mSystemConfig.getJSONObject(\"UploadConfig\");\n    }\n\n    public long getSingleFileSizeMax() {\n        return this.getUploadConfig().getLong(\"SingleFileSizeMax\");\n    }\n\n    public long getSumFileSizeMax() {\n        return this.getUploadConfig().getLong(\"SumFileSizeMax\");\n    }\n\n    public String getUploadEncode() {\n        return this.getUploadConfig().getString(\"UploadEncode\");\n    }\n\n    public String getUploadTempDir() {\n        return this.getUploadConfig().getString(\"UploadTempDir\");\n    }\n\n\n\n\n    /** System Class **/\n    @Override\n    public String getWizardSummonerConfig() { return this.getSystemConfig().getString(\"WizardSummoner\"); }\n\n    @Override\n    public String getWizardPackageName(){\n        return \"Wizard\";\n    }\n\n    public SystemSpawner getSystemSpawner() { return this.mSystemSpawner; }\n\n    @Override\n    public ArchConnectDispatcher handleByDispatcher(RouterType routerType ) {\n        
return new ArchConnectDispatcher( this, routerType );\n    }\n\n    @Override\n    public ArchRouterDispatcher getPrimeRouterDispatcher() {\n        return this.mRouterDispatcher;\n    }\n\n\n    public String getSystemPath() {\n        if( this.mszRootClassPath != null && this.mszJavaWebInfoSuffix != null ){\n            if( this.mszSystemPath == null ) {\n                String[] szPathChip = this.mszRootClassPath.split( this.mszJavaWebInfoSuffix );\n                if( szPathChip.length > 0 ){\n                    this.mszSystemPath = szPathChip[0];\n                }\n            }\n        }\n        return this.mszSystemPath;\n    }\n\n    public String getRootClassPath() {\n        return this.mszRootClassPath;\n    }\n\n    protected void registerRootClassPath( String szRootClassPath ) {\n        this.mszRootClassPath = szRootClassPath;\n    }\n\n    public SystemServlet getSystemServlet(){\n        return this.mSystemServlet;\n    }\n\n    public void init() throws ServletException {\n        System.err.println( \"----------------------------------------------\" );\n        System.err.println( \"Bean Nuts Pinecone PineconeJava Summer Has Been Initiated\" );\n        System.err.println( \"Time: \" + ( new SimpleDateFormat(\"yyyy-MM-dd HH：mm：ss\") ).format(new Date()) );\n        System.err.println( \"----------------------------------------------\" );\n    }\n\n\n    protected static String getSystemConfigPath( String szClassPath , String szIlluminationConfigLocation ) {\n        if( szIlluminationConfigLocation.startsWith(\"classpath:\") ){\n            szIlluminationConfigLocation = szIlluminationConfigLocation.replaceFirst( \"classpath:\", szClassPath );\n        }\n        return szIlluminationConfigLocation;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/ArchPageson.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.summer.prototype.ModelEnchanter;\nimport com.pinecone.summer.prototype.Pagesion;\nimport com.pinecone.summer.prototype.Pageson;\nimport com.pinecone.summer.prototype.Wizard;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Method;\n\npublic abstract class ArchPageson extends ArchWizardum implements Pageson {\n    protected JSONObject  mPageData                     =  null  ;\n    protected boolean     mbGlobalEnchanter             =  false ;\n\n    public ArchPageson( ArchConnection session ) {\n        super( session );\n        this.mPageData = new JSONMaptron();\n        if( this instanceof Pagesion ){\n            this.mbGlobalEnchanter = this.hasEnchanterTrait();\n        }\n        this.appendDefaultPageDate();\n    }\n\n    protected boolean hasEnchanterTrait() {\n        Annotation[] annotations = this.getClass().getAnnotations();\n        for( Annotation annotation : annotations ){\n            if( annotation instanceof ModelEnchanter ){\n                return ((ModelEnchanter) annotation).value();\n            }\n        }\n        return false;\n    }\n\n    protected void appendDefaultPageDate(){\n        this.mPageData.put( \"PrototypeName\", this.prototypeName() );\n        this.mPageData.put( \"szMainTitle\", ((Wizard)this).getTitle() );\n        this.mPageData.put( \"szWizardRole\", ((Wizard)this).getModularRole() );\n    }\n\n    public void forward ( ArchPageson that ) {\n        this.mPageData = that.mPageData;\n    }\n\n\n    public JSONObject getPageData(){\n        return this.mPageData;\n    }\n\n    public String toJSONString(){\n        return this.mPageData.toString();\n    }\n\n    public void setEnchanterRole( boolean bRole ){\n        this.mbGlobalEnchanter = bRole;\n    }\n\n 
   public boolean isEnchanter() {\n        return this.mbGlobalEnchanter;\n    }\n\n    public void setRenderum( Method fnRenderum ) {\n\n    }\n\n    public void render() throws ServletException, IOException {\n        if( this instanceof Pagesion && this.mbGlobalEnchanter ){\n            this.writer().print( ((Pagesion)this).toJSONString() );\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/ArchRouterDispatcher.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.util.lang.ClassNameFetcher;\nimport com.pinecone.framework.util.lang.NamespaceCollector;\nimport com.pinecone.summer.prototype.Controller;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.ClassUtils;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.summer.prototype.*;\n\nimport java.lang.annotation.Annotation;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\n\npublic class ArchRouterDispatcher implements RouterDispatcher {\n    public class RouterClass {\n        public boolean isPagesionController = false;\n        public boolean isSimpleController   = false;\n        public RouterMapping routerMapping  = null;\n        public Class<?>      antetype       = null;\n    }\n\n    public class RouterMethod {\n        public RouterClass   parent         = null;\n        public RouterMapping routerMapping  = null;\n        public Method        antetype       = null;\n    }\n\n    protected ArchHostSystem             mArchHostSystem            ;\n    protected JSONArray                  mRoutumPackageNSs          ;\n    protected Map<String, RouterClass >  mDesignatedRouterClassMap  = new LinkedHashMap<>();\n    protected Map<String, RouterMethod > mDesignatedRouterMethodMap = new LinkedHashMap<>();\n    protected NamespaceCollector         mClassLoader               = new ClassNameFetcher();\n\n    protected static String urlNormalize( String url ) {\n        if( !url.startsWith( \"/\" ) ) {\n            url = \"/\" + url;\n        }\n        return url;\n    }\n\n    protected static String[] fetchPaths( String[] urls, String szDefaultName ) {\n        String[] paths = urls;\n        if( paths.length == 0 ) {\n            paths = new String[] { ArchRouterDispatcher.urlNormalize( 
szDefaultName ) };\n        }\n        else {\n            String[] norPaths = new String[paths.length];\n            for ( int i = 0; i < paths.length; i++ ) {\n                String path = paths[i];\n                norPaths[i] = ArchRouterDispatcher.urlNormalize( path );\n            }\n            paths = norPaths;\n        }\n        return paths;\n    }\n\n    protected void fetchRouterClass( String szRoutumPackageNS ) {\n        List<String> classNames = this.mClassLoader.fetch( szRoutumPackageNS );\n        if ( classNames != null ) {\n            for ( String className : classNames ) {\n                try {\n                    className = className.substring( className.indexOf( szRoutumPackageNS ) );\n                    Class<?> antetype = Thread.currentThread().getContextClassLoader().loadClass( className );\n                    Annotation[] annotations = antetype.getAnnotations();\n\n                    RouterClass routerClass          = new RouterClass();\n                    routerClass.isPagesionController = ClassUtils.isAssignable( Pagesion.class, antetype );\n                    routerClass.isSimpleController   = false;\n                    routerClass.antetype             = antetype;\n                    for( Annotation annotation : annotations ) {\n                        if( annotation instanceof Controller) {\n                            routerClass.isSimpleController = true;\n                        }\n                        else if( annotation instanceof RouterMapping ) {\n                            routerClass.routerMapping = (RouterMapping)annotation;\n                        }\n                    }\n\n                    if( routerClass.isPagesionController || routerClass.isSimpleController ) {\n                        String[] paths = null;\n                        if( routerClass.routerMapping != null ) {\n                            RouterMapping routerMapping = routerClass.routerMapping;\n                            String 
szDefaultName = antetype.getSimpleName();\n                            if ( routerClass.isPagesionController ) {\n                                szDefaultName = antetype.getSuperclass().getSimpleName();\n                            }\n\n                            paths = ArchRouterDispatcher.fetchPaths( routerMapping.value(), szDefaultName );\n                            for ( String path : paths ) {\n                                this.mDesignatedRouterClassMap.put( path, routerClass );\n                            }\n                        }\n                        this.fetchRouterMethod( routerClass, paths, antetype );\n                    }\n\n\n                }\n                catch ( ClassNotFoundException e ) {\n                    e.printStackTrace();\n                }\n            }\n        }\n    }\n\n    protected void fetchRouterMethod( RouterClass routerClass, String[] parentPaths, Class<?> that ) {\n        Method[] methods = that.getDeclaredMethods();\n        for ( Method method : methods ) {\n            Annotation[] annotations = method.getAnnotations();\n            RouterMapping routerMapping = null;\n            for( Annotation annotation : annotations ) {\n                if( annotation instanceof RouterMapping ) {\n                    routerMapping = (RouterMapping)annotation;\n                    break;\n                }\n            }\n\n            if( routerMapping != null ) {\n                String szDefaultName = method.getName();\n                String[] paths = ArchRouterDispatcher.fetchPaths( routerMapping.value(), szDefaultName );\n                String[] finalPath;\n                int nParentLen = 0;\n                if( parentPaths != null ){\n                    nParentLen = parentPaths.length;\n                    finalPath = new String[ parentPaths.length * paths.length ];\n                }\n                else {\n                    finalPath = new String[ paths.length ];\n                }\n\n                
RouterMethod routerMethod = new RouterMethod();\n                routerMethod.antetype = method;\n                routerMethod.parent = routerClass;\n                routerMethod.routerMapping = routerMapping;\n\n\n                int k = 0;\n                nParentLen = nParentLen == 0 ? 1 : nParentLen; // 1 for only children loop.\n                for ( int i = 0; i < nParentLen; i++ ) {\n                    for ( int j = 0; j < paths.length; j++ ) {\n                        if( routerMapping.relative() && parentPaths != null ) {\n                            finalPath[ k ] = parentPaths[i] + paths[j];\n                        }\n                        else {\n                            finalPath[ k ] = paths[j];\n                        }\n                        this.mDesignatedRouterMethodMap.put( finalPath[k], routerMethod );\n                        ++k;\n                    }\n                }\n                Debug.trace( finalPath );\n            }\n        }\n\n    }\n\n    protected void fetchRouterMaps() {\n        for( Object obj : this.mRoutumPackageNSs ) {\n            String szNS = (String) obj;\n            this.fetchRouterClass( szNS );\n        }\n    }\n\n    public ArchRouterDispatcher( ArchHostSystem system ){\n        this.mArchHostSystem      = system;\n        this.mRoutumPackageNSs    = this.mArchHostSystem.getSystemConfig().optJSONArray( \"RoutumPackageNSs\" );\n        this.fetchRouterMaps();\n    }\n\n    public JSONArray getRoutumPackageNSs() {\n        return this.mRoutumPackageNSs;\n    }\n\n    public Object queryRoutum( String szURI ){\n        RouterClass routerClass = this.mDesignatedRouterClassMap.get( szURI );\n        if( routerClass == null ) {\n            return this.mDesignatedRouterMethodMap.get( szURI );\n        }\n        return routerClass;\n    }\n\n    public Pagesion spawnPagesion( RouterClass routerClass, ArchConnection connection ) {\n        Pagesion obj = null;\n        try {\n            try{\n              
  Constructor<?> constructor = routerClass.antetype.getConstructor( ArchConnection.class );\n                obj = (Pagesion) constructor.newInstance( connection );\n            }\n            catch ( NoSuchMethodException | InvocationTargetException e1 ){\n                e1.printStackTrace();\n            }\n        }\n        catch ( IllegalAccessException | InstantiationException e ){\n            System.err.println( \"Summon Compromised: [\" + e.toString() + \"]\" );\n        }\n        return obj;\n    }\n\n    public Object spawnController( RouterClass routerClass ) {\n        Object obj = null;\n        try {\n            try{\n                Constructor<?> constructor = routerClass.antetype.getConstructor();\n                obj = constructor.newInstance();\n            }\n            catch ( NoSuchMethodException | InvocationTargetException e1 ){\n                e1.printStackTrace();\n            }\n        }\n        catch ( IllegalAccessException | InstantiationException e ){\n            System.err.println( \"Summon Compromised: [\" + e.toString() + \"]\" );\n        }\n        return obj;\n    }\n\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/ArchWizard.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.summer.prototype.JSONBasedControl;\nimport com.pinecone.summer.prototype.Pagesion;\nimport com.pinecone.summer.prototype.Wizard;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\n\npublic abstract class ArchWizard implements Wizard {\n    protected ArchConnection         mConnection         =  null  ;\n    protected ArchHostSystem         mParentSystem    =  null  ;\n    protected ArchConnectDispatcher  mDispatcher      =  null  ;\n    private Pagesion                 mYokedModel      =  null  ;\n    private JSONBasedControl         mYokedControl    =  null  ;\n\n\n    public ArchWizard ( ArchConnection session ) {\n        this.mConnection = session;\n        this.mDispatcher = this.mConnection.getDispatcher();\n        this.mParentSystem = this.mDispatcher.getHostSystem();\n    }\n\n    @Override\n    public ArchConnection getConnection() {\n        return this.mConnection;\n    }\n\n    @Override\n    public ArchHostSystem getHostSystem() {\n        return this.mParentSystem;\n    }\n\n    @Override\n    public ArchConnectDispatcher getDispatcher(){\n        return this.mDispatcher;\n    }\n\n\n\n    public void soulBound(Pagesion model, JSONBasedControl control ){\n        this.mYokedModel   = model;\n        this.mYokedControl = control;\n    }\n\n    public Pagesion revealYokedModel(){\n        return this.mYokedModel;\n    }\n\n    public JSONBasedControl revealYokedControl(){\n        return this.mYokedControl;\n    }\n\n\n\n    public void beforeSummon() {\n    }\n\n    public void summoning() throws ServletException, IOException {\n        try{\n            if( this.mYokedControl != null ){\n                this.mYokedControl.beforeDispatch();\n                this.mYokedControl.dispatch();\n                this.mYokedControl.afterDispatch();\n            }\n\n            if( this.mYokedModel != null ){\n                this.mYokedModel.beforeDispatch();\n            
    this.mYokedModel.dispatch();\n                this.mYokedModel.render();\n                this.mYokedModel.afterDispatch();\n            }\n\n        }\n        catch ( TerminateSessionException e ){\n            System.out.println( \"Wizard: One of caught session or sequence has been terminated.\" );\n        }\n    }\n\n    public void afterSummon() {}\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/ArchWizardSummoner.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.summer.prototype.*;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\n\npublic class ArchWizardSummoner implements WizardSummoner {\n    protected HostSystem                 mParentSystem;\n    protected ArchConnection             mConnection;\n    protected Wizard                     mLastWizard     = null ;\n\n    public ArchWizardSummoner(ArchConnection connection ) {\n        this.mParentSystem = connection.getHostSystem();\n        this.mConnection = connection;\n    }\n\n    public HostSystem getSystem() {\n        return this.mParentSystem;\n    }\n\n   public Wizard getLastSummoned(){\n        return this.mLastWizard;\n   }\n\n\n\n    @Override\n    public String queryNamespace( String szNickName ){\n        return this.mParentSystem.getWizardPackageName() + \".\" + szNickName;\n    }\n\n    protected String spawnFullModelPrototypeName( String szNickName ){\n        return this.queryNamespace(szNickName) + \".\" + szNickName + this.mParentSystem.getModelClassSuffix();\n    }\n\n    protected String spawnFullControlPrototypeName( String szNickName ){\n        return this.queryNamespace(szNickName) + \".\" + szNickName + this.mParentSystem.getControlClassSuffix();\n    }\n\n    protected Pagesion spawnWizardModelByCallHisName(String szClassName ){\n        Pagesion obj = null;\n        try {\n            Class<?> pVoid = Class.forName( szClassName );\n            try{\n                Constructor<?> constructor = pVoid.getConstructor( ArchConnection.class );\n                obj = (Pagesion) constructor.newInstance( this.mConnection );\n            }\n            catch (NoSuchMethodException | InvocationTargetException e1){\n                e1.printStackTrace();\n            }\n        }\n        catch (ClassNotFoundException | IllegalAccessException | InstantiationException e){\n 
           System.err.println( \"Summon Compromised: [\" + e.toString() + \"]\" );\n            //e.printStackTrace();\n        }\n\n        return obj;\n    }\n\n    protected JSONBasedControl spawnWizardControlByCallHisName( String szClassName ){\n        JSONBasedControl obj = null;\n        try {\n            Class<?> pVoid = Class.forName( szClassName );\n            try{\n                Constructor<?> constructor = pVoid.getConstructor( ArchConnection.class );\n                obj = (JSONBasedControl) constructor.newInstance( this.mConnection );\n            }\n            catch (NoSuchMethodException | InvocationTargetException e1){\n                e1.printStackTrace();\n            }\n        }\n        catch (ClassNotFoundException | IllegalAccessException | InstantiationException e){\n            System.err.println( \"Summon Compromised: [\" + e.toString() + \"]\" );\n            //e.printStackTrace();\n        }\n\n        return obj;\n    }\n\n    public Pagesion spawnWizardModel(String szNickName ) {\n        return this.spawnWizardModelByCallHisName( this.spawnFullModelPrototypeName( szNickName ) );\n    }\n\n    public JSONBasedControl    spawnWizardControl( String szNickName ) {\n        return this.spawnWizardControlByCallHisName( this.spawnFullControlPrototypeName( szNickName ) );\n    }\n\n    @Override\n    public Wizard summon( String szNickName, Object... 
args ) throws ServletException, IOException {\n        JSONBasedControl hControl = this.spawnWizardControl( szNickName );\n        Pagesion hModel     = this.spawnWizardModel( szNickName );\n\n        ArchWizard hArchetype = this.revealArchetype( hModel ,hControl );\n        if( hArchetype == null ){\n            return null;\n        }\n        this.beforeSummon( hModel, hControl );\n        this.soulBound( hModel, hControl );\n\n        this.mLastWizard = (Wizard) hArchetype;\n        return this.mLastWizard;\n    }\n\n    @Override\n    public void executeAfterSummonSequence() throws ServletException, IOException {\n        if( this.mLastWizard != null ){\n            ((ArchWizard)this.mLastWizard).summoning();\n            ((ArchWizard)this.mLastWizard).afterSummon();\n        }\n    }\n\n\n    @Override\n    public Wizard summonIfExist( String szNickName ) throws ServletException, IOException {\n        Wizard wizard = this.summon( szNickName );\n        if( wizard == null ){\n            this.mConnection.getDispatcher().traceSystem404Error();\n        }\n        return wizard;\n    }\n\n    @Override\n    public Wizard summonAndExecute( String szNickName ) throws ServletException, IOException {\n        Wizard wizard = this.summonIfExist( szNickName );\n        this.executeAfterSummonSequence();\n        return wizard;\n    }\n\n    public ArchWizard revealArchetype( Pagesion hModel, JSONBasedControl hControl ){\n        if( hModel instanceof ArchWizard){\n            return (ArchWizard)hModel;\n        }\n        else if( hControl instanceof ArchWizard){\n            return (ArchWizard)hControl;\n        }\n        return null;\n    }\n\n    public void beforeSummon(Pagesion hModel, JSONBasedControl hControl ){\n        if( hModel instanceof ArchWizard){\n            ( (ArchWizard) hModel ).beforeSummon();\n        }\n        if( hControl instanceof ArchWizard){\n            ( (ArchWizard) hControl ).beforeSummon();\n        }\n    }\n\n    public void 
soulBound(Pagesion hModel, JSONBasedControl hControl ){\n        if( hModel instanceof ArchWizard){\n            ( (ArchWizard) hModel ).soulBound( hModel, hControl );\n        }\n        if( hControl instanceof ArchWizard){\n            ( (ArchWizard) hControl ).soulBound( hModel, hControl );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/ArchWizardum.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.summer.multiparts.MultipartFile;\nimport com.pinecone.summer.prototype.Wizardum;\n\nimport javax.servlet.ServletException;\nimport javax.servlet.ServletOutputStream;\nimport javax.servlet.http.Cookie;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.util.Map;\n\npublic abstract class ArchWizardum extends ArchWizard implements Wizardum {\n    protected PrintWriter mConnectWriter                =  null;\n    protected ServletOutputStream mConnectOut           =  null;\n\n    public ArchWizardum( ArchConnection session ) {\n        super( session );\n    }\n\n    public ArchHostSystem system(){\n        return this.mParentSystem;\n    }\n\n    public ArchConnectDispatcher getSystemDispatcher() {\n        return this.mDispatcher;\n    }\n\n    public JSONObject $_GPC(){\n        return this.getConnection().$_GPC();\n    }\n\n    public JSONObject $_GET(){\n        return this.getConnection().$_GET();\n    }\n\n    public JSONObject $_POST(){\n        return this.getConnection().$_POST();\n    }\n\n    @Override\n    public PrintWriter writer() {\n        try{\n            if( this.mConnectWriter == null ) {\n                this.mConnectWriter = this.getConnection().writer();\n            }\n            return this.mConnectWriter;\n        }\n        catch ( IOException e ) {\n            throw new IllegalStateException( \"Illegal invoke writer.\", e );\n        }\n    }\n\n    @Override\n    public ServletOutputStream out() {\n        try{\n            if( this.mConnectOut == null ) {\n                this.mConnectOut = this.getConnection().out();\n            }\n            return this.mConnectOut;\n        }\n        catch ( IOException e ) {\n            throw new IllegalStateException( \"Illegal invoke out.\", e );\n        }\n    
}\n\n    public HttpServletRequest $_REQUEST(){\n        return this.getConnection().$_REQUEST();\n    }\n\n    public HttpServletRequest getCurrentMultipartRequest() {\n        return this.getConnection().getMultipartRequest();\n    }\n\n    public HttpServletResponse $_RESPONSE(){\n        return this.getConnection().$_RESPONSE();\n    }\n\n    @Override\n    public Map<String, Cookie> $_COOKIE() {\n        return this.getConnection().$_COOKIE();\n    }\n\n    @Override\n    public Map<String, MultipartFile> $_FILES() {\n        throw new IllegalStateException(\"Notice: $_FILES() is abstract.\");\n    }\n\n\n\n\n    public void beforeDispatch() throws IOException, ServletException {}\n\n    public void afterDispatch() throws IOException, ServletException {}\n\n    public void stop() throws RuntimeException {\n        this.mDispatcher.stop();\n    }\n\n\n    public String getWizardCommand() {\n        return this.mDispatcher.getWizardCommand();\n    }\n\n    public String getModelCommand() {\n        return this.mDispatcher.getModelCommand();\n    }\n\n    public String getControlCommand() {\n        return this.mDispatcher.getControlCommand();\n    }\n\n    public void redirect( String szURL ) throws IOException {\n        this.$_RESPONSE().sendRedirect( szURL );\n    }\n\n\n\n    public String spawnWizardQuerySpell( String szPrototype ){\n        return \"?\" + this.system().getWizardParameter() + \"=\" + szPrototype;\n    }\n\n    public String spawnActionQuerySpell( String szActionFnName ){\n        return this.spawnActionControlSpell( szActionFnName, null );\n    }\n\n    public String spawnControlQuerySpell( String szControlFnName ) {\n        return this.spawnActionControlSpell( null, szControlFnName );\n    }\n\n    public String spawnActionControlSpell( String szActionFnName, String szControlFnName ) {\n        String szQueryString = \"?\" + this.system().getWizardParameter() + \"=\" + this.prototypeName();\n        if( szActionFnName != null && 
!szActionFnName.isEmpty() ){\n            szQueryString += \"&\" + this.system().getModelParameter() + \"=\" + szActionFnName;\n        }\n        if( szControlFnName != null && !szControlFnName.isEmpty() ){\n            szQueryString += \"&\" + this.system().getControlParameter() + \"=\" + szControlFnName;\n        }\n        return szQueryString;\n    }\n\n    public Object summonNormalGenieByCallHisName(String szGenieName) throws NaughtyGenieInvokedException {\n        throw new IllegalStateException(\"Notice: summonNormalGenieByCallHisName() is abstract.\");\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/Connectiom.java",
    "content": "package com.pinecone.summer;\n\nimport javax.servlet.http.HttpServlet;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\n\npublic class Connectiom {\n    protected ArchConnection   connection;\n    public HttpServletRequest  request;\n    public HttpServletResponse response;\n    public HttpServlet         servlet;\n\n    public Connectiom( HttpServletRequest request, HttpServletResponse response, HttpServlet servlet ) {\n        this.request  = request;\n        this.response = response;\n        this.servlet  = servlet;\n    }\n\n    protected void afterConnectionRipe( ArchConnection connection ) {\n        this.connection = connection;\n    }\n\n    public ArchConnection getConnection() {\n        return this.connection;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/GetConnection.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.summer.http.HttpMethod;\n\npublic class GetConnection extends ArchConnection {\n    public GetConnection(ArchConnectDispatcher dispatcher ) {\n        super( dispatcher );\n    }\n\n    protected GetConnection(ArchConnectDispatcher dispatcher, Connectiom connectiom ) {\n        super( dispatcher, connectiom );\n    }\n\n    @Override\n    protected ArchConnection apply(Connectiom connectiom) {\n        super.apply(connectiom);\n\n        this.mCurrentHttpMethod        = HttpMethod.GET;\n        this.mGETMapContainer          = this.mHttpEntityParser.parseQueryString( this.mConnectiom.request.getQueryString(), false );\n        this.mPOSTMapContainer         = new JSONMaptron();\n        this.mGlobalParameterContainer = this.mHttpEntityParser.requestMapJsonify( this.mConnectiom.request, false );\n        this.mCookiesContainer         = this.mHttpEntityParser.cookiesMapify( this.mCookiesContainer, this.mConnectiom.request );\n        this.mMultipartFilesMaker.refresh();\n        return this;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/NaughtyGenieInvokedException.java",
    "content": "package com.pinecone.summer;\n\npublic class NaughtyGenieInvokedException extends ReflectiveOperationException {\n    public enum NaughtyGenieType {\n        N_NAUGHTY,\n        N_GHOST,\n        N_HETEROGENEOUS,\n        N_ILLEGAL\n    }\n\n    private NaughtyGenieType mNaughtyGenieType = NaughtyGenieType.N_NAUGHTY;\n\n    public NaughtyGenieInvokedException() {\n        super();\n    }\n\n    public NaughtyGenieInvokedException(String s) {\n        super(s);\n    }\n\n    public NaughtyGenieInvokedException( String s, NaughtyGenieType naughtyGenieType ) {\n        super(s);\n        this.mNaughtyGenieType = naughtyGenieType;\n    }\n\n    public NaughtyGenieInvokedException( String s, NaughtyGenieType naughtyGenieType, Throwable cause ) {\n        super( s, cause );\n        this.mNaughtyGenieType = naughtyGenieType;\n    }\n\n    public NaughtyGenieType getType() {\n        return this.mNaughtyGenieType;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/PostConnection.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.summer.http.HttpMethod;\n\npublic class PostConnection extends ArchConnection {\n    public PostConnection( ArchConnectDispatcher dispatcher ) {\n        super( dispatcher );\n    }\n\n    protected PostConnection(ArchConnectDispatcher dispatcher, Connectiom connectiom ) {\n        super( dispatcher, connectiom);\n    }\n\n    @Override\n    protected ArchConnection apply( Connectiom connectiom ) {\n        super.apply(connectiom);\n\n        this.mCurrentHttpMethod            = HttpMethod.POST;\n        this.mGETMapContainer              = this.mHttpEntityParser.parseQueryString  ( this.mConnectiom.request.getQueryString(), false );\n        this.mCookiesContainer             = this.mHttpEntityParser.cookiesMapify( this.mCookiesContainer, this.mConnectiom.request );\n        this.mMultipartFilesMaker.refresh();\n        /* Notice: 2020-12-25\n         * Java Servlet abandoned multipart post.\n         * Pinecone be forced to redefined $_POST.\n         * **/\n        if( this.mMultipartFilesMaker.isMultipart() ){\n            this.mMultipartFilesMaker.interceptMultipartFiles();\n            this.mFilesMapContainer         = this.mMultipartFilesMaker.getCurrentFilesMap();\n            this.mCurrentMultipartRequest   = this.mMultipartFilesMaker.getCurrentMultipartRequest();\n            this.mPOSTMapContainer          = this.mHttpEntityParser.siftPostFromParameterMap ( this.mCurrentMultipartRequest, false );\n            this.mGlobalParameterContainer  = this.mHttpEntityParser.requestMapJsonify        ( this.mCurrentMultipartRequest,false );\n        }\n        else {\n            this.mPOSTMapContainer         = this.mHttpEntityParser.siftPostFromParameterMap ( this.mConnectiom.request, false );\n            this.mGlobalParameterContainer = this.mHttpEntityParser.requestMapJsonify        ( this.mConnectiom.request,false );\n        }\n        return this;\n    }\n}"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/RouterType.java",
    "content": "package com.pinecone.summer;\n\npublic enum RouterType {\n    QueryString,\n    PathRouter\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/SystemRoutlet.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.summer.prototype.Servletson;\n\nimport javax.servlet.ServletException;\nimport javax.servlet.http.HttpServlet;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.IOException;\n\n/**\n *  Pinecone System Router Servlet\n *  Like spring framework for map and dispatch all path based router.\n */\npublic class SystemRoutlet extends HttpServlet implements Servletson {\n    private ArchHostSystem mSystem     = null             ;\n\n    public ArchHostSystem getHostSystem(){\n        return this.mSystem;\n    }\n\n\n    @Override\n    public void init() throws ServletException {\n        this.mSystem = ArchHostSystem.G_SystemServlet.getHostSystem();\n    }\n\n    @Override\n    protected void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException {\n        this.mSystem.handleByDispatcher( RouterType.PathRouter ).handleGet( new Connectiom( request, response, this ) );\n    }\n\n    @Override\n    protected void doPost( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException {\n        this.mSystem.handleByDispatcher( RouterType.PathRouter ).handlePost( new Connectiom( request, response, this ) );\n    }\n\n}"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/SystemServlet.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.summer.prototype.Servletson;\n\nimport javax.servlet.ServletException;\nimport javax.servlet.http.HttpServlet;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.IOException;\n\n/**\n *  Pinecone System Servlet\n *  System Primary Servlet for router(xxx:/host/), for map and dispatch all query-string based router.\n */\npublic class SystemServlet extends HttpServlet implements Servletson {\n    private ArchHostSystem mSystem     = null             ;\n    private String mszClassPath                           ;\n    private String mszArchSystemClassName                 ;\n\n\n\n    public String getClassPath() {\n        return this.mszClassPath;\n    }\n\n    public String getServletMatrixConfig() {\n        return this.mszArchSystemClassName;\n    }\n\n    public ArchHostSystem getHostSystem(){\n        return this.mSystem;\n    }\n\n\n\n\n    @Override\n    public void init() throws ServletException {\n        this.mszClassPath = ArchHostSystem.class.getProtectionDomain().getCodeSource().getLocation().getPath();\n\n        this.mszArchSystemClassName = this.getServletContext().getInitParameter(\"HostSystem\");\n\n        this.mSystem = SystemSpawner.spawnSystem( this.mszArchSystemClassName, this );\n        this.mSystem.init();\n        ArchHostSystem.G_SystemServlet = this;\n    }\n\n    @Override\n    protected void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException {\n        this.mSystem.handleByDispatcher().handleGet( new Connectiom( request, response, this ) );\n    }\n\n    @Override\n    protected void doPost( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException {\n        this.mSystem.handleByDispatcher().handlePost( new Connectiom( request, response, this ) );\n    }\n\n}\n\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/SystemSpawner.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.system.prototype.Factory;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.lang.reflect.Constructor;\nimport java.lang.reflect.InvocationTargetException;\n\npublic class SystemSpawner implements Pinenut {\n    public static ArchHostSystem spawnSystem ( String szMatrixPrototypeName, SystemServlet servlet ){\n        ArchHostSystem hMatrix = null;\n        try {\n            Class<?> pVoid = Class.forName( szMatrixPrototypeName );\n            try{\n                Constructor<?> constructor = pVoid.getConstructor( SystemServlet.class );\n                hMatrix = (ArchHostSystem) constructor.newInstance( servlet );\n            }\n            catch (NoSuchMethodException | InvocationTargetException e1){\n                System.err.println( \"Critical Error: Servlet Class is error defined.\" );\n                e1.printStackTrace();\n            }\n        }\n        catch ( ClassNotFoundException | IllegalAccessException | InstantiationException e ){\n            System.err.println( \"Critical Error: Checking 'web.xml' [ServletSystem] to find what was happened. 
\" );\n            e.printStackTrace();\n        }\n\n        return hMatrix;\n    }\n\n    public static ArchConnectDispatcher spawnDispatcher (String szDispatcherPrototypeName, ArchHostSystem system, RouterType routerType ){\n        ArchConnectDispatcher archControlDispatcher = null;\n        try {\n            Class<?> pVoid = Class.forName( szDispatcherPrototypeName );\n            try{\n                Constructor<?> constructor = pVoid.getConstructor( ArchHostSystem.class, RouterType.class );\n                archControlDispatcher = (ArchConnectDispatcher) constructor.newInstance( system, routerType );\n            }\n            catch (NoSuchMethodException | InvocationTargetException e1){\n                System.err.println( \"Critical Error: system Dispatcher is error defined.\" );\n                e1.printStackTrace();\n            }\n        }\n        catch ( ClassNotFoundException | IllegalAccessException | InstantiationException e ){\n            System.err.println( \"Critical Error: Checking 'web.xml' [SystemDispatcher] to find what was happened. \" );\n            e.printStackTrace();\n        }\n\n        return archControlDispatcher;\n    }\n\n    public static ArchWizardSummoner spawnWizardSummoner (String szSummonerPrototypeName, ArchConnection connection ) {\n        ArchWizardSummoner summoner = null;\n        try {\n            Class<?> pVoid = Class.forName( szSummonerPrototypeName );\n            try{\n                Constructor<?> constructor = pVoid.getConstructor( ArchConnection.class );\n                summoner = (ArchWizardSummoner) constructor.newInstance( connection );\n            }\n            catch (NoSuchMethodException | InvocationTargetException e1){\n                e1.printStackTrace();\n            }\n        }\n        catch (ClassNotFoundException | IllegalAccessException | InstantiationException e){\n            e.printStackTrace();\n        }\n\n        return summoner;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/TerminateSessionException.java",
    "content": "package com.pinecone.summer;\n\npublic class TerminateSessionException extends RuntimeException {\n    public TerminateSessionException() {\n    }\n\n    public TerminateSessionException(String message) {\n        super(message);\n    }\n\n    public TerminateSessionException(String message, Throwable rootCause) {\n        super(message, rootCause);\n    }\n\n    public TerminateSessionException(Throwable rootCause) {\n        super(rootCause);\n    }\n\n    public Throwable getRootCause() {\n        return this.getCause();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/WizardGeniesInvoker.java",
    "content": "package com.pinecone.summer;\n\nimport com.pinecone.framework.system.prototype.Prototype;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONArraytron;\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.summer.prototype.GenieBottle;\nimport com.pinecone.summer.prototype.JSONBasedControl;\nimport com.pinecone.summer.prototype.Pagesion;\nimport com.pinecone.summer.prototype.Wizard;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.util.Arrays;\nimport java.util.HashSet;\n\npublic class WizardGeniesInvoker {\n    private HashSet<String > mNaughtyGeniesReel               = null  ;\n    private HashSet<String > mPrivateGeniesReel               = null  ;\n    private Object           mWizardProto                     = null  ;\n    private boolean          mbSiftNaughtyGenies              = false ;\n\n    public WizardGeniesInvoker( Object wizardProto ){\n        this.mWizardProto = wizardProto;\n        this.siftNaughtyGenies( null );\n        this.siftPrivateGenies();\n    }\n\n    public WizardGeniesInvoker( Object wizardProto, boolean bSiftNaughtyGenies ){\n        this.mWizardProto        = wizardProto;\n        this.mbSiftNaughtyGenies = bSiftNaughtyGenies;\n        this.siftNaughtyGenies(null );\n        this.siftPrivateGenies();\n    }\n\n    public WizardGeniesInvoker( Object wizardProto, ArchHostSystem matrix ){\n        this.mWizardProto        = wizardProto;\n        this.mbSiftNaughtyGenies = matrix.getPublicWizardConfig().optBoolean(\"NaughtyGeniesSifted\");\n        this.siftNaughtyGenies( matrix );\n        this.siftPrivateGenies();\n    }\n\n    private void siftFromJSONArray( JSONArray jsonArray ){\n        if( jsonArray != null ){\n            for (int i = 0; i < jsonArray.length(); i++) {\n                this.mNaughtyGeniesReel.add( jsonArray.optString( i ) );\n            }\n        }\n    }\n\n    private void 
siftNaughtyGenies( ArchHostSystem matrix ){\n        if( this.mbSiftNaughtyGenies ){\n            this.prospectReel();\n            Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, GenieBottle.class );\n            Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, Pagesion.class   );\n            Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, JSONBasedControl.class );\n\n            if( matrix != null ){\n                JSONArray otherNaughtyGenies = matrix.getPublicWizardConfig().optJSONArray(\"OtherNaughtyGenies\");\n                this.siftFromJSONArray( otherNaughtyGenies );\n            }\n\n            if( this.mWizardProto instanceof Wizard){\n                try{\n                    JSONArray myNaughtyGenies = ( (Wizard)(this.mWizardProto) ).getMyNaughtyGenies();\n                    this.siftFromJSONArray( myNaughtyGenies );\n                }\n                catch (JSONException e){\n                    e.printStackTrace();\n                }\n            }\n        }\n    }\n\n    private void siftPrivateGenies(){\n        if( this.mbSiftNaughtyGenies ){\n            if( this.mPrivateGeniesReel == null ){\n                this.mPrivateGeniesReel = new HashSet<>();\n            }\n            Prototype.getDeclaredMethodsNameSet( this.mPrivateGeniesReel, this.mWizardProto.getClass() );\n        }\n    }\n\n    private void prospectReel(){\n        if( this.mNaughtyGeniesReel == null ){\n            this.mNaughtyGeniesReel = new HashSet<>();\n        }\n    }\n\n    public HashSet<String > getNaughtyGeniesReel(){\n        return this.mNaughtyGeniesReel;\n    }\n\n    public HashSet<String > getPrivateGeniesReel(){\n        return this.mPrivateGeniesReel;\n    }\n\n    public boolean willSiftNaughtyGenies(){\n        return this.mbSiftNaughtyGenies;\n    }\n\n\n    public Object invokeNormalGenieByCallHisName(String szGenieName) throws NaughtyGenieInvokedException {\n        if( this.mNaughtyGeniesReel != null && 
this.mNaughtyGeniesReel.contains( szGenieName ) ){\n            throw new NaughtyGenieInvokedException( \"Naughty genie has been invoked.\" );\n        }\n        if( this.mPrivateGeniesReel != null && !this.mPrivateGeniesReel.contains( szGenieName ) ){\n            throw new NaughtyGenieInvokedException( \"Naughty genie has been invoked.\" );\n        }\n\n        try{\n            return Prototype.invokeNoParameterMethod( this.mWizardProto, szGenieName );\n        }\n        catch ( NoSuchMethodException e1 ){\n            throw new NaughtyGenieInvokedException( \"Ghost genie has been invoked.\", NaughtyGenieInvokedException.NaughtyGenieType.N_GHOST );\n        }\n        catch ( InvocationTargetException e2 ){\n            if( e2.getCause() instanceof TerminateSessionException ){\n                throw (TerminateSessionException) e2.getCause();\n            }\n\n            throw new NaughtyGenieInvokedException( \"Heterogeneous genie has been invoked.\", NaughtyGenieInvokedException.NaughtyGenieType.N_HETEROGENEOUS, e2 );\n        }\n        catch ( IllegalAccessException e3 ){\n            throw new NaughtyGenieInvokedException( \"Illegal genie has been invoked.\", NaughtyGenieInvokedException.NaughtyGenieType.N_ILLEGAL,e3 );\n        }\n    }\n\n\n\n\n\n    public void removeNaughtyGenie( String szGenieName ){\n        if( this.mNaughtyGeniesReel != null ){\n            this.mNaughtyGeniesReel.remove( szGenieName );\n        }\n\n    }\n\n    public void removeNaughtyGenie( String[] genieNames ){\n        if( this.mNaughtyGeniesReel != null ){\n            this.mNaughtyGeniesReel.removeAll( Arrays.asList(genieNames) );\n        }\n    }\n\n    public void removeNaughtyGenie( JSONArray genieNames ){\n        if( this.mNaughtyGeniesReel != null ){\n            for (int i = 0; i < genieNames.length(); i++) {\n                this.mNaughtyGeniesReel.remove( genieNames.optString( i ) );\n            }\n        }\n    }\n\n    public void removeNaughtyGenie( 
Method[] genies ){\n        if( this.mNaughtyGeniesReel != null ) {\n            for (Method row : genies) {\n                this.mNaughtyGeniesReel.remove(row.getName());\n            }\n        }\n    }\n\n    public void removeNaughtyGenie( Class<?> wizard ){\n        if( this.mNaughtyGeniesReel != null ) {\n            for (Method row : wizard.getDeclaredMethods()) {\n                this.mNaughtyGeniesReel.remove(row.getName());\n            }\n        }\n    }\n\n    public void removeNaughtyGenie( Object wizard ){\n        this.prospectReel();\n        for (Method row : wizard.getClass().getDeclaredMethods()) {\n            this.mNaughtyGeniesReel.remove(row.getName());\n        }\n    }\n\n    public void removeNaughtyGeniesByJSON( String szGeniesJSON ){\n        this.removeNaughtyGenie( new JSONArraytron( szGeniesJSON ) );\n    }\n\n\n\n    public void addNaughtyGenie( String szGenieName ){\n        this.prospectReel();\n        this.mNaughtyGeniesReel.add( szGenieName );\n    }\n\n    public void addNaughtyGenie( String[] genieNames ){\n        this.prospectReel();\n        this.mNaughtyGeniesReel.addAll( Arrays.asList(genieNames) );\n    }\n\n    public void addNaughtyGenie( JSONArray genieNames ){\n        this.prospectReel();\n        if( genieNames != null ){\n            for (int i = 0; i < genieNames.length(); i++) {\n                this.mNaughtyGeniesReel.add( genieNames.optString( i ) );\n            }\n        }\n    }\n\n    public void addNaughtyGenie( Method[] genies ){\n        this.prospectReel();\n        for ( Method row : genies ) {\n            this.mNaughtyGeniesReel.add( row.getName() );\n        }\n    }\n\n    public void addNaughtyGenie( Class<?> wizard ){\n        this.prospectReel();\n        Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, wizard );\n    }\n\n    public void addNaughtyGenie( Object wizard ){\n        this.prospectReel();\n        Prototype.getDeclaredMethodsNameSet( this.mNaughtyGeniesReel, wizard 
);\n    }\n\n    public void addNaughtyGeniesByJSON( String szGeniesJSON ){\n        this.addNaughtyGenie( new JSONArraytron( szGeniesJSON ) );\n    }\n\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/context/ServletContextAware.java",
    "content": "package com.pinecone.summer.context;\n\nimport javax.servlet.ServletContext;\n\npublic interface ServletContextAware {\n    void setServletContext(ServletContext var1);\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/http/CommonHttpEntityParser.java",
    "content": "package com.pinecone.summer.http;\n\nimport com.pinecone.framework.util.StringUtils;\n\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONArraytron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.JSONMaptron;\n\nimport javax.servlet.http.Cookie;\nimport javax.servlet.http.HttpServletRequest;\nimport java.io.UnsupportedEncodingException;\nimport java.util.Arrays;\nimport java.util.Iterator;\nimport java.util.LinkedHashSet;\nimport java.util.Map;\n\n/**\n *  Pinecone For Java HttpEntityParser\n *  Copyright © Bean Nuts Foundation All rights reserved. [Mr.A.R.B / WJH]\n *  Tip:\n *  *****************************************************************************************\n *  Java Servlet is incredible stupid !!!\n *  What fuck did author think of merge $_GET and $_POST to ParameterMap ???\n *  And there is no separation method to separate it.\n *  So many scenarios, we need to separate those.\n *  Oh my goodness !!! Jesus fucking christ !!!\n *  *****************************************************************************************\n */\npublic class CommonHttpEntityParser implements HttpEntityParser {\n    protected String    mszCharset = \"UTF-8\";\n\n    public CommonHttpEntityParser( String szCharset ){\n        if( szCharset != null ){\n            this.mszCharset = szCharset;\n        }\n    }\n\n\n    protected String getCharset(){\n        return this.mszCharset;\n    }\n\n    private Object realValue( Object value, boolean bSafe ){\n        if( value instanceof String ){\n            String szValue = (String) value;\n            return bSafe ? 
this.valueSafety( szValue ) : szValue;\n        }\n        return value;\n    }\n\n    private LinkedHashSet<String >  parameterMapStrings2Set( String[] strings ) {\n        if( strings != null ){\n            return new LinkedHashSet<>( Arrays.asList(strings) );\n        }\n        return null;\n    }\n\n    private Object  linkedHashSet2JSONValue( LinkedHashSet<String > set, boolean bSafe ) {\n        JSONArray jsonArray = new JSONArraytron();\n        if( set != null ){\n            int nSetSize = set.size();\n            for ( Object row : set ) {\n                row = this.realValue( row, bSafe );\n                if( nSetSize != 1 ){\n                    jsonArray.put( row );\n                }\n                else {\n                    return row;\n                }\n            }\n        }\n        return jsonArray;\n    }\n\n    private Object parameterMapValue2JSONValue( Object parameterMapValue, boolean bSafe ){\n        Object jsonValue;\n\n        if( null == parameterMapValue ){\n            jsonValue = null;\n        }\n        else if ( parameterMapValue instanceof String[] ) {\n            String[] strings = (String[]) parameterMapValue;\n            jsonValue = new JSONArraytron();\n            if( strings.length == 1 ){\n                jsonValue = this.realValue( strings[0], bSafe );\n            }\n            else {\n                for ( String str : strings ) {\n                    ((JSONArray) jsonValue).put( this.realValue( str, bSafe ) );\n                }\n            }\n        }\n        else {\n            jsonValue = this.realValue( parameterMapValue, bSafe );\n        }\n\n        return jsonValue;\n    }\n\n\n\n    /** And java have no pointer, fuck! 
**/\n    public Object valueSafety( Object value ) {\n        if( value instanceof String ){\n            return StringUtils.addSlashes( (String) value );\n        }\n        return value;\n    }\n\n    public JSONObject parseQueryString  ( String szQueryString, boolean bSafe ){\n        int nParseAt = 0;\n        JSONObject hObject = new JSONMaptron();\n\n        if( szQueryString != null && !szQueryString.isEmpty() ){\n            while ( nParseAt < szQueryString.length() ) {\n                StringBuilder hKeyBuf   = new StringBuilder();\n                StringBuilder hValueBuf = new StringBuilder();\n\n                while ( nParseAt < szQueryString.length() ) {\n                    if( szQueryString.charAt(nParseAt) == '&' ){\n                        break;\n                    }\n\n                    if( szQueryString.charAt(nParseAt) == '=' ){\n                        nParseAt++;\n                        break;\n                    }\n                    hKeyBuf.append( szQueryString.charAt(nParseAt) );\n                    nParseAt++;\n                }\n\n                while ( nParseAt < szQueryString.length() ) {\n                    if( szQueryString.charAt(nParseAt) == '&' ){\n                        nParseAt++;\n                        break;\n                    }\n                    hValueBuf.append( szQueryString.charAt(nParseAt) );\n                    nParseAt++;\n                }\n\n                try {\n                    String szDecodedValueBuf = HttpURLParser.decode( hValueBuf.toString(), this.getCharset() );\n                    hValueBuf = new StringBuilder( bSafe ? 
(String) this.valueSafety(szDecodedValueBuf) : szDecodedValueBuf ) ;\n                }\n                catch ( UnsupportedEncodingException e ){\n                    e.printStackTrace();\n                }\n\n                if( hKeyBuf.length() != 0 ){\n                    try {\n                        hKeyBuf = new StringBuilder( HttpURLParser.decode(hKeyBuf.toString(), this.getCharset()) );\n                    }\n                    catch ( UnsupportedEncodingException e ){\n                        e.printStackTrace();\n                    }\n\n                    if( hKeyBuf.length() >= 2 && hKeyBuf.charAt( hKeyBuf.length() - 2 ) == '[' && hKeyBuf.charAt( hKeyBuf.length() - 1 ) == ']' ){\n                        /** Notice: Java Servlet Key of Array Value is different with PHP. { Such as 'key[]' would not trim to 'key' }**/\n                        /*hKeyBuf.replace( hKeyBuf.length() - 1,hKeyBuf.length(), \"\" );\n                        hKeyBuf.replace( hKeyBuf.length() - 1,hKeyBuf.length(), \"\" );*/\n\n                        String szKeyBuf = hKeyBuf.toString();\n                        Object rRow = hObject.opt( szKeyBuf );\n                        if( !(rRow instanceof JSONArray) ){\n                            hObject.remove( szKeyBuf );\n                            hObject.put( szKeyBuf, new JSONArraytron() );\n                        }\n                        hObject.optJSONArray( szKeyBuf ).put( hValueBuf.toString() );\n                    }\n                    else{\n                        hObject.put( hKeyBuf.toString(), hValueBuf.toString() );\n                    }\n                }\n            }\n        }\n\n        return hObject;\n    }\n\n    public JSONObject parseFormData     ( HttpServletRequest request, boolean bSafe ){\n        return this.siftPostFromParameterMap( request, bSafe );\n    }\n\n    public JSONObject requestMapJsonify ( HttpServletRequest request, boolean bSafe ) {\n        Map<?, ?> properties = 
request.getParameterMap();\n        JSONObject jsonObject = new JSONMaptron();\n\n        Iterator<?> entries = properties.entrySet().iterator();\n        Map.Entry entry;\n        while ( entries.hasNext() ) {\n            entry = (Map.Entry) entries.next();\n            String szKey = (String) entry.getKey();\n\n            jsonObject.put( szKey, this.parameterMapValue2JSONValue( entry.getValue(), bSafe ) );\n        }\n        return jsonObject;\n    }\n\n    public JSONObject siftPostFromParameterMap( HttpServletRequest request, boolean bSafe ){\n        JSONObject queryMap = this.parseQueryString( request.getQueryString(), false );\n        JSONObject postMap  = new JSONMaptron();\n        Map<?, ?>  unionMap = request.getParameterMap();\n\n        Iterator<?> entries = unionMap.entrySet().iterator();\n        Map.Entry entry;\n        while ( entries.hasNext() ) {\n            entry = (Map.Entry) entries.next();\n            String szKey = (String) entry.getKey();\n            Object parameterMapValue   = entry.getValue();\n\n            Object queryMapValue = queryMap.opt( szKey );\n            if( queryMapValue != null ){\n                if( parameterMapValue instanceof String[] ){\n                    LinkedHashSet<String > hashSet = this.parameterMapStrings2Set( (String[])parameterMapValue );\n\n                    if( queryMapValue instanceof String ){\n                        hashSet.remove( (String) queryMapValue );\n                    }\n                    else if( queryMapValue instanceof JSONArray ){\n                        for( Object row : ( (JSONArray)queryMapValue ).getArray() ){\n                            hashSet.remove( (String) row );\n                        }\n                    }\n\n                    if( !hashSet.isEmpty() ){\n                        postMap.put( szKey, this.linkedHashSet2JSONValue( hashSet, bSafe ) );\n                    }\n                }\n                else if( parameterMapValue instanceof String ){\n      
              boolean bQualified = true;\n                    if( queryMapValue instanceof String && queryMap == parameterMapValue ){\n                        bQualified = false;\n                    }\n                    else if( queryMapValue instanceof JSONArray ){\n                        for( Object row : ( (JSONArray)queryMapValue ).getArray() ){\n                            if( row == parameterMapValue ){\n                                bQualified = false;\n                                break;\n                            }\n                        }\n                    }\n\n                    if( bQualified ){\n                        postMap.put( szKey, this.realValue( queryMapValue, bSafe ) );\n                    }\n                }\n            }\n            else {\n                postMap.put( szKey, this.parameterMapValue2JSONValue( parameterMapValue, bSafe ) );\n            }\n\n        }\n        return postMap;\n    }\n\n    public Map<String, Cookie > cookiesMapify ( Map<String, Cookie > map, HttpServletRequest request ) {\n        Cookie[] cookies = request.getCookies();\n\n        map.clear();\n        if( cookies != null ) {\n            for ( Cookie cookie : cookies ) {\n                map.put( cookie.getName(), cookie );\n            }\n        }\n\n        return map;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/http/HttpEntityParser.java",
    "content": "package com.pinecone.summer.http;\n\nimport com.pinecone.framework.util.json.JSONObject;\n\nimport javax.servlet.http.Cookie;\nimport javax.servlet.http.HttpServletRequest;\nimport java.util.Map;\n\npublic interface HttpEntityParser {\n    Object valueSafety( Object value );\n\n    JSONObject parseQueryString  (String szQueryString, boolean bSafe );\n\n    JSONObject parseFormData     (HttpServletRequest request, boolean bSafe );\n\n    JSONObject requestMapJsonify ( HttpServletRequest request, boolean bSafe );\n\n    JSONObject siftPostFromParameterMap( HttpServletRequest request, boolean bSafe );\n\n    Map<String, Cookie > cookiesMapify ( Map<String, Cookie > map, HttpServletRequest request );\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/http/HttpHeaders.java",
    "content": "package com.pinecone.summer.http;\n\n\nimport com.pinecone.framework.unit.AbstractMultiValueMap;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.unit.LinkedCaseInsensitiveMap;\nimport com.pinecone.framework.unit.MultiValueMap;\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.io.Serializable;\nimport java.net.URI;\nimport java.nio.charset.Charset;\nimport java.text.ParseException;\nimport java.text.SimpleDateFormat;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Date;\nimport java.util.EnumSet;\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.LinkedList;\nimport java.util.List;\nimport java.util.Locale;\nimport java.util.Map;\nimport java.util.Set;\nimport java.util.TimeZone;\n\npublic class HttpHeaders extends AbstractMultiValueMap<String, String > implements MultiValueMap<String, String>, Serializable {\n    private static final long serialVersionUID = -8578554704772377436L;\n    public static final String ACCEPT = \"Accept\";\n    public static final String ACCEPT_CHARSET = \"Accept-Charset\";\n    public static final String ACCEPT_ENCODING = \"Accept-Encoding\";\n    public static final String ACCEPT_LANGUAGE = \"Accept-Language\";\n    public static final String ACCEPT_RANGES = \"Accept-Ranges\";\n    public static final String AGE = \"Age\";\n    public static final String ALLOW = \"Allow\";\n    public static final String AUTHORIZATION = \"Authorization\";\n    public static final String CACHE_CONTROL = \"Cache-Control\";\n    public static final String CONNECTION = \"Connection\";\n    public static final String CONTENT_ENCODING = \"Content-Encoding\";\n    public static final String CONTENT_DISPOSITION = \"Content-Disposition\";\n    public static final String CONTENT_LANGUAGE = \"Content-Language\";\n    public static final String CONTENT_LENGTH = \"Content-Length\";\n    public static final String 
CONTENT_LOCATION = \"Content-Location\";\n    public static final String CONTENT_RANGE = \"Content-Range\";\n    public static final String CONTENT_TYPE = \"Content-Type\";\n    public static final String COOKIE = \"Cookie\";\n    public static final String DATE = \"Date\";\n    public static final String ETAG = \"ETag\";\n    public static final String EXPECT = \"Expect\";\n    public static final String EXPIRES = \"Expires\";\n    public static final String FROM = \"From\";\n    public static final String HOST = \"Host\";\n    public static final String IF_MATCH = \"If-Match\";\n    public static final String IF_MODIFIED_SINCE = \"If-Modified-Since\";\n    public static final String IF_NONE_MATCH = \"If-None-Match\";\n    public static final String IF_RANGE = \"If-Range\";\n    public static final String IF_UNMODIFIED_SINCE = \"If-Unmodified-Since\";\n    public static final String LAST_MODIFIED = \"Last-Modified\";\n    public static final String LINK = \"Link\";\n    public static final String LOCATION = \"Location\";\n    public static final String MAX_FORWARDS = \"Max-Forwards\";\n    public static final String ORIGIN = \"Origin\";\n    public static final String PRAGMA = \"Pragma\";\n    public static final String PROXY_AUTHENTICATE = \"Proxy-Authenticate\";\n    public static final String PROXY_AUTHORIZATION = \"Proxy-Authorization\";\n    public static final String RANGE = \"Range\";\n    public static final String REFERER = \"Referer\";\n    public static final String RETRY_AFTER = \"Retry-After\";\n    public static final String SERVER = \"Server\";\n    public static final String SET_COOKIE = \"Set-Cookie\";\n    public static final String SET_COOKIE2 = \"Set-Cookie2\";\n    public static final String TE = \"TE\";\n    public static final String TRAILER = \"Trailer\";\n    public static final String TRANSFER_ENCODING = \"Transfer-Encoding\";\n    public static final String UPGRADE = \"Upgrade\";\n    public static final String USER_AGENT = 
\"User-Agent\";\n    public static final String VARY = \"Vary\";\n    public static final String VIA = \"Via\";\n    public static final String WARNING = \"Warning\";\n    public static final String WWW_AUTHENTICATE = \"WWW-Authenticate\";\n    private static final String[] DATE_FORMATS = new String[]{\"EEE, dd MMM yyyy HH:mm:ss zzz\", \"EEE, dd-MMM-yy HH:mm:ss zzz\", \"EEE MMM dd HH:mm:ss yyyy\"};\n    private static TimeZone GMT = TimeZone.getTimeZone(\"GMT\");\n    private final Map<String, List<String>> headers;\n\n    public HttpHeaders() {\n        this(new LinkedCaseInsensitiveMap(8, Locale.ENGLISH), false);\n    }\n\n    private HttpHeaders(Map<String, List<String>> headers, boolean readOnly) {\n        Assert.notNull(headers, \"'headers' must not be null\");\n        if (readOnly) {\n            Map<String, List<String>> map = new LinkedCaseInsensitiveMap(headers.size(), Locale.ENGLISH);\n            Iterator var4 = headers.entrySet().iterator();\n\n            while(var4.hasNext()) {\n                Map.Entry<String, List<String>> entry = (Map.Entry)var4.next();\n                List<String> values = Collections.unmodifiableList((List)entry.getValue());\n                map.put(entry.getKey(), values);\n            }\n\n            this.headers = Collections.unmodifiableMap(map);\n        } else {\n            this.headers = headers;\n        }\n\n    }\n\n    public void setAccept(List<MediaType> acceptableMediaTypes) {\n        this.set(\"Accept\", MediaType.toString(acceptableMediaTypes));\n    }\n\n    public List<MediaType> getAccept() {\n        String value = this.getFirst(\"Accept\");\n        List<MediaType> result = value != null ? 
MediaType.parseMediaTypes(value) : Collections.emptyList();\n        if (result.size() == 1) {\n            List<String> acceptHeader = this.get(\"Accept\");\n            if (acceptHeader.size() > 1) {\n                value = StringUtils.collectionToCommaDelimitedString(acceptHeader);\n                result = MediaType.parseMediaTypes(value);\n            }\n        }\n\n        return result;\n    }\n\n    public void setAcceptCharset(List<Charset> acceptableCharsets) {\n        StringBuilder builder = new StringBuilder();\n        Iterator iterator = acceptableCharsets.iterator();\n\n        while(iterator.hasNext()) {\n            Charset charset = (Charset)iterator.next();\n            builder.append(charset.name().toLowerCase(Locale.ENGLISH));\n            if (iterator.hasNext()) {\n                builder.append(\", \");\n            }\n        }\n\n        this.set(\"Accept-Charset\", builder.toString());\n    }\n\n    public List<Charset> getAcceptCharset() {\n        List<Charset> result = new ArrayList();\n        String value = this.getFirst(\"Accept-Charset\");\n        if (value != null) {\n            String[] tokens = value.split(\",\\\\s*\");\n            String[] var4 = tokens;\n            int var5 = tokens.length;\n\n            for(int var6 = 0; var6 < var5; ++var6) {\n                String token = var4[var6];\n                int paramIdx = token.indexOf(59);\n                String charsetName;\n                if (paramIdx == -1) {\n                    charsetName = token;\n                } else {\n                    charsetName = token.substring(0, paramIdx);\n                }\n\n                if (!charsetName.equals(\"*\")) {\n                    result.add(Charset.forName(charsetName));\n                }\n            }\n        }\n\n        return result;\n    }\n\n    public void setAllow(Set<HttpMethod> allowedMethods) {\n        this.set(\"Allow\", StringUtils.collectionToCommaDelimitedString(allowedMethods));\n    }\n\n    
public Set<HttpMethod> getAllow() {\n        String value = this.getFirst(\"Allow\");\n        if (StringUtils.isEmpty(value)) {\n            return EnumSet.noneOf(HttpMethod.class);\n        } else {\n            List<HttpMethod> allowedMethod = new ArrayList(5);\n            String[] tokens = value.split(\",\\\\s*\");\n            String[] var4 = tokens;\n            int var5 = tokens.length;\n\n            for(int var6 = 0; var6 < var5; ++var6) {\n                String token = var4[var6];\n                allowedMethod.add(HttpMethod.valueOf(token));\n            }\n\n            return EnumSet.copyOf(allowedMethod);\n        }\n    }\n\n    public void setCacheControl(String cacheControl) {\n        this.set(\"Cache-Control\", cacheControl);\n    }\n\n    public String getCacheControl() {\n        return this.getFirst(\"Cache-Control\");\n    }\n\n    public void setConnection(String connection) {\n        this.set(\"Connection\", connection);\n    }\n\n    public void setConnection(List<String> connection) {\n        this.set(\"Connection\", this.toCommaDelimitedString(connection));\n    }\n\n    public List<String> getConnection() {\n        return this.getFirstValueAsList(\"Connection\");\n    }\n\n    public void setContentDispositionFormData(String name, String filename) {\n        Assert.notNull(name, \"'name' must not be null\");\n        StringBuilder builder = new StringBuilder(\"form-data; name=\\\"\");\n        builder.append(name).append('\"');\n        if (filename != null) {\n            builder.append(\"; filename=\\\"\");\n            builder.append(filename).append('\"');\n        }\n\n        this.set(\"Content-Disposition\", builder.toString());\n    }\n\n    public void setContentLength(long contentLength) {\n        this.set(\"Content-Length\", Long.toString(contentLength));\n    }\n\n    public long getContentLength() {\n        String value = this.getFirst(\"Content-Length\");\n        return value != null ? 
Long.parseLong(value) : -1L;\n    }\n\n    public void setContentType(MediaType mediaType) {\n        Assert.isTrue(!mediaType.isWildcardType(), \"'Content-Type' cannot contain wildcard type '*'\");\n        Assert.isTrue(!mediaType.isWildcardSubtype(), \"'Content-Type' cannot contain wildcard subtype '*'\");\n        this.set(\"Content-Type\", mediaType.toString());\n    }\n\n    public MediaType getContentType() {\n        String value = this.getFirst(\"Content-Type\");\n        return StringUtils.hasLength(value) ? MediaType.parseMediaType(value) : null;\n    }\n\n    public void setDate(long date) {\n        this.setDate(\"Date\", date);\n    }\n\n    public long getDate() {\n        return this.getFirstDate(\"Date\");\n    }\n\n    public void setETag(String eTag) {\n        if (eTag != null) {\n            Assert.isTrue(eTag.startsWith(\"\\\"\") || eTag.startsWith(\"W/\"), \"Invalid eTag, does not start with W/ or \\\"\");\n            Assert.isTrue(eTag.endsWith(\"\\\"\"), \"Invalid eTag, does not end with \\\"\");\n        }\n\n        this.set(\"ETag\", eTag);\n    }\n\n    public String getETag() {\n        return this.getFirst(\"ETag\");\n    }\n\n    public void setExpires(long expires) {\n        this.setDate(\"Expires\", expires);\n    }\n\n    public long getExpires() {\n        try {\n            return this.getFirstDate(\"Expires\");\n        } catch (IllegalArgumentException var2) {\n            return -1L;\n        }\n    }\n\n    public void setIfModifiedSince(long ifModifiedSince) {\n        this.setDate(\"If-Modified-Since\", ifModifiedSince);\n    }\n\n    /** @deprecated */\n    @Deprecated\n    public long getIfNotModifiedSince() {\n        return this.getIfModifiedSince();\n    }\n\n    public long getIfModifiedSince() {\n        return this.getFirstDate(\"If-Modified-Since\");\n    }\n\n    public void setIfNoneMatch(String ifNoneMatch) {\n        this.set(\"If-None-Match\", ifNoneMatch);\n    }\n\n    public void 
setIfNoneMatch(List<String> ifNoneMatchList) {\n        this.set(\"If-None-Match\", this.toCommaDelimitedString(ifNoneMatchList));\n    }\n\n    protected String toCommaDelimitedString(List<String> list) {\n        StringBuilder builder = new StringBuilder();\n        Iterator iterator = list.iterator();\n\n        while(iterator.hasNext()) {\n            String ifNoneMatch = (String)iterator.next();\n            builder.append(ifNoneMatch);\n            if (iterator.hasNext()) {\n                builder.append(\", \");\n            }\n        }\n\n        return builder.toString();\n    }\n\n    public List<String> getIfNoneMatch() {\n        return this.getFirstValueAsList(\"If-None-Match\");\n    }\n\n    protected List<String> getFirstValueAsList(String header) {\n        List<String> result = new ArrayList();\n        String value = this.getFirst(header);\n        if (value != null) {\n            String[] tokens = value.split(\",\\\\s*\");\n            String[] var5 = tokens;\n            int var6 = tokens.length;\n\n            for(int var7 = 0; var7 < var6; ++var7) {\n                String token = var5[var7];\n                result.add(token);\n            }\n        }\n\n        return result;\n    }\n\n    public void setLastModified(long lastModified) {\n        this.setDate(\"Last-Modified\", lastModified);\n    }\n\n    public long getLastModified() {\n        return this.getFirstDate(\"Last-Modified\");\n    }\n\n    public void setLocation(URI location) {\n        this.set(\"Location\", location.toASCIIString());\n    }\n\n    public URI getLocation() {\n        String value = this.getFirst(\"Location\");\n        return value != null ? 
URI.create(value) : null;\n    }\n\n    public void setOrigin(String origin) {\n        this.set(\"Origin\", origin);\n    }\n\n    public String getOrigin() {\n        return this.getFirst(\"Origin\");\n    }\n\n    public void setPragma(String pragma) {\n        this.set(\"Pragma\", pragma);\n    }\n\n    public String getPragma() {\n        return this.getFirst(\"Pragma\");\n    }\n\n    public void setUpgrade(String upgrade) {\n        this.set(\"Upgrade\", upgrade);\n    }\n\n    public String getUpgrade() {\n        return this.getFirst(\"Upgrade\");\n    }\n\n    public long getFirstDate(String headerName) {\n        String headerValue = this.getFirst(headerName);\n        if (headerValue == null) {\n            return -1L;\n        } else {\n            String[] var3 = DATE_FORMATS;\n            int var4 = var3.length;\n            int var5 = 0;\n\n            while(var5 < var4) {\n                String dateFormat = var3[var5];\n                SimpleDateFormat simpleDateFormat = new SimpleDateFormat(dateFormat, Locale.US);\n                simpleDateFormat.setTimeZone(GMT);\n\n                try {\n                    return simpleDateFormat.parse(headerValue).getTime();\n                } catch (ParseException var9) {\n                    ++var5;\n                }\n            }\n\n            throw new IllegalArgumentException(\"Cannot parse date value \\\"\" + headerValue + \"\\\" for \\\"\" + headerName + \"\\\" header\");\n        }\n    }\n\n    public void setDate(String headerName, long date) {\n        SimpleDateFormat dateFormat = new SimpleDateFormat(DATE_FORMATS[0], Locale.US);\n        dateFormat.setTimeZone(GMT);\n        this.set(headerName, dateFormat.format(new Date(date)));\n    }\n\n    public String getFirst(String headerName) {\n        List<String> headerValues = (List)this.headers.get(headerName);\n        return headerValues != null ? 
(String)headerValues.get(0) : null;\n    }\n\n    public String add(String headerName, String headerValue) {\n        List<String> headerValues = (List)this.headers.get(headerName);\n        if (headerValues == null) {\n            headerValues = new LinkedList();\n            this.headers.put(headerName, headerValues);\n        }\n\n        ((List)headerValues).add(headerValue);\n        return headerValue;\n    }\n\n    public String set( String headerName, String headerValue ) {\n        List<String> headerValues = new LinkedList();\n        headerValues.add(headerValue);\n        this.headers.put(headerName, headerValues);\n        return headerValue;\n    }\n\n    public void setAll(Map<String, String> values) {\n        Iterator var2 = values.entrySet().iterator();\n\n        while(var2.hasNext()) {\n            Map.Entry<String, String> entry = (Map.Entry)var2.next();\n            this.set((String)entry.getKey(), (String)entry.getValue());\n        }\n\n    }\n\n    public Map<String, String> toSingleValueMap() {\n        LinkedHashMap<String, String> singleValueMap = new LinkedHashMap(this.headers.size());\n        Iterator var2 = this.headers.entrySet().iterator();\n\n        while(var2.hasNext()) {\n            Map.Entry<String, List<String>> entry = (Map.Entry)var2.next();\n            singleValueMap.put(entry.getKey(), (String) ((List)entry.getValue()).get(0));\n        }\n\n        return singleValueMap;\n    }\n\n    public int size() {\n        return this.headers.size();\n    }\n\n    public boolean isEmpty() {\n        return this.headers.isEmpty();\n    }\n\n    public boolean containsKey(Object key) {\n        return this.headers.containsKey(key);\n    }\n\n    public boolean containsValue(Object value) {\n        return this.headers.containsValue(value);\n    }\n\n    public List<String> get(Object key) {\n        return (List)this.headers.get(key);\n    }\n\n    public List<String> put(String key, List<String> value) {\n        return 
(List)this.headers.put(key, value);\n    }\n\n    public List<String> remove(Object key) {\n        return (List)this.headers.remove(key);\n    }\n\n    public void putAll(Map<? extends String, ? extends List<String>> map) {\n        this.headers.putAll(map);\n    }\n\n    public void clear() {\n        this.headers.clear();\n    }\n\n    public Set<String> keySet() {\n        return this.headers.keySet();\n    }\n\n    public Collection<List<String>> values() {\n        return this.headers.values();\n    }\n\n    public Set<Map.Entry<String, List<String>>> entrySet() {\n        return this.headers.entrySet();\n    }\n\n    public boolean equals(Object other) {\n        if (this == other) {\n            return true;\n        } else if (!(other instanceof HttpHeaders)) {\n            return false;\n        } else {\n            HttpHeaders otherHeaders = (HttpHeaders)other;\n            return this.headers.equals(otherHeaders.headers);\n        }\n    }\n\n    public int hashCode() {\n        return this.headers.hashCode();\n    }\n\n    public String toString() {\n        return this.headers.toString();\n    }\n\n    public static HttpHeaders readOnlyHttpHeaders(HttpHeaders headers) {\n        return new HttpHeaders(headers, true);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/http/HttpMethod.java",
    "content": "package com.pinecone.summer.http;\n\npublic enum HttpMethod {\n    GET,\n    POST,\n    HEAD,\n    OPTIONS,\n    PUT,\n    PATCH,\n    DELETE,\n    TRACE;\n\n    private HttpMethod() {\n    }\n}"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/http/HttpURLParser.java",
    "content": "package com.pinecone.summer.http;\n\nimport java.io.CharArrayWriter;\nimport java.io.UnsupportedEncodingException;\nimport java.nio.charset.Charset;\nimport java.nio.charset.IllegalCharsetNameException;\nimport java.nio.charset.UnsupportedCharsetException;\nimport java.util.BitSet;\n\npublic class HttpURLParser {\n    static BitSet dontNeedEncoding;\n    static final int caseDiff = ('a' - 'A');\n    static String dfltEncName = null;\n\n    static {\n\n        /* The list of characters that are not encoded has been\n         * determined as follows:\n         *\n         * RFC 2396 states:\n         * -----\n         * Data characters that are allowed in a URI but do not have a\n         * reserved purpose are called unreserved.  These include upper\n         * and lower case letters, decimal digits, and a limited set of\n         * punctuation marks and symbols.\n         *\n         * unreserved  = alphanum | mark\n         *\n         * mark        = \"-\" | \"_\" | \".\" | \"!\" | \"~\" | \"*\" | \"'\" | \"(\" | \")\"\n         *\n         * Unreserved characters can be escaped without changing the\n         * semantics of the URI, but this should not be done unless the\n         * URI is being used in a context that does not allow the\n         * unescaped character to appear.\n         * -----\n         *\n         * It appears that both Netscape and Internet Explorer escape\n         * all special characters from this list with the exception\n         * of \"-\", \"_\", \".\", \"*\". While it is not clear why they are\n         * escaping the other characters, perhaps it is safest to\n         * assume that there might be contexts in which the others\n         * are unsafe if not escaped. Therefore, we will use the same\n         * list. 
It is also noteworthy that this is consistent with\n         * O'Reilly's \"HTML: The Definitive Guide\" (page 164).\n         *\n         * As a last note, Intenet Explorer does not encode the \"@\"\n         * character which is clearly not unreserved according to the\n         * RFC. We are being consistent with the RFC in this matter,\n         * as is Netscape.\n         *\n         */\n\n        dontNeedEncoding = new BitSet(256);\n        int i;\n        for (i = 'a'; i <= 'z'; i++) {\n            dontNeedEncoding.set(i);\n        }\n        for (i = 'A'; i <= 'Z'; i++) {\n            dontNeedEncoding.set(i);\n        }\n        for (i = '0'; i <= '9'; i++) {\n            dontNeedEncoding.set(i);\n        }\n        dontNeedEncoding.set(' '); /* encoding a space to a + is done\n         * in the encode() method */\n        dontNeedEncoding.set('-');\n        dontNeedEncoding.set('_');\n        dontNeedEncoding.set('.');\n        dontNeedEncoding.set('*');\n\n        dfltEncName = (String) System.getProperties().get(\"file.encoding\");\n    }\n\n\n    public static String decode(String s) {\n\n        String str = null;\n\n        try {\n            str = decode(s, dfltEncName);\n        } catch (UnsupportedEncodingException e) {\n            // The system should always have the platform default\n        }\n\n        return str;\n    }\n\n    public static String decode(String s, String enc) throws UnsupportedEncodingException {\n\n        boolean needToChange = false;\n        int numChars = s.length();\n        StringBuffer sb = new StringBuffer(numChars > 500 ? 
numChars / 2 : numChars);\n        int i = 0;\n\n        if ( enc.length() == 0 ) {\n            throw new UnsupportedEncodingException (\"URLDecoder: empty string enc parameter\");\n        }\n\n        char c;\n        byte[] bytes = null;\n        while (i < numChars) {\n            c = s.charAt(i);\n            switch (c) {\n                case '+':{\n                    sb.append(' ');\n                    i++;\n                    needToChange = true;\n                    break;\n                }\n                case '%':{\n                    if (bytes == null) {\n                        bytes = new byte[(numChars-i)/3];\n                    }\n                    int pos = 0;\n\n                    boolean bUsingRaw = false;\n                    while ( (( i + 2 ) < numChars) &&  (c=='%') ) {\n                        int v = 0;\n                        try {\n                            v = Integer.parseInt(s.substring( i + 1,i + 3),16);\n                        }\n                        catch ( NumberFormatException e ) {\n                            //throw new IllegalArgumentException(\"URLDecoder: Illegal hex characters in escape (%) pattern - \" + e.getMessage()); /** Fuck Java **/\n                            sb.append( c );\n                            sb.append( s, i + 1, i + 3 );\n                            i+=3;\n                            c = s.charAt(i);\n                            bUsingRaw = true;\n                            continue;\n                        }\n\n                        if ( v < 0 ) {\n                            throw new IllegalArgumentException(\"URLDecoder: Illegal hex characters in escape (%) pattern - negative value\");\n                        }\n                        bytes[pos++] = (byte) v;\n                        i+= 3;\n                        if ( i < numChars ) {\n                            c = s.charAt(i);\n                        }\n                    }\n\n\n                    if ( (i < numChars) 
&& (c=='%') ) {\n                        //throw new IllegalArgumentException(\"URLDecoder: Incomplete trailing escape (%) pattern\"); /** Fuck Java **/\n                        sb.append(c);\n                        i++;\n                        continue;\n                    }\n\n                    if( !bUsingRaw ){\n                        sb.append(new String(bytes, 0, pos, enc));\n                    }\n\n                    needToChange = true;\n                    break;\n                }\n                default:{\n                    sb.append(c);\n                    i++;\n                    break;\n                }\n            }\n        }\n\n        return (needToChange? sb.toString() : s);\n    }\n\n    public static String encode(String s) {\n\n        String str = null;\n\n        try {\n            str = encode(s, dfltEncName);\n        } catch (UnsupportedEncodingException e) {\n            // The system should always have the platform default\n        }\n\n        return str;\n    }\n\n    public static String encode(String s, String enc) throws UnsupportedEncodingException {\n\n        boolean needToChange = false;\n        StringBuffer out = new StringBuffer(s.length());\n        Charset charset;\n        CharArrayWriter charArrayWriter = new CharArrayWriter();\n\n        if (enc == null)\n            throw new NullPointerException(\"charsetName\");\n\n        try {\n            charset = Charset.forName(enc);\n        } catch (IllegalCharsetNameException e) {\n            throw new UnsupportedEncodingException(enc);\n        } catch (UnsupportedCharsetException e) {\n            throw new UnsupportedEncodingException(enc);\n        }\n\n        for (int i = 0; i < s.length();) {\n            int c = (int) s.charAt(i);\n            //System.out.println(\"Examining character: \" + c);\n            if (dontNeedEncoding.get(c)) {\n                if (c == ' ') {\n                    c = '+';\n                    needToChange = true;\n          
      }\n                //System.out.println(\"Storing: \" + c);\n                out.append((char)c);\n                i++;\n            } else {\n                // convert to external encoding before hex conversion\n                do {\n                    charArrayWriter.write(c);\n                    /*\n                     * If this character represents the start of a Unicode\n                     * surrogate pair, then pass in two characters. It's not\n                     * clear what should be done if a bytes reserved in the\n                     * surrogate pairs range occurs outside of a legal\n                     * surrogate pair. For now, just treat it as if it were\n                     * any other character.\n                     */\n                    if (c >= 0xD800 && c <= 0xDBFF) {\n                        /*\n                          System.out.println(Integer.toHexString(c)\n                          + \" is high surrogate\");\n                        */\n                        if ( (i+1) < s.length()) {\n                            int d = (int) s.charAt(i+1);\n                            /*\n                              System.out.println(\"\\tExamining \"\n                              + Integer.toHexString(d));\n                            */\n                            if (d >= 0xDC00 && d <= 0xDFFF) {\n                                /*\n                                  System.out.println(\"\\t\"\n                                  + Integer.toHexString(d)\n                                  + \" is low surrogate\");\n                                */\n                                charArrayWriter.write(d);\n                                i++;\n                            }\n                        }\n                    }\n                    i++;\n                } while (i < s.length() && !dontNeedEncoding.get((c = (int) s.charAt(i))));\n\n                charArrayWriter.flush();\n                String str = new 
String(charArrayWriter.toCharArray());\n                byte[] ba = str.getBytes(charset);\n                for (int j = 0; j < ba.length; j++) {\n                    out.append('%');\n                    char ch = Character.forDigit((ba[j] >> 4) & 0xF, 16);\n                    // converting to use uppercase letter as part of\n                    // the hex value if ch is a letter.\n                    if (Character.isLetter(ch)) {\n                        ch -= caseDiff;\n                    }\n                    out.append(ch);\n                    ch = Character.forDigit(ba[j] & 0xF, 16);\n                    if (Character.isLetter(ch)) {\n                        ch -= caseDiff;\n                    }\n                    out.append(ch);\n                }\n                charArrayWriter.reset();\n                needToChange = true;\n            }\n        }\n\n        return (needToChange? out.toString() : s);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/http/InvalidMediaTypeException.java",
    "content": "package com.pinecone.summer.http;\n\nimport com.pinecone.summer.util.InvalidMimeTypeException;\n\npublic class InvalidMediaTypeException extends IllegalArgumentException {\n    private String mediaType;\n\n    public InvalidMediaTypeException(String mediaType, String message) {\n        super(\"Invalid media type \\\"\" + mediaType + \"\\\": \" + message);\n        this.mediaType = mediaType;\n    }\n\n    InvalidMediaTypeException(InvalidMimeTypeException ex) {\n        super(ex.getMessage(), ex);\n        this.mediaType = ex.getMimeType();\n    }\n\n    public String getMediaType() {\n        return this.mediaType;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/http/MediaType.java",
    "content": "package com.pinecone.summer.http;\n\nimport java.io.Serializable;\nimport java.nio.charset.Charset;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.comparator.CompoundComparator;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.summer.util.InvalidMimeTypeException;\nimport com.pinecone.summer.util.MimeType;\nimport com.pinecone.summer.util.MimeTypeUtils;\n\npublic class MediaType extends MimeType implements Serializable {\n    private static final long serialVersionUID = 2069937152339670231L;\n    public static final MediaType ALL = valueOf(\"*/*\");\n    public static final String ALL_VALUE = \"*/*\";\n    public static final MediaType APPLICATION_ATOM_XML = valueOf(\"application/atom+xml\");\n    public static final String APPLICATION_ATOM_XML_VALUE = \"application/atom+xml\";\n    public static final MediaType APPLICATION_FORM_URLENCODED = valueOf(\"application/x-www-form-urlencoded\");\n    public static final String APPLICATION_FORM_URLENCODED_VALUE = \"application/x-www-form-urlencoded\";\n    public static final MediaType APPLICATION_JSON = valueOf(\"application/json\");\n    public static final String APPLICATION_JSON_VALUE = \"application/json\";\n    public static final MediaType APPLICATION_OCTET_STREAM = valueOf(\"application/octet-stream\");\n    public static final String APPLICATION_OCTET_STREAM_VALUE = \"application/octet-stream\";\n    public static final MediaType APPLICATION_XHTML_XML = valueOf(\"application/xhtml+xml\");\n    public static final String APPLICATION_XHTML_XML_VALUE = \"application/xhtml+xml\";\n    public static final MediaType APPLICATION_XML = valueOf(\"application/xml\");\n    public static final String APPLICATION_XML_VALUE = \"application/xml\";\n    public 
static final MediaType IMAGE_GIF = valueOf(\"image/gif\");\n    public static final String IMAGE_GIF_VALUE = \"image/gif\";\n    public static final MediaType IMAGE_JPEG = valueOf(\"image/jpeg\");\n    public static final String IMAGE_JPEG_VALUE = \"image/jpeg\";\n    public static final MediaType IMAGE_PNG = valueOf(\"image/png\");\n    public static final String IMAGE_PNG_VALUE = \"image/png\";\n    public static final MediaType MULTIPART_FORM_DATA = valueOf(\"multipart/form-data\");\n    public static final String MULTIPART_FORM_DATA_VALUE = \"multipart/form-data\";\n    public static final MediaType TEXT_HTML = valueOf(\"text/html\");\n    public static final String TEXT_HTML_VALUE = \"text/html\";\n    public static final MediaType TEXT_PLAIN = valueOf(\"text/plain\");\n    public static final String TEXT_PLAIN_VALUE = \"text/plain\";\n    public static final MediaType TEXT_XML = valueOf(\"text/xml\");\n    public static final String TEXT_XML_VALUE = \"text/xml\";\n    private static final String PARAM_QUALITY_FACTOR = \"q\";\n    public static final Comparator<MediaType> QUALITY_VALUE_COMPARATOR = new Comparator<MediaType>() {\n        public int compare(MediaType mediaType1, MediaType mediaType2) {\n            double quality1 = mediaType1.getQualityValue();\n            double quality2 = mediaType2.getQualityValue();\n            int qualityComparison = Double.compare(quality2, quality1);\n            if (qualityComparison != 0) {\n                return qualityComparison;\n            } else if (mediaType1.isWildcardType() && !mediaType2.isWildcardType()) {\n                return 1;\n            } else if (mediaType2.isWildcardType() && !mediaType1.isWildcardType()) {\n                return -1;\n            } else if (!mediaType1.getType().equals(mediaType2.getType())) {\n                return 0;\n            } else if (mediaType1.isWildcardSubtype() && !mediaType2.isWildcardSubtype()) {\n                return 1;\n            } else if 
(mediaType2.isWildcardSubtype() && !mediaType1.isWildcardSubtype()) {\n                return -1;\n            } else if (!mediaType1.getSubtype().equals(mediaType2.getSubtype())) {\n                return 0;\n            } else {\n                int paramsSize1 = mediaType1.getParameters().size();\n                int paramsSize2 = mediaType2.getParameters().size();\n                return paramsSize2 < paramsSize1 ? -1 : (paramsSize2 == paramsSize1 ? 0 : 1);\n            }\n        }\n    };\n    public static final Comparator<MediaType> SPECIFICITY_COMPARATOR = new SpecificityComparator<MediaType>() {\n        protected int compareParameters(MediaType mediaType1, MediaType mediaType2) {\n            double quality1 = mediaType1.getQualityValue();\n            double quality2 = mediaType2.getQualityValue();\n            int qualityComparison = Double.compare(quality2, quality1);\n            return qualityComparison != 0 ? qualityComparison : super.compareParameters(mediaType1, mediaType2);\n        }\n    };\n\n    public MediaType(String type) {\n        super(type);\n    }\n\n    public MediaType(String type, String subtype) {\n        super(type, subtype, Collections.emptyMap());\n    }\n\n    public MediaType(String type, String subtype, Charset charset) {\n        super(type, subtype, charset);\n    }\n\n    public MediaType(String type, String subtype, double qualityValue) {\n        this(type, subtype, Collections.singletonMap(\"q\", Double.toString(qualityValue)));\n    }\n\n    public MediaType(MediaType other, Map<String, String> parameters) {\n        super(other.getType(), other.getSubtype(), parameters);\n    }\n\n    public MediaType(String type, String subtype, Map<String, String> parameters) {\n        super(type, subtype, parameters);\n    }\n\n    protected void checkParameters(String attribute, String value) {\n        super.checkParameters(attribute, value);\n        if (\"q\".equals(attribute)) {\n            value = this.unquote(value);\n  
          double d = Double.parseDouble(value);\n            Assert.isTrue(d >= 0.0D && d <= 1.0D, \"Invalid quality value \\\"\" + value + \"\\\": should be between 0.0 and 1.0\");\n        }\n\n    }\n\n    public double getQualityValue() {\n        String qualityFactory = this.getParameter(\"q\");\n        return qualityFactory != null ? Double.parseDouble(this.unquote(qualityFactory)) : 1.0D;\n    }\n\n    public boolean includes(MediaType other) {\n        return super.includes(other);\n    }\n\n    public boolean isCompatibleWith(MediaType other) {\n        return super.isCompatibleWith(other);\n    }\n\n    public MediaType copyQualityValue(MediaType mediaType) {\n        if (!mediaType.getParameters().containsKey(\"q\")) {\n            return this;\n        } else {\n            Map<String, String> params = new LinkedHashMap(this.getParameters());\n            params.put(\"q\", mediaType.getParameters().get(\"q\"));\n            return new MediaType(this, params);\n        }\n    }\n\n    public MediaType removeQualityValue() {\n        if (!this.getParameters().containsKey(\"q\")) {\n            return this;\n        } else {\n            Map<String, String> params = new LinkedHashMap(this.getParameters());\n            params.remove(\"q\");\n            return new MediaType(this, params);\n        }\n    }\n\n    public static MediaType valueOf(String value) {\n        return parseMediaType(value);\n    }\n\n    public static MediaType parseMediaType(String mediaType) {\n        MimeType type;\n        try {\n            type = MimeTypeUtils.parseMimeType(mediaType);\n        } catch (InvalidMimeTypeException var4) {\n            throw new InvalidMediaTypeException(var4);\n        }\n\n        try {\n            return new MediaType(type.getType(), type.getSubtype(), type.getParameters());\n        } catch (IllegalArgumentException var3) {\n            throw new InvalidMediaTypeException(mediaType, var3.getMessage());\n        }\n    }\n\n    public 
static List<MediaType> parseMediaTypes(String mediaTypes) {\n        if (!StringUtils.hasLength(mediaTypes)) {\n            return Collections.emptyList();\n        } else {\n            String[] tokens = mediaTypes.split(\",\\\\s*\");\n            List<MediaType> result = new ArrayList(tokens.length);\n            String[] var3 = tokens;\n            int var4 = tokens.length;\n\n            for(int var5 = 0; var5 < var4; ++var5) {\n                String token = var3[var5];\n                result.add(parseMediaType(token));\n            }\n\n            return result;\n        }\n    }\n\n    public static String toString(Collection<MediaType> mediaTypes) {\n        return MimeTypeUtils.toString(mediaTypes);\n    }\n\n    public static void sortBySpecificity(List<MediaType> mediaTypes) {\n        Assert.notNull(mediaTypes, \"'mediaTypes' must not be null\");\n        if (mediaTypes.size() > 1) {\n            Collections.sort(mediaTypes, SPECIFICITY_COMPARATOR);\n        }\n\n    }\n\n    public static void sortByQualityValue(List<MediaType> mediaTypes) {\n        Assert.notNull(mediaTypes, \"'mediaTypes' must not be null\");\n        if (mediaTypes.size() > 1) {\n            Collections.sort(mediaTypes, QUALITY_VALUE_COMPARATOR);\n        }\n\n    }\n\n    public static void sortBySpecificityAndQuality(List<MediaType> mediaTypes) {\n        Assert.notNull(mediaTypes, \"'mediaTypes' must not be null\");\n        if (mediaTypes.size() > 1) {\n            Collections.sort(mediaTypes, new CompoundComparator(new Comparator[]{SPECIFICITY_COMPARATOR, QUALITY_VALUE_COMPARATOR}));\n        }\n\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/io/AbstractResource.java",
    "content": "package com.pinecone.summer.io;\n\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.summer.util.ResourceUtils;\n\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.net.URL;\n\npublic abstract class AbstractResource implements Resource {\n    public AbstractResource() {\n    }\n\n    public boolean exists() {\n        try {\n            return this.getFile().exists();\n        } catch (IOException var4) {\n            try {\n                InputStream is = this.getInputStream();\n                is.close();\n                return true;\n            } catch (Throwable var3) {\n                return false;\n            }\n        }\n    }\n\n    public boolean isReadable() {\n        return true;\n    }\n\n    public boolean isOpen() {\n        return false;\n    }\n\n    public URL getURL() throws IOException {\n        throw new FileNotFoundException(this.getDescription() + \" cannot be resolved to URL\");\n    }\n\n    public URI getURI() throws IOException {\n        URL url = this.getURL();\n\n        try {\n            return ResourceUtils.toURI(url);\n        } catch (URISyntaxException var3) {\n            throw new IOException(\"Invalid URI [\" + url + \"]\", var3);\n        }\n    }\n\n    public File getFile() throws IOException {\n        throw new FileNotFoundException(this.getDescription() + \" cannot be resolved to absolute file path\");\n    }\n\n    public long contentLength() throws IOException {\n        InputStream is = this.getInputStream();\n        Assert.state(is != null, \"resource input stream must not be null\");\n\n        try {\n            long size = 0L;\n\n            int read;\n            for(byte[] buf = new byte[255]; (read = is.read(buf)) != -1; size += (long)read) {\n            }\n\n            long var6 = size;\n            return var6;\n        } finally 
{\n            try {\n                is.close();\n            } catch (IOException var14) {\n            }\n\n        }\n    }\n\n    public long lastModified() throws IOException {\n        long lastModified = this.getFileForLastModifiedCheck().lastModified();\n        if (lastModified == 0L) {\n            throw new FileNotFoundException(this.getDescription() + \" cannot be resolved in the file system for resolving its last-modified timestamp\");\n        } else {\n            return lastModified;\n        }\n    }\n\n    protected File getFileForLastModifiedCheck() throws IOException {\n        return this.getFile();\n    }\n\n    public Resource createRelative(String relativePath) throws IOException {\n        throw new FileNotFoundException(\"Cannot create a relative resource for \" + this.getDescription());\n    }\n\n    public String getFilename() {\n        return null;\n    }\n\n    public String toString() {\n        return this.getDescription();\n    }\n\n    public boolean equals(Object obj) {\n        return obj == this || obj instanceof Resource && ((Resource)obj).getDescription().equals(this.getDescription());\n    }\n\n    public int hashCode() {\n        return this.getDescription().hashCode();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/io/InputStreamSource.java",
    "content": "package com.pinecone.summer.io;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\npublic interface InputStreamSource {\n    InputStream getInputStream() throws IOException;\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/io/PathResource.java",
    "content": "package com.pinecone.summer.io;\n\nimport com.pinecone.framework.util.Assert;\n\nimport java.io.*;\nimport java.net.URI;\nimport java.net.URL;\nimport java.nio.file.Files;\nimport java.nio.file.LinkOption;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\n\npublic class PathResource extends AbstractResource implements WritableResource {\n    private final Path path;\n\n    public PathResource(Path path) {\n        Assert.notNull(path, \"Path must not be null\");\n        this.path = path.normalize();\n    }\n\n    public PathResource(String path) {\n        Assert.notNull(path, \"Path must not be null\");\n        this.path = Paths.get(path).normalize();\n    }\n\n    public PathResource(URI uri) {\n        Assert.notNull(uri, \"URI must not be null\");\n        this.path = Paths.get(uri).normalize();\n    }\n\n    public final String getPath() {\n        return this.path.toString();\n    }\n\n    public boolean exists() {\n        return Files.exists(this.path, new LinkOption[0]);\n    }\n\n    public boolean isReadable() {\n        return Files.isReadable(this.path) && !Files.isDirectory(this.path, new LinkOption[0]);\n    }\n\n    public InputStream getInputStream() throws IOException {\n        if (!this.exists()) {\n            throw new FileNotFoundException(this.getPath() + \" (no such file or directory)\");\n        } else if (Files.isDirectory(this.path, new LinkOption[0])) {\n            throw new FileNotFoundException(this.getPath() + \" (is a directory)\");\n        } else {\n            return Files.newInputStream(this.path);\n        }\n    }\n\n    public URL getURL() throws IOException {\n        return this.path.toUri().toURL();\n    }\n\n    public URI getURI() throws IOException {\n        return this.path.toUri();\n    }\n\n    public File getFile() throws IOException {\n        try {\n            return this.path.toFile();\n        } catch (UnsupportedOperationException var2) {\n            throw new 
FileNotFoundException(this.path + \" cannot be resolved to \" + \"absolute file path\");\n        }\n    }\n\n    public long contentLength() throws IOException {\n        return Files.size(this.path);\n    }\n\n    public long lastModified() throws IOException {\n        return Files.getLastModifiedTime(this.path).toMillis();\n    }\n\n    public Resource createRelative(String relativePath) throws IOException {\n        return new PathResource(this.path.resolve(relativePath));\n    }\n\n    public String getFilename() {\n        return this.path.getFileName().toString();\n    }\n\n    public String getDescription() {\n        return \"path [\" + this.path.toAbsolutePath() + \"]\";\n    }\n\n    public boolean isWritable() {\n        return Files.isWritable(this.path) && !Files.isDirectory(this.path, new LinkOption[0]);\n    }\n\n    public OutputStream getOutputStream() throws IOException {\n        if (Files.isDirectory(this.path, new LinkOption[0])) {\n            throw new FileNotFoundException(this.getPath() + \" (is a directory)\");\n        } else {\n            return Files.newOutputStream(this.path);\n        }\n    }\n\n    public boolean equals(Object obj) {\n        return this == obj || obj instanceof PathResource && this.path.equals(((PathResource)obj).path);\n    }\n\n    public int hashCode() {\n        return this.path.hashCode();\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/io/Resource.java",
    "content": "package com.pinecone.summer.io;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.net.URI;\nimport java.net.URL;\n\npublic interface Resource extends InputStreamSource {\n    boolean exists();\n\n    boolean isReadable();\n\n    boolean isOpen();\n\n    URL getURL() throws IOException;\n\n    URI getURI() throws IOException;\n\n    File getFile() throws IOException;\n\n    long contentLength() throws IOException;\n\n    long lastModified() throws IOException;\n\n    Resource createRelative(String var1) throws IOException;\n\n    String getFilename();\n\n    String getDescription();\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/io/WritableResource.java",
    "content": "package com.pinecone.summer.io;\n\nimport java.io.IOException;\nimport java.io.OutputStream;\n\npublic interface WritableResource extends Resource {\n    boolean isWritable();\n\n    OutputStream getOutputStream() throws IOException;\n}"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MaxUploadSizeExceededException.java",
    "content": "package com.pinecone.summer.multiparts;\n\npublic class MaxUploadSizeExceededException extends MultipartException {\n    private final long maxUploadSize;\n\n    public MaxUploadSizeExceededException(long maxUploadSize) {\n        this(maxUploadSize, (Throwable)null);\n    }\n\n    public MaxUploadSizeExceededException(long maxUploadSize, Throwable ex) {\n        super(\"Maximum upload size of \" + maxUploadSize + \" bytes exceeded\", ex);\n        this.maxUploadSize = maxUploadSize;\n    }\n\n    public long getMaxUploadSize() {\n        return this.maxUploadSize;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartException.java",
    "content": "package com.pinecone.summer.multiparts;\n\n\npublic class MultipartException extends RuntimeException {\n    public MultipartException(String msg) {\n        super(msg);\n    }\n\n    public MultipartException(String msg, Throwable cause) {\n        super(msg, cause);\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartFile.java",
    "content": "package com.pinecone.summer.multiparts;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStream;\n\npublic interface MultipartFile {\n    String getName();\n\n    String getOriginalFilename();\n\n    String getContentType();\n\n    boolean isEmpty();\n\n    long getSize();\n\n    byte[] getBytes() throws IOException;\n\n    InputStream getInputStream() throws IOException;\n\n    void transferTo(File dest) throws IOException, IllegalStateException;\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartHttpServletRequest.java",
    "content": "package com.pinecone.summer.multiparts;\n\nimport com.pinecone.summer.http.HttpHeaders;\nimport com.pinecone.summer.http.HttpMethod;\n\nimport javax.servlet.http.HttpServletRequest;\n\npublic interface MultipartHttpServletRequest extends HttpServletRequest, MultipartRequest {\n    HttpMethod getRequestMethod();\n\n    HttpHeaders getRequestHeaders();\n\n    HttpHeaders getMultipartHeaders(String var1);\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartRequest.java",
    "content": "package com.pinecone.summer.multiparts;\n\nimport com.pinecone.framework.unit.MultiValueMap;\n\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\n\npublic interface MultipartRequest {\n    Iterator<String> getFileNames();\n\n    MultipartFile getFile(String szFileFieldName);\n\n    List<MultipartFile> getFiles(String szName);\n\n    Map<String, MultipartFile> getFileMap();\n\n    MultiValueMap<String, MultipartFile> getMultiFileMap();\n\n    String getMultipartContentType(String szParamOrFileName);\n}"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/MultipartResolver.java",
    "content": "package com.pinecone.summer.multiparts;\n\nimport javax.servlet.http.HttpServletRequest;\n\npublic interface MultipartResolver {\n    boolean isMultipart(javax.servlet.http.HttpServletRequest hHttpServletRequest);\n\n    MultipartHttpServletRequest resolveMultipart(HttpServletRequest hHttpServletRequest) throws MultipartException;\n\n    void cleanupMultipart(MultipartHttpServletRequest hMultipartHttpServletRequest);\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/commons/CommonsFileUploadSupport.java",
    "content": "package com.pinecone.summer.multiparts.commons;\n\nimport java.io.IOException;\nimport java.io.UnsupportedEncodingException;\nimport java.nio.charset.Charset;\nimport java.util.HashMap;\nimport java.util.Iterator;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.summer.multiparts.MultipartFile;\nimport com.pinecone.summer.http.MediaType;\nimport org.apache.commons.fileupload.FileItem;\nimport org.apache.commons.fileupload.FileItemFactory;\nimport org.apache.commons.fileupload.FileUpload;\nimport org.apache.commons.fileupload.disk.DiskFileItemFactory;\nimport com.pinecone.summer.io.Resource;\nimport com.pinecone.framework.unit.LinkedMultiValueMap;\nimport com.pinecone.framework.unit.MultiValueMap;\n\npublic abstract class CommonsFileUploadSupport {\n    private final DiskFileItemFactory fileItemFactory = this.newFileItemFactory();\n    private final FileUpload fileUpload = this.newFileUpload(this.getFileItemFactory());\n    private boolean uploadTempDirSpecified = false;\n\n    public CommonsFileUploadSupport() {\n    }\n\n    public DiskFileItemFactory getFileItemFactory() {\n        return this.fileItemFactory;\n    }\n\n    public FileUpload getFileUpload() {\n        return this.fileUpload;\n    }\n\n    public void setMaxUploadSize(long maxUploadSize) {\n        this.fileUpload.setSizeMax(maxUploadSize);\n    }\n\n    public void setSingleUploadSize(long maxUploadSize) {\n        this.fileUpload.setFileSizeMax(maxUploadSize);\n    }\n\n    public void setMaxInMemorySize(int maxInMemorySize) {\n        this.fileItemFactory.setSizeThreshold(maxInMemorySize);\n    }\n\n    public void setDefaultEncoding(String defaultEncoding) {\n        this.fileUpload.setHeaderEncoding(defaultEncoding);\n    }\n\n    protected String getDefaultEncoding() {\n        String encoding = this.getFileUpload().getHeaderEncoding();\n        if (encoding == 
null) {\n            encoding = \"ISO-8859-1\";\n        }\n\n        return encoding;\n    }\n\n    public void setUploadTempDir(Resource uploadTempDir) throws IOException {\n        if (!uploadTempDir.exists() && !uploadTempDir.getFile().mkdirs()) {\n            throw new IllegalArgumentException(\"Given uploadTempDir [\" + uploadTempDir + \"] could not be created\");\n        } else {\n            this.fileItemFactory.setRepository(uploadTempDir.getFile());\n            this.uploadTempDirSpecified = true;\n        }\n    }\n\n    protected boolean isUploadTempDirSpecified() {\n        return this.uploadTempDirSpecified;\n    }\n\n    protected DiskFileItemFactory newFileItemFactory() {\n        return new DiskFileItemFactory();\n    }\n\n    protected abstract FileUpload newFileUpload(FileItemFactory var1);\n\n    protected FileUpload prepareFileUpload(String encoding) {\n        FileUpload fileUpload = this.getFileUpload();\n        FileUpload actualFileUpload = fileUpload;\n        if (encoding != null && !encoding.equals(fileUpload.getHeaderEncoding())) {\n            actualFileUpload = this.newFileUpload(this.getFileItemFactory());\n            actualFileUpload.setSizeMax(fileUpload.getSizeMax());\n            actualFileUpload.setHeaderEncoding(encoding);\n        }\n\n        return actualFileUpload;\n    }\n\n    protected CommonsFileUploadSupport.MultipartParsingResult parseFileItems(List<FileItem> fileItems, String encoding) {\n        MultiValueMap<String, MultipartFile> multipartFiles = new LinkedMultiValueMap();\n        Map<String, String[]> multipartParameters = new HashMap();\n        Map<String, String> multipartParameterContentTypes = new HashMap();\n        Iterator var6 = fileItems.iterator();\n\n        while(true) {\n            while(var6.hasNext()) {\n                FileItem fileItem = (FileItem)var6.next();\n                if (fileItem.isFormField()) {\n                    String partEncoding = 
this.determineEncoding(fileItem.getContentType(), encoding);\n                    String value;\n                    if (partEncoding != null) {\n                        try {\n                            value = fileItem.getString(partEncoding);\n                        } catch (UnsupportedEncodingException var12) {\n                            System.err.println(\"Could not decode multipart item '\" + fileItem.getFieldName() + \"' with encoding '\" + partEncoding + \"': using platform default\");\n\n                            value = fileItem.getString();\n                        }\n                    } else {\n                        value = fileItem.getString();\n                    }\n\n                    String[] curParam = (String[])multipartParameters.get(fileItem.getFieldName());\n                    if (curParam == null) {\n                        multipartParameters.put(fileItem.getFieldName(), new String[]{value});\n                    } else {\n                        String[] newParam = StringUtils.addStringToArray(curParam, value);\n                        multipartParameters.put(fileItem.getFieldName(), newParam);\n                    }\n\n                    multipartParameterContentTypes.put(fileItem.getFieldName(), fileItem.getContentType());\n                } else {\n                    CommonsMultipartFile file = new CommonsMultipartFile(fileItem);\n                    multipartFiles.add(file.getName(), file);\n                    Debug.trace(\n                            \"Found multipart file [\" + file.getName() + \"] of size \" + file.getSize() + \" bytes with original filename [\" + file.getOriginalFilename() + \"], stored \" + file.getStorageDescription()\n                    );\n                }\n            }\n\n            return new CommonsFileUploadSupport.MultipartParsingResult(multipartFiles, multipartParameters, multipartParameterContentTypes);\n        }\n    }\n\n    protected void cleanupFileItems(MultiValueMap<String, 
MultipartFile> multipartFiles) {\n        Iterator var2 = multipartFiles.values().iterator();\n\n        while(var2.hasNext()) {\n            List<MultipartFile> files = (List)var2.next();\n            Iterator var4 = files.iterator();\n\n            while(var4.hasNext()) {\n                MultipartFile file = (MultipartFile)var4.next();\n                if (file instanceof CommonsMultipartFile) {\n                    CommonsMultipartFile cmf = (CommonsMultipartFile)file;\n                    cmf.getFileItem().delete();\n\n                    Debug.trace(\"Cleaning up multipart file [\" + cmf.getName() + \"] with original filename [\" + cmf.getOriginalFilename() + \"], stored \" + cmf.getStorageDescription());\n                }\n            }\n        }\n\n    }\n\n    private String determineEncoding(String contentTypeHeader, String defaultEncoding) {\n        if (!StringUtils.hasText(contentTypeHeader)) {\n            return defaultEncoding;\n        } else {\n            MediaType contentType = MediaType.parseMediaType(contentTypeHeader);\n            Charset charset = contentType.getCharSet();\n            return charset != null ? 
charset.name() : defaultEncoding;\n        }\n    }\n\n    protected static class MultipartParsingResult {\n        private final MultiValueMap<String, MultipartFile> multipartFiles;\n        private final Map<String, String[]> multipartParameters;\n        private final Map<String, String> multipartParameterContentTypes;\n\n        public MultipartParsingResult(MultiValueMap<String, MultipartFile> mpFiles, Map<String, String[]> mpParams, Map<String, String> mpParamContentTypes) {\n            this.multipartFiles = mpFiles;\n            this.multipartParameters = mpParams;\n            this.multipartParameterContentTypes = mpParamContentTypes;\n        }\n\n        public MultiValueMap<String, MultipartFile> getMultipartFiles() {\n            return this.multipartFiles;\n        }\n\n        public Map<String, String[]> getMultipartParameters() {\n            return this.multipartParameters;\n        }\n\n        public Map<String, String> getMultipartParameterContentTypes() {\n            return this.multipartParameterContentTypes;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/commons/CommonsMultipartFile.java",
    "content": "package com.pinecone.summer.multiparts.commons;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.summer.multiparts.MultipartFile;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.Serializable;\n\nimport org.apache.commons.fileupload.FileItem;\nimport org.apache.commons.fileupload.FileUploadException;\nimport org.apache.commons.fileupload.disk.DiskFileItem;\n\npublic class CommonsMultipartFile implements MultipartFile, Serializable {\n    private final FileItem fileItem;\n    private final long size;\n\n    public CommonsMultipartFile(FileItem fileItem) {\n        this.fileItem = fileItem;\n        this.size = this.fileItem.getSize();\n    }\n\n    public final FileItem getFileItem() {\n        return this.fileItem;\n    }\n\n    public String getName() {\n        return this.fileItem.getFieldName();\n    }\n\n    public String getOriginalFilename() {\n        String filename = this.fileItem.getName();\n        if (filename == null) {\n            return \"\";\n        } else {\n            int pos = filename.lastIndexOf(\"/\");\n            if (pos == -1) {\n                pos = filename.lastIndexOf(\"\\\\\");\n            }\n\n            return pos != -1 ? filename.substring(pos + 1) : filename;\n        }\n    }\n\n    public String getContentType() {\n        return this.fileItem.getContentType();\n    }\n\n    public boolean isEmpty() {\n        return this.size == 0L;\n    }\n\n    public long getSize() {\n        return this.size;\n    }\n\n    public byte[] getBytes() {\n        if (!this.isAvailable()) {\n            throw new IllegalStateException(\"File has been moved - cannot be read again\");\n        } else {\n            byte[] bytes = this.fileItem.get();\n            return bytes != null ? 
bytes : new byte[0];\n        }\n    }\n\n    public InputStream getInputStream() throws IOException {\n        if (!this.isAvailable()) {\n            throw new IllegalStateException(\"File has been moved - cannot be read again\");\n        } else {\n            InputStream inputStream = this.fileItem.getInputStream();\n            return (InputStream)(inputStream != null ? inputStream : new ByteArrayInputStream(new byte[0]));\n        }\n    }\n\n    public void transferTo(File dest) throws IOException, IllegalStateException {\n        if (!this.isAvailable()) {\n            throw new IllegalStateException(\"File has already been moved - cannot be transferred again\");\n        } else if (dest.exists() && !dest.delete()) {\n            throw new IOException(\"Destination file [\" + dest.getAbsolutePath() + \"] already exists and could not be deleted\");\n        } else {\n            try {\n                this.fileItem.write(dest);\n                String action = \"transferred\";\n                if (!this.fileItem.isInMemory()) {\n                    action = this.isAvailable() ? 
\"copied\" : \"moved\";\n                }\n\n                Debug.trace(\"Multipart file '\" + this.getName() + \"' with original filename [\" + this.getOriginalFilename() + \"], stored \" + this.getStorageDescription() + \": \" + action + \" to [\" + dest.getAbsolutePath() + \"]\");\n\n\n            } catch (FileUploadException e) {\n                throw new IllegalStateException(e.getMessage());\n            } catch (IOException ioException) {\n                throw ioException;\n            } catch (Exception e2) {\n                Debug.trace(\"Could not transfer to file\");\n                throw new IOException(\"Could not transfer to file: \" + e2.getMessage());\n            }\n        }\n    }\n\n    protected boolean isAvailable() {\n        if (this.fileItem.isInMemory()) {\n            return true;\n        } else if (this.fileItem instanceof DiskFileItem) {\n            return ((DiskFileItem)this.fileItem).getStoreLocation().exists();\n        } else {\n            return this.fileItem.getSize() == this.size;\n        }\n    }\n\n    public String getStorageDescription() {\n        if (this.fileItem.isInMemory()) {\n            return \"in memory\";\n        } else {\n            return this.fileItem instanceof DiskFileItem ? \"at [\" + ((DiskFileItem)this.fileItem).getStoreLocation().getAbsolutePath() + \"]\" : \"on disk\";\n        }\n    }\n\n    public String getStoragePath(){\n        return this.fileItem instanceof DiskFileItem ? ((DiskFileItem)this.fileItem).getStoreLocation().getAbsolutePath() : \"\";\n    }\n\n\n\n    public void finalize() throws Throwable{\n        super.finalize();\n/*        if ( !this.fileItem.isInMemory() ){\n            String szGarbage = this.getStoragePath();\n            File fGarbage = new File( szGarbage );\n            if ( fGarbage.exists() ) { *//* Jesus fucking christ with tomcat.. 
**//*\n                //System.err.println( \"Upload garbage annihilating.\" );\n                if( !fGarbage.delete() ){\n                    System.err.println( \"Error after upload garbage annihilated.\" );\n                }\n            }\n        }*/\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/commons/CommonsMultipartFiles.java",
    "content": "package com.pinecone.summer.multiparts.commons;\n\nimport com.pinecone.framework.unit.LinkedMultiValueMap;\nimport com.pinecone.summer.ArchConnection;\nimport com.pinecone.summer.ArchHostSystem;\nimport com.pinecone.summer.multiparts.MultipartException;\nimport com.pinecone.summer.multiparts.MultipartFile;\nimport com.pinecone.summer.multiparts.MultipartHttpServletRequest;\nimport com.pinecone.summer.ArchConnectDispatcher;\nimport com.pinecone.summer.io.PathResource;\n\nimport javax.servlet.http.HttpServletRequest;\nimport java.io.IOException;\nimport java.util.Map;\n\npublic class CommonsMultipartFiles {\n    protected ArchConnection                 mConnection;\n    protected ArchHostSystem                 mSystem;\n    protected CommonsMultipartResolver       mMultipartResolver;\n    protected MultipartHttpServletRequest    mCurrentMultipartHttpServletRequest = null;\n    protected Map<String, MultipartFile>     mFilesMap = null;\n\n    public CommonsMultipartFiles( ArchConnection connection ) {\n        this.mConnection = connection;\n        this.mSystem = this.mConnection.getHostSystem();\n        this.init();\n    }\n\n\n\n\n    public ArchHostSystem getHostSystem(){\n        return this.mSystem;\n    }\n\n    public ArchConnectDispatcher getSystemDispathcher(){\n        return this.mConnection.getDispatcher();\n    }\n\n    public CommonsMultipartResolver getMultipartResolver(){\n        return this.mMultipartResolver;\n    }\n\n\n\n\n    private void init(){\n        this.mMultipartResolver = new CommonsMultipartResolver( this.mSystem.getSystemServlet().getServletContext() );\n\n        this.mMultipartResolver.setSingleUploadSize( this.mSystem.getSingleFileSizeMax() );\n        this.mMultipartResolver.setMaxUploadSize( this.mSystem.getSumFileSizeMax() );\n        this.mMultipartResolver.setDefaultEncoding( this.mSystem.getUploadEncode() );\n\n        String szUploadTempDir = this.mSystem.getUploadTempDir();\n        if( szUploadTempDir != 
null && !szUploadTempDir.isEmpty() ){\n            try {\n                this.mMultipartResolver.setUploadTempDir( new PathResource(szUploadTempDir) );\n            }\n            catch ( IOException e ) {\n                e.printStackTrace();\n            }\n        }\n\n    }\n\n    private HttpServletRequest getHttpServletRequest(){\n        return this.mConnection.$_REQUEST();\n    }\n\n    public boolean isMultipart(){\n        return this.mMultipartResolver.isMultipart( this.getHttpServletRequest() );\n    }\n\n    public void interceptMultipartFiles() throws MultipartException {\n        if ( this.isMultipart() ){\n            this.mCurrentMultipartHttpServletRequest = this.mMultipartResolver.resolveMultipart( this.getHttpServletRequest() );\n            this.mFilesMap = this.mCurrentMultipartHttpServletRequest.getFileMap();\n        }\n        else {\n            this.refresh();\n        }\n    }\n\n    public MultipartHttpServletRequest getCurrentMultipartRequest(){\n        return this.mCurrentMultipartHttpServletRequest;\n    }\n\n    public Map<String, MultipartFile> getCurrentFilesMap(){\n        return this.mFilesMap;\n    }\n\n    public void refresh() {\n        if( this.mFilesMap != null ){\n            if( !this.mFilesMap.isEmpty() ){\n                this.mFilesMap.clear();\n            }\n        }\n        else {\n            this.mFilesMap = new LinkedMultiValueMap() ;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/commons/CommonsMultipartResolver.java",
    "content": "package com.pinecone.summer.multiparts.commons;\n\nimport java.util.List;\nimport javax.servlet.ServletContext;\nimport javax.servlet.http.HttpServletRequest;\n\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.summer.multiparts.MaxUploadSizeExceededException;\nimport com.pinecone.summer.multiparts.MultipartException;\nimport com.pinecone.summer.multiparts.MultipartHttpServletRequest;\nimport com.pinecone.summer.multiparts.MultipartResolver;\nimport com.pinecone.summer.multiparts.support.DefaultMultipartHttpServletRequest;\nimport com.pinecone.summer.context.ServletContextAware;\nimport com.pinecone.summer.util.WebUtils;\nimport org.apache.commons.fileupload.FileItem;\nimport org.apache.commons.fileupload.FileItemFactory;\nimport org.apache.commons.fileupload.FileUpload;\nimport org.apache.commons.fileupload.FileUploadException;\nimport org.apache.commons.fileupload.FileUploadBase.SizeLimitExceededException;\nimport org.apache.commons.fileupload.servlet.ServletFileUpload;\n\n\npublic class CommonsMultipartResolver extends CommonsFileUploadSupport implements MultipartResolver, ServletContextAware {\n    private boolean resolveLazily;\n\n    public CommonsMultipartResolver() {\n        this.resolveLazily = false;\n    }\n\n    public CommonsMultipartResolver(ServletContext servletContext) {\n        this();\n        this.setServletContext(servletContext);\n    }\n\n    public void setResolveLazily(boolean resolveLazily) {\n        this.resolveLazily = resolveLazily;\n    }\n\n    protected FileUpload newFileUpload(FileItemFactory fileItemFactory) {\n        return new ServletFileUpload(fileItemFactory);\n    }\n\n    public void setServletContext(ServletContext servletContext) {\n        if (!this.isUploadTempDirSpecified()) {\n            this.getFileItemFactory().setRepository(WebUtils.getTempDir(servletContext));\n        }\n\n    }\n\n    public boolean isMultipart(HttpServletRequest request) {\n        return request != null && 
ServletFileUpload.isMultipartContent(request);\n    }\n\n    public MultipartHttpServletRequest resolveMultipart(final HttpServletRequest request) throws MultipartException {\n        Assert.notNull(request, \"Request must not be null\");\n        if (this.resolveLazily) {\n            return new DefaultMultipartHttpServletRequest(request) {\n                protected void initializeMultipart() {\n                    MultipartParsingResult parsingResult = CommonsMultipartResolver.this.parseRequest(request);\n                    this.setMultipartFiles(parsingResult.getMultipartFiles());\n                    this.setMultipartParameters(parsingResult.getMultipartParameters());\n                    this.setMultipartParameterContentTypes(parsingResult.getMultipartParameterContentTypes());\n                }\n            };\n        } else {\n            MultipartParsingResult parsingResult = this.parseRequest(request);\n            return new DefaultMultipartHttpServletRequest(request, parsingResult.getMultipartFiles(), parsingResult.getMultipartParameters(), parsingResult.getMultipartParameterContentTypes());\n        }\n    }\n\n    protected MultipartParsingResult parseRequest(HttpServletRequest request) throws MultipartException {\n        String encoding = this.determineEncoding(request);\n        FileUpload fileUpload = this.prepareFileUpload(encoding);\n\n        try {\n            List<FileItem> fileItems = ((ServletFileUpload)fileUpload).parseRequest(request);\n            return this.parseFileItems(fileItems, encoding);\n        } catch (SizeLimitExceededException var5) {\n            throw new MaxUploadSizeExceededException(fileUpload.getSizeMax(), var5);\n        } catch (FileUploadException var6) {\n            throw new MultipartException(\"Could not parse multipart servlet request\", var6);\n        }\n    }\n\n    protected String determineEncoding(HttpServletRequest request) {\n        String encoding = request.getCharacterEncoding();\n        if 
(encoding == null) {\n            encoding = this.getDefaultEncoding();\n        }\n\n        return encoding;\n    }\n\n    public void cleanupMultipart(MultipartHttpServletRequest request) {\n        if (request != null) {\n            try {\n                this.cleanupFileItems(request.getMultiFileMap());\n            } catch (Throwable var3) {\n                System.err.println(\"Failed to perform multipart cleanup for servlet request\");\n            }\n        }\n\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/support/AbstractMultipartHttpServletRequest.java",
    "content": "package com.pinecone.summer.multiparts.support;\n\nimport com.pinecone.framework.unit.LinkedMultiValueMap;\nimport com.pinecone.framework.unit.MultiValueMap;\nimport com.pinecone.summer.multiparts.MultipartFile;\nimport com.pinecone.summer.multiparts.MultipartHttpServletRequest;\nimport com.pinecone.summer.http.HttpHeaders;\nimport com.pinecone.summer.http.HttpMethod;\n\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletRequestWrapper;\nimport java.util.*;\n\npublic abstract class AbstractMultipartHttpServletRequest extends HttpServletRequestWrapper implements MultipartHttpServletRequest {\n    private MultiValueMap<String, MultipartFile> multipartFiles;\n\n    protected AbstractMultipartHttpServletRequest(HttpServletRequest request) {\n        super(request);\n    }\n\n    public HttpServletRequest getRequest() {\n        return (HttpServletRequest)super.getRequest();\n    }\n\n    public HttpMethod getRequestMethod() {\n        return HttpMethod.valueOf(this.getRequest().getMethod());\n    }\n\n    public HttpHeaders getRequestHeaders() {\n        HttpHeaders headers = new HttpHeaders();\n        Enumeration headerNames = this.getHeaderNames();\n\n        while(headerNames.hasMoreElements()) {\n            String headerName = (String)headerNames.nextElement();\n            headers.put(headerName, Collections.list(this.getHeaders(headerName)));\n        }\n\n        return headers;\n    }\n\n    public Iterator<String> getFileNames() {\n        return this.getMultipartFiles().keySet().iterator();\n    }\n\n    public MultipartFile getFile(String name) {\n        return (MultipartFile)this.getMultipartFiles().getFirst(name);\n    }\n\n    public List<MultipartFile> getFiles(String name) {\n        List<MultipartFile> multipartFiles = (List<MultipartFile>)this.getMultipartFiles().get(name);\n        return multipartFiles != null ? 
multipartFiles : Collections.emptyList();\n    }\n\n    public Map<String, MultipartFile> getFileMap() {\n        return this.getMultipartFiles().toSingleValueMap();\n    }\n\n    public MultiValueMap<String, MultipartFile> getMultiFileMap() {\n        return this.getMultipartFiles();\n    }\n\n    protected final void setMultipartFiles(MultiValueMap<String, MultipartFile> multipartFiles) {\n        this.multipartFiles = new LinkedMultiValueMap<>(Collections.unmodifiableMap(multipartFiles));\n    }\n\n    protected MultiValueMap<String, MultipartFile> getMultipartFiles() {\n        if (this.multipartFiles == null) {\n            this.initializeMultipart();\n        }\n\n        return this.multipartFiles;\n    }\n\n    protected void initializeMultipart() {\n        throw new IllegalStateException(\"Multipart request not initialized\");\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/multiparts/support/DefaultMultipartHttpServletRequest.java",
    "content": "package com.pinecone.summer.multiparts.support;\n\nimport com.pinecone.framework.unit.MultiValueMap;\nimport com.pinecone.summer.multiparts.MultipartFile;\nimport com.pinecone.summer.http.HttpHeaders;\n\nimport javax.servlet.http.HttpServletRequest;\nimport java.util.*;\n\npublic class DefaultMultipartHttpServletRequest extends AbstractMultipartHttpServletRequest {\n    private static final String CONTENT_TYPE = \"Content-Type\";\n    private Map<String, String[]> multipartParameters;\n    private Map<String, String> multipartParameterContentTypes;\n\n    public DefaultMultipartHttpServletRequest(HttpServletRequest request, MultiValueMap<String, MultipartFile> mpFiles, Map<String, String[]> mpParams, Map<String, String> mpParamContentTypes) {\n        super(request);\n        this.setMultipartFiles(mpFiles);\n        this.setMultipartParameters(mpParams);\n        this.setMultipartParameterContentTypes(mpParamContentTypes);\n    }\n\n    public DefaultMultipartHttpServletRequest(HttpServletRequest request) {\n        super(request);\n    }\n\n    public String getParameter(String name) {\n        String[] values = (String[])this.getMultipartParameters().get(name);\n        if (values != null) {\n            return values.length > 0 ? values[0] : null;\n        } else {\n            return super.getParameter(name);\n        }\n    }\n\n    public String[] getParameterValues(String name) {\n        String[] values = (String[])this.getMultipartParameters().get(name);\n        return values != null ? 
values : super.getParameterValues(name);\n    }\n\n    public Enumeration<String> getParameterNames() {\n        Map<String, String[]> multipartParameters = this.getMultipartParameters();\n        if (multipartParameters.isEmpty()) {\n            return super.getParameterNames();\n        } else {\n            Set<String> paramNames = new LinkedHashSet<>();\n            Enumeration paramEnum = super.getParameterNames();\n\n            while(paramEnum.hasMoreElements()) {\n                paramNames.add((String) paramEnum.nextElement());\n            }\n\n            paramNames.addAll(multipartParameters.keySet());\n            return Collections.enumeration(paramNames);\n        }\n    }\n\n    public Map<String, String[]> getParameterMap() {\n        Map<String, String[]> multipartParameters = this.getMultipartParameters();\n        if (multipartParameters.isEmpty()) {\n            return super.getParameterMap();\n        } else {\n            Map<String, String[]> paramMap = new LinkedHashMap<>();\n            paramMap.putAll(super.getParameterMap());\n            paramMap.putAll(multipartParameters);\n            return paramMap;\n        }\n    }\n\n    public String getMultipartContentType(String paramOrFileName) {\n        MultipartFile file = this.getFile(paramOrFileName);\n        return file != null ? 
file.getContentType() : (String)this.getMultipartParameterContentTypes().get(paramOrFileName);\n    }\n\n    public HttpHeaders getMultipartHeaders(String paramOrFileName) {\n        String contentType = this.getMultipartContentType(paramOrFileName);\n        if (contentType != null) {\n            HttpHeaders headers = new HttpHeaders();\n            headers.add(\"Content-Type\", contentType);\n            return headers;\n        } else {\n            return null;\n        }\n    }\n\n    protected final void setMultipartParameters(Map<String, String[]> multipartParameters) {\n        this.multipartParameters = multipartParameters;\n    }\n\n    protected Map<String, String[]> getMultipartParameters() {\n        if (this.multipartParameters == null) {\n            this.initializeMultipart();\n        }\n\n        return this.multipartParameters;\n    }\n\n    protected final void setMultipartParameterContentTypes(Map<String, String> multipartParameterContentTypes) {\n        this.multipartParameterContentTypes = multipartParameterContentTypes;\n    }\n\n    protected Map<String, String> getMultipartParameterContentTypes() {\n        if (this.multipartParameterContentTypes == null) {\n            this.initializeMultipart();\n        }\n\n        return this.multipartParameterContentTypes;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Citizen.java",
    "content": "package com.pinecone.summer.prototype;\n\npublic interface Citizen {\n    String vocationName();\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Component.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Component {\n    String value() default \"\";\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/ConnectDispatcher.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.summer.Connectiom;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\n\npublic interface ConnectDispatcher extends SequentialDispatcher {\n    void invokeDispatchBus() throws ServletException, IOException ;\n\n    void requestReceived() throws ServletException, IOException ;\n\n\n    void afterConnectionAccepted( Connectiom connectiom) throws ServletException, IOException;\n\n    /** Http Method Handler **/\n    void handleGet( Connectiom connectiom ) throws ServletException, IOException;\n\n    void handlePost( Connectiom connectiom ) throws ServletException, IOException;\n\n    void handleHead( Connectiom connectiom ) throws ServletException, IOException;\n\n    void handleOptions( Connectiom connectiom ) throws ServletException, IOException;\n\n    void handlePut( Connectiom connectiom ) throws ServletException, IOException;\n\n    void handlePatch( Connectiom connectiom ) throws ServletException, IOException;\n\n    void handleDelete( Connectiom connectiom ) throws ServletException, IOException;\n\n    void handleTrace( Connectiom connectiom ) throws ServletException, IOException;\n\n\n    /** Tracer **/\n    void traceSystemErrorMsg( String szTitle, String szErrorMsg ) throws IOException, ServletException;\n\n    void traceSystemErrorMsg( int nErrorID, String szTitle, String szErrorMsg ) throws IOException, ServletException;\n\n    void traceSystem404Error() throws IOException, ServletException;\n\n    void traceSystem404Error( String szErrorMsg ) throws IOException, ServletException;\n\n    void traceSystem500Error( String szErrorMsg ) throws IOException, ServletException;\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Connection.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.summer.http.HttpMethod;\n\nimport javax.servlet.ServletOutputStream;\nimport javax.servlet.http.HttpServlet;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.IOException;\nimport java.io.PrintWriter;\n\npublic interface Connection {\n    PrintWriter writer() throws IOException;\n\n    ServletOutputStream out() throws IOException;\n\n    HttpServletRequest getRequest();\n\n    HttpServletRequest getMultipartRequest();\n\n    boolean isMultipartRequest();\n\n    HttpServletResponse getResponse();\n\n    HttpServlet getServlet();\n\n    HttpMethod currentHttpMethod() ;\n\n    ConnectDispatcher getDispatcher();\n\n    HostSystem getHostSystem();\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Connectson.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.summer.multiparts.MultipartFile;\n\nimport javax.servlet.http.Cookie;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.util.Map;\n\npublic interface Connectson extends Connection {\n    JSONObject $_GPC();\n\n    JSONObject $_GET();\n\n    JSONObject $_POST();\n\n    default HttpServletRequest $_REQUEST(){\n        return this.getRequest();\n    }\n\n    HttpServletRequest $_REQUEST ( boolean bUsingMultipart );\n\n    default HttpServletResponse $_RESPONSE() {\n        return this.getResponse();\n    }\n\n    Map<String, MultipartFile> $_FILES();\n\n    Map<String, Cookie> $_COOKIE();\n}"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Controller.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\n@Component\npublic @interface Controller {\n    String value() default \"\";\n}\n\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/GenieBottle.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\n\npublic interface GenieBottle extends Wizardum, SequentialDispatcher {\n    void dispatch() throws IOException, ServletException;\n\n    void defaultGenie() throws Exception ;\n\n    void beforeGenieInvoke() throws Exception ;\n\n    void afterGenieInvoked() throws Exception ;\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/HostSystem.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.summer.RouterType;\n\nimport javax.servlet.ServletException;\n\npublic interface HostSystem {\n    JSONObject getGlobalConfig() ;\n\n    JSONObject getSystemConfig() ;\n\n    JSONObject getPublicWizardConfig();\n\n    String getControlParameter() ;\n\n    String getWizardParameter()  ;\n\n    String getModelParameter()   ;\n\n    void init() throws ServletException;\n\n    String getSystemPath();\n\n    String getRootClassPath();\n\n\n\n    String getWizardSummonerConfig();\n\n    String getWizardPackageName();\n\n    String getModelClassSuffix();\n\n    String getControlClassSuffix();\n\n\n    ConnectDispatcher handleByDispatcher(RouterType routerType );\n\n    default ConnectDispatcher handleByDispatcher() {\n        return this.handleByDispatcher( RouterType.QueryString );\n    }\n\n    RouterDispatcher getPrimeRouterDispatcher();\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/JSONBasedControl.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\n\npublic interface JSONBasedControl {\n    void beforeDispatch() throws IOException, ServletException;\n\n    void dispatch() throws IOException, ServletException ;\n\n    void afterDispatch() throws IOException, ServletException;\n\n    String getControlCommand();\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/JasperBasedModel.java",
    "content": "package com.pinecone.summer.prototype;\n\ninterface JasperBasedModel {\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/ModelEnchanter.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.METHOD, ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface ModelEnchanter {\n    boolean value() default true;\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Pagesion.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\n\n/**\n * Summer JSON Based Pagina(Page) Ion, for template stereotype.\n * Default as a controller. equal-> @Controller.\n */\npublic interface Pagesion extends Pageson, Wizard {\n    void beforeDispatch() throws IOException, ServletException;\n\n    void dispatch() throws IOException, ServletException ;\n\n    void afterDispatch() throws IOException, ServletException;\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Pageson.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.framework.util.json.JSONObject;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\nimport java.lang.reflect.Method;\n\npublic interface Pageson extends Wizard {\n    JSONObject getPageData();\n\n    String toJSONString();\n\n    String getModelCommand();\n\n    void setRenderum( Method fnRenderum );\n\n    void render() throws ServletException, IOException;\n\n    void setEnchanterRole( boolean bRole );\n\n    boolean isEnchanter();\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/RouterDispatcher.java",
    "content": "package com.pinecone.summer.prototype;\n\npublic interface RouterDispatcher {\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/RouterMapping.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.summer.http.HttpMethod;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.METHOD, ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface RouterMapping {\n    String name() default \"\";\n\n    String[] value() default {};\n\n    boolean relative() default true; // Only for methods.\n\n    HttpMethod[] method() default {};\n\n    String[] params() default {};\n\n    String[] headers() default {};\n\n    String[] consumes() default {};\n\n    String[] produces() default {};\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/SequentialDispatcher.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\n\npublic interface SequentialDispatcher {\n    void dispatch() throws IOException, ServletException;\n\n    void stop() throws RuntimeException;\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Servletson.java",
    "content": "package com.pinecone.summer.prototype;\n\npublic interface Servletson {\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Wizard.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.framework.system.prototype.Ally;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONObject;\n\n/**\n *  Bean Nuts Pinecone PineconeJava Summer - Wizard\n *  ****************************************************************************************************************\n *  Summer: JSON Based Java Servlet [C/C++ Style]\n *  Matrix: Bean Nuts Pinecone C/CPP Runtime Framework Extension Fast CGI Servlet Summer (JSON Based MVC)\n *  Notice: Pinecone is base on JSON Prototype,\n *  Notice: All functions or methods are based on JSON. We highly recommend you using JSON as data format, it is\n *          easy to compatible with JS, PHP and other platforms.\n *  Notice: For sub modular extends this interface is necessary. Add any function if your json config haves.\n *  ****************************************************************************************************************\n */\npublic interface Wizard extends Ally, Citizen {\n    @Override\n    default String vocationName(){\n        return this.getClass().getSimpleName();\n    }\n\n    String prototypeName();\n\n    String getTitle();\n\n    JSONObject getModularConfig();\n\n    String getModularRole();\n\n    int getModularRoleIndex();\n\n    JSONArray getMyNaughtyGenies();\n\n    String getWizardCommand();\n\n\n    /***  Parent getter methods ***/\n    Connectson getConnection();\n\n    HostSystem getHostSystem();\n\n    ConnectDispatcher getDispatcher();\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/WizardSummoner.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.framework.system.executum.ExecutableSummoner;\n\nimport javax.servlet.ServletException;\nimport java.io.IOException;\n\npublic interface WizardSummoner extends ExecutableSummoner {\n    HostSystem getSystem();\n\n    String queryNamespace( String szNickName );\n\n    Wizard getLastSummoned();\n\n    Wizard summonIfExist( String szNickName ) throws ServletException, IOException ;\n\n    Wizard summonAndExecute( String szNickName ) throws ServletException, IOException ;\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/prototype/Wizardum.java",
    "content": "package com.pinecone.summer.prototype;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.summer.multiparts.MultipartFile;\nimport com.pinecone.summer.NaughtyGenieInvokedException;\n\nimport javax.servlet.ServletOutputStream;\nimport javax.servlet.http.Cookie;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.util.Map;\n\n/**\n *  Pinecone For Java Wizardum [ Wizard Kernel Layer Prototype Interface ]\n *  Copyright © 2008 - 2028 Bean Nuts Foundation ( DR.Undefined ) All rights reserved. [Mr.A.R.B / WJH]\n *  *****************************************************************************************\n *  JSON Based: All dynamic map variables are based on JSON.\n *  PHP Style: QueryString, Form, Files, and etc. are overridden to $_GET, $_POST, and etc.\n *  *****************************************************************************************\n */\npublic interface Wizardum extends Wizard {\n    JSONObject $_GPC();\n\n    JSONObject $_GET();\n\n    JSONObject $_POST();\n\n    PrintWriter writer() ;\n\n    ServletOutputStream out() ;\n\n    HttpServletRequest $_REQUEST();\n\n    HttpServletRequest getCurrentMultipartRequest();\n\n    HttpServletResponse $_RESPONSE();\n\n    Map<String, Cookie > $_COOKIE();\n\n    Map<String, MultipartFile> $_FILES();\n\n    void redirect( String szURL ) throws IOException;\n\n    String spawnWizardQuerySpell( String szPrototype );\n\n    String spawnActionQuerySpell( String szActionFunctionName ) ;\n\n    String spawnControlQuerySpell( String szControlFunctionName ) ;\n\n    String spawnActionControlSpell( String szActionFnName, String szControlFnName );\n\n    Object summonNormalGenieByCallHisName( String szGenieName ) throws NaughtyGenieInvokedException;\n\n    String getWizardCommand();\n\n    String getModelCommand();\n\n    String getControlCommand();\n\n    void stop() 
throws RuntimeException;\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/util/InvalidMimeTypeException.java",
    "content": "package com.pinecone.summer.util;\n\npublic class InvalidMimeTypeException extends IllegalArgumentException {\n    private String mimeType;\n\n    public InvalidMimeTypeException(String mimeType, String message) {\n        super(\"Invalid mime type \\\"\" + mimeType + \"\\\": \" + message);\n        this.mimeType = mimeType;\n    }\n\n    public String getMimeType() {\n        return this.mimeType;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/util/MimeType.java",
    "content": "package com.pinecone.summer.util;\n\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.CollectionUtils;\nimport com.pinecone.framework.unit.LinkedCaseInsensitiveMap;\n\nimport java.io.Serializable;\nimport java.nio.charset.Charset;\nimport java.util.BitSet;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.Locale;\nimport java.util.Map;\nimport java.util.TreeSet;\nimport java.util.Map.Entry;\n\npublic class MimeType implements Comparable<MimeType>, Serializable {\n    private static final long serialVersionUID = 4085923477777865903L;\n    protected static final String WILDCARD_TYPE = \"*\";\n    private static final BitSet TOKEN;\n    private static final String PARAM_CHARSET = \"charset\";\n    private final String type;\n    private final String subtype;\n    private final Map<String, String> parameters;\n\n    public MimeType(String type) {\n        this(type, \"*\");\n    }\n\n    public MimeType(String type, String subtype) {\n        this(type, subtype, Collections.emptyMap());\n    }\n\n    public MimeType(String type, String subtype, Charset charSet) {\n        this(type, subtype, Collections.singletonMap(\"charset\", charSet.name()));\n    }\n\n    public MimeType(MimeType other, Map<String, String> parameters) {\n        this(other.getType(), other.getSubtype(), parameters);\n    }\n\n    public MimeType(String type, String subtype, Map<String, String> parameters) {\n        Assert.hasLength(type, \"type must not be empty\");\n        Assert.hasLength(subtype, \"subtype must not be empty\");\n        this.checkToken(type);\n        this.checkToken(subtype);\n        this.type = type.toLowerCase(Locale.ENGLISH);\n        this.subtype = subtype.toLowerCase(Locale.ENGLISH);\n        if (!CollectionUtils.isEmpty(parameters)) {\n            Map<String, String> map = new LinkedCaseInsensitiveMap<>(parameters.size(), Locale.ENGLISH);\n            Iterator var5 = 
parameters.entrySet().iterator();\n\n            while(var5.hasNext()) {\n                Entry<String, String> entry = (Entry)var5.next();\n                String attribute = (String)entry.getKey();\n                String value = (String)entry.getValue();\n                this.checkParameters(attribute, value);\n                map.put(attribute, value);\n            }\n\n            this.parameters = Collections.unmodifiableMap(map);\n        } else {\n            this.parameters = Collections.emptyMap();\n        }\n\n    }\n\n    private void checkToken(String token) {\n        for(int i = 0; i < token.length(); ++i) {\n            char ch = token.charAt(i);\n            if (!TOKEN.get(ch)) {\n                throw new IllegalArgumentException(\"Invalid token character '\" + ch + \"' in token \\\"\" + token + \"\\\"\");\n            }\n        }\n\n    }\n\n    protected void checkParameters(String attribute, String value) {\n        Assert.hasLength(attribute, \"parameter attribute must not be empty\");\n        Assert.hasLength(value, \"parameter value must not be empty\");\n        this.checkToken(attribute);\n        if (\"charset\".equals(attribute)) {\n            value = this.unquote(value);\n            Charset.forName(value);\n        } else if (!this.isQuotedString(value)) {\n            this.checkToken(value);\n        }\n\n    }\n\n    private boolean isQuotedString(String s) {\n        if (s.length() < 2) {\n            return false;\n        } else {\n            return s.startsWith(\"\\\"\") && s.endsWith(\"\\\"\") || s.startsWith(\"'\") && s.endsWith(\"'\");\n        }\n    }\n\n    protected String unquote(String s) {\n        if (s == null) {\n            return null;\n        } else {\n            return this.isQuotedString(s) ? 
s.substring(1, s.length() - 1) : s;\n        }\n    }\n\n    public boolean isWildcardType() {\n        return \"*\".equals(this.getType());\n    }\n\n    public boolean isWildcardSubtype() {\n        return \"*\".equals(this.getSubtype()) || this.getSubtype().startsWith(\"*+\");\n    }\n\n    public boolean isConcrete() {\n        return !this.isWildcardType() && !this.isWildcardSubtype();\n    }\n\n    public String getType() {\n        return this.type;\n    }\n\n    public String getSubtype() {\n        return this.subtype;\n    }\n\n    public Charset getCharSet() {\n        String charSet = this.getParameter(\"charset\");\n        return charSet != null ? Charset.forName(this.unquote(charSet)) : null;\n    }\n\n    public String getParameter(String name) {\n        return (String)this.parameters.get(name);\n    }\n\n    public Map<String, String> getParameters() {\n        return this.parameters;\n    }\n\n    public boolean includes(MimeType other) {\n        if (other == null) {\n            return false;\n        } else if (this.isWildcardType()) {\n            return true;\n        } else {\n            if (this.getType().equals(other.getType())) {\n                if (this.getSubtype().equals(other.getSubtype())) {\n                    return true;\n                }\n\n                if (this.isWildcardSubtype()) {\n                    int thisPlusIdx = this.getSubtype().indexOf(43);\n                    if (thisPlusIdx == -1) {\n                        return true;\n                    }\n\n                    int otherPlusIdx = other.getSubtype().indexOf(43);\n                    if (otherPlusIdx != -1) {\n                        String thisSubtypeNoSuffix = this.getSubtype().substring(0, thisPlusIdx);\n                        String thisSubtypeSuffix = this.getSubtype().substring(thisPlusIdx + 1);\n                        String otherSubtypeSuffix = other.getSubtype().substring(otherPlusIdx + 1);\n                        if 
(thisSubtypeSuffix.equals(otherSubtypeSuffix) && \"*\".equals(thisSubtypeNoSuffix)) {\n                            return true;\n                        }\n                    }\n                }\n            }\n\n            return false;\n        }\n    }\n\n    public boolean isCompatibleWith(MimeType other) {\n        if (other == null) {\n            return false;\n        } else if (!this.isWildcardType() && !other.isWildcardType()) {\n            if (this.getType().equals(other.getType())) {\n                if (this.getSubtype().equals(other.getSubtype())) {\n                    return true;\n                }\n\n                if (this.isWildcardSubtype() || other.isWildcardSubtype()) {\n                    int thisPlusIdx = this.getSubtype().indexOf(43);\n                    int otherPlusIdx = other.getSubtype().indexOf(43);\n                    if (thisPlusIdx == -1 && otherPlusIdx == -1) {\n                        return true;\n                    }\n\n                    if (thisPlusIdx != -1 && otherPlusIdx != -1) {\n                        String thisSubtypeNoSuffix = this.getSubtype().substring(0, thisPlusIdx);\n                        String otherSubtypeNoSuffix = other.getSubtype().substring(0, otherPlusIdx);\n                        String thisSubtypeSuffix = this.getSubtype().substring(thisPlusIdx + 1);\n                        String otherSubtypeSuffix = other.getSubtype().substring(otherPlusIdx + 1);\n                        if (thisSubtypeSuffix.equals(otherSubtypeSuffix) && (\"*\".equals(thisSubtypeNoSuffix) || \"*\".equals(otherSubtypeNoSuffix))) {\n                            return true;\n                        }\n                    }\n                }\n            }\n\n            return false;\n        } else {\n            return true;\n        }\n    }\n\n    public int compareTo(MimeType other) {\n        int comp = this.getType().compareToIgnoreCase(other.getType());\n        if (comp != 0) {\n            return comp;\n        
} else {\n            comp = this.getSubtype().compareToIgnoreCase(other.getSubtype());\n            if (comp != 0) {\n                return comp;\n            } else {\n                comp = this.getParameters().size() - other.getParameters().size();\n                if (comp != 0) {\n                    return comp;\n                } else {\n                    TreeSet<String> thisAttributes = new TreeSet(String.CASE_INSENSITIVE_ORDER);\n                    thisAttributes.addAll(this.getParameters().keySet());\n                    TreeSet<String> otherAttributes = new TreeSet(String.CASE_INSENSITIVE_ORDER);\n                    otherAttributes.addAll(other.getParameters().keySet());\n                    Iterator<String> thisAttributesIterator = thisAttributes.iterator();\n                    Iterator otherAttributesIterator = otherAttributes.iterator();\n\n                    do {\n                        if (!thisAttributesIterator.hasNext()) {\n                            return 0;\n                        }\n\n                        String thisAttribute = (String)thisAttributesIterator.next();\n                        String otherAttribute = (String)otherAttributesIterator.next();\n                        comp = thisAttribute.compareToIgnoreCase(otherAttribute);\n                        if (comp != 0) {\n                            return comp;\n                        }\n\n                        String thisValue = (String)this.getParameters().get(thisAttribute);\n                        String otherValue = (String)other.getParameters().get(otherAttribute);\n                        if (otherValue == null) {\n                            otherValue = \"\";\n                        }\n\n                        comp = thisValue.compareTo(otherValue);\n                    } while(comp == 0);\n\n                    return comp;\n                }\n            }\n        }\n    }\n\n    public boolean equals(Object other) {\n        if (this == other) {\n        
    return true;\n        } else if (!(other instanceof MimeType)) {\n            return false;\n        } else {\n            MimeType otherType = (MimeType)other;\n            return this.type.equalsIgnoreCase(otherType.type) && this.subtype.equalsIgnoreCase(otherType.subtype) && this.parameters.equals(otherType.parameters);\n        }\n    }\n\n    public int hashCode() {\n        int result = this.type.hashCode();\n        result = 31 * result + this.subtype.hashCode();\n        result = 31 * result + this.parameters.hashCode();\n        return result;\n    }\n\n    public String toString() {\n        StringBuilder builder = new StringBuilder();\n        this.appendTo(builder);\n        return builder.toString();\n    }\n\n    protected void appendTo(StringBuilder builder) {\n        builder.append(this.type);\n        builder.append('/');\n        builder.append(this.subtype);\n        this.appendTo(this.parameters, builder);\n    }\n\n    private void appendTo(Map<String, String> map, StringBuilder builder) {\n        Iterator var3 = map.entrySet().iterator();\n\n        while(var3.hasNext()) {\n            Entry<String, String> entry = (Entry)var3.next();\n            builder.append(';');\n            builder.append((String)entry.getKey());\n            builder.append('=');\n            builder.append((String)entry.getValue());\n        }\n\n    }\n\n    public static MimeType valueOf(String value) {\n        return MimeTypeUtils.parseMimeType(value);\n    }\n\n    static {\n        BitSet ctl = new BitSet(128);\n\n        for(int i = 0; i <= 31; ++i) {\n            ctl.set(i);\n        }\n\n        ctl.set(127);\n        BitSet separators = new BitSet(128);\n        separators.set(40);\n        separators.set(41);\n        separators.set(60);\n        separators.set(62);\n        separators.set(64);\n        separators.set(44);\n        separators.set(59);\n        separators.set(58);\n        separators.set(92);\n        separators.set(34);\n        
separators.set(47);\n        separators.set(91);\n        separators.set(93);\n        separators.set(63);\n        separators.set(61);\n        separators.set(123);\n        separators.set(125);\n        separators.set(32);\n        separators.set(9);\n        TOKEN = new BitSet(128);\n        TOKEN.set(0, 128);\n        TOKEN.andNot(ctl);\n        TOKEN.andNot(separators);\n    }\n\n    public static class SpecificityComparator<T extends MimeType> implements Comparator<T> {\n        public SpecificityComparator() {\n        }\n\n        public int compare(T mimeType1, T mimeType2) {\n            if (mimeType1.isWildcardType() && !mimeType2.isWildcardType()) {\n                return 1;\n            } else if (mimeType2.isWildcardType() && !mimeType1.isWildcardType()) {\n                return -1;\n            } else if (!mimeType1.getType().equals(mimeType2.getType())) {\n                return 0;\n            } else if (mimeType1.isWildcardSubtype() && !mimeType2.isWildcardSubtype()) {\n                return 1;\n            } else if (mimeType2.isWildcardSubtype() && !mimeType1.isWildcardSubtype()) {\n                return -1;\n            } else {\n                return !mimeType1.getSubtype().equals(mimeType2.getSubtype()) ? 0 : this.compareParameters(mimeType1, mimeType2);\n            }\n        }\n\n        protected int compareParameters(T mimeType1, T mimeType2) {\n            int paramsSize1 = mimeType1.getParameters().size();\n            int paramsSize2 = mimeType2.getParameters().size();\n            return paramsSize2 < paramsSize1 ? -1 : (paramsSize2 == paramsSize1 ? 0 : 1);\n        }\n    }\n}\n\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/util/MimeTypeUtils.java",
    "content": "package com.pinecone.summer.util;\n\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.nio.charset.UnsupportedCharsetException;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.Collections;\nimport java.util.Comparator;\nimport java.util.Iterator;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport com.pinecone.summer.util.MimeType.SpecificityComparator;\n\npublic abstract class MimeTypeUtils {\n    public static final MimeType ALL = MimeType.valueOf(\"*/*\");\n    public static final String ALL_VALUE = \"*/*\";\n    public static final MimeType APPLICATION_ATOM_XML = MimeType.valueOf(\"application/atom+xml\");\n    public static final String APPLICATION_ATOM_XML_VALUE = \"application/atom+xml\";\n    public static final MimeType APPLICATION_FORM_URLENCODED = MimeType.valueOf(\"application/x-www-form-urlencoded\");\n    public static final String APPLICATION_FORM_URLENCODED_VALUE = \"application/x-www-form-urlencoded\";\n    public static final MimeType APPLICATION_JSON = MimeType.valueOf(\"application/json\");\n    public static final String APPLICATION_JSON_VALUE = \"application/json\";\n    public static final MimeType APPLICATION_OCTET_STREAM = MimeType.valueOf(\"application/octet-stream\");\n    public static final String APPLICATION_OCTET_STREAM_VALUE = \"application/octet-stream\";\n    public static final MimeType APPLICATION_XHTML_XML = MimeType.valueOf(\"application/xhtml+xml\");\n    public static final String APPLICATION_XHTML_XML_VALUE = \"application/xhtml+xml\";\n    public static final MimeType APPLICATION_XML = MimeType.valueOf(\"application/xml\");\n    public static final String APPLICATION_XML_VALUE = \"application/xml\";\n    public static final MimeType IMAGE_GIF = MimeType.valueOf(\"image/gif\");\n    public static final String IMAGE_GIF_VALUE = \"image/gif\";\n    public static final MimeType IMAGE_JPEG = 
MimeType.valueOf(\"image/jpeg\");\n    public static final String IMAGE_JPEG_VALUE = \"image/jpeg\";\n    public static final MimeType IMAGE_PNG = MimeType.valueOf(\"image/png\");\n    public static final String IMAGE_PNG_VALUE = \"image/png\";\n    public static final MimeType MULTIPART_FORM_DATA = MimeType.valueOf(\"multipart/form-data\");\n    public static final String MULTIPART_FORM_DATA_VALUE = \"multipart/form-data\";\n    public static final MimeType TEXT_HTML = MimeType.valueOf(\"text/html\");\n    public static final String TEXT_HTML_VALUE = \"text/html\";\n    public static final MimeType TEXT_PLAIN = MimeType.valueOf(\"text/plain\");\n    public static final String TEXT_PLAIN_VALUE = \"text/plain\";\n    public static final MimeType TEXT_XML = MimeType.valueOf(\"text/xml\");\n    public static final String TEXT_XML_VALUE = \"text/xml\";\n    public static final Comparator<MimeType> SPECIFICITY_COMPARATOR = new SpecificityComparator();\n\n    public MimeTypeUtils() {\n    }\n\n    public static MimeType parseMimeType(String mimeType) {\n        if (!StringUtils.hasLength(mimeType)) {\n            throw new InvalidMimeTypeException(mimeType, \"'mimeType' must not be empty\");\n        } else {\n            String[] parts = StringUtils.tokenizeToStringArray(mimeType, \";\");\n            String fullType = parts[0].trim();\n            if (\"*\".equals(fullType)) {\n                fullType = \"*/*\";\n            }\n\n            int subIndex = fullType.indexOf(47);\n            if (subIndex == -1) {\n                throw new InvalidMimeTypeException(mimeType, \"does not contain '/'\");\n            } else if (subIndex == fullType.length() - 1) {\n                throw new InvalidMimeTypeException(mimeType, \"does not contain subtype after '/'\");\n            } else {\n                String type = fullType.substring(0, subIndex);\n                String subtype = fullType.substring(subIndex + 1, fullType.length());\n                if 
(\"*\".equals(type) && !\"*\".equals(subtype)) {\n                    throw new InvalidMimeTypeException(mimeType, \"wildcard type is legal only in '*/*' (all mime types)\");\n                } else {\n                    Map<String, String> parameters = null;\n                    if (parts.length > 1) {\n                        parameters = new LinkedHashMap(parts.length - 1);\n\n                        for(int i = 1; i < parts.length; ++i) {\n                            String parameter = parts[i];\n                            int eqIndex = parameter.indexOf(61);\n                            if (eqIndex != -1) {\n                                String attribute = parameter.substring(0, eqIndex);\n                                String value = parameter.substring(eqIndex + 1, parameter.length());\n                                parameters.put(attribute, value);\n                            }\n                        }\n                    }\n\n                    try {\n                        return new MimeType(type, subtype, parameters);\n                    } catch (UnsupportedCharsetException var12) {\n                        throw new InvalidMimeTypeException(mimeType, \"unsupported charset '\" + var12.getCharsetName() + \"'\");\n                    } catch (IllegalArgumentException var13) {\n                        throw new InvalidMimeTypeException(mimeType, var13.getMessage());\n                    }\n                }\n            }\n        }\n    }\n\n    public static List<MimeType> parseMimeTypes(String mimeTypes) {\n        if (!StringUtils.hasLength(mimeTypes)) {\n            return Collections.emptyList();\n        } else {\n            String[] tokens = mimeTypes.split(\",\\\\s*\");\n            List<MimeType> result = new ArrayList(tokens.length);\n            String[] var3 = tokens;\n            int var4 = tokens.length;\n\n            for(int var5 = 0; var5 < var4; ++var5) {\n                String token = var3[var5];\n                
result.add(parseMimeType(token));\n            }\n\n            return result;\n        }\n    }\n\n    public static String toString(Collection<? extends MimeType> mimeTypes) {\n        StringBuilder builder = new StringBuilder();\n        Iterator iterator = mimeTypes.iterator();\n\n        while(iterator.hasNext()) {\n            MimeType mimeType = (MimeType)iterator.next();\n            mimeType.appendTo(builder);\n            if (iterator.hasNext()) {\n                builder.append(\", \");\n            }\n        }\n\n        return builder.toString();\n    }\n\n    public static void sortBySpecificity(List<MimeType> mimeTypes) {\n        Assert.notNull(mimeTypes, \"'mimeTypes' must not be null\");\n        if (mimeTypes.size() > 1) {\n            Collections.sort(mimeTypes, SPECIFICITY_COMPARATOR);\n        }\n\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/util/ResourceUtils.java",
    "content": "package com.pinecone.summer.util;\n\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.ClassUtils;\nimport com.pinecone.framework.util.StringUtils;\n\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.net.MalformedURLException;\nimport java.net.URI;\nimport java.net.URISyntaxException;\nimport java.net.URL;\nimport java.net.URLConnection;\n\npublic abstract class ResourceUtils {\n    public static final String CLASSPATH_URL_PREFIX = \"classpath:\";\n    public static final String FILE_URL_PREFIX = \"file:\";\n    public static final String JAR_URL_PREFIX = \"jar:\";\n    public static final String URL_PROTOCOL_FILE = \"file\";\n    public static final String URL_PROTOCOL_JAR = \"jar\";\n    public static final String URL_PROTOCOL_ZIP = \"zip\";\n    public static final String URL_PROTOCOL_WSJAR = \"wsjar\";\n    public static final String URL_PROTOCOL_VFSZIP = \"vfszip\";\n    public static final String URL_PROTOCOL_VFSFILE = \"vfsfile\";\n    public static final String URL_PROTOCOL_VFS = \"vfs\";\n    public static final String JAR_FILE_EXTENSION = \".jar\";\n    public static final String JAR_URL_SEPARATOR = \"!/\";\n\n    public ResourceUtils() {\n    }\n\n    public static boolean isUrl(String resourceLocation) {\n        if (resourceLocation == null) {\n            return false;\n        } else if (resourceLocation.startsWith(\"classpath:\")) {\n            return true;\n        } else {\n            try {\n                new URL(resourceLocation);\n                return true;\n            } catch (MalformedURLException var2) {\n                return false;\n            }\n        }\n    }\n\n    public static URL getURL(String resourceLocation) throws FileNotFoundException {\n        Assert.notNull(resourceLocation, \"Resource location must not be null\");\n        if (resourceLocation.startsWith(\"classpath:\")) {\n            String path = 
resourceLocation.substring(\"classpath:\".length());\n            ClassLoader cl = ClassUtils.getDefaultClassLoader();\n            URL url = cl != null ? cl.getResource(path) : ClassLoader.getSystemResource(path);\n            if (url == null) {\n                String description = \"class path resource [\" + path + \"]\";\n                throw new FileNotFoundException(description + \" cannot be resolved to URL because it does not exist\");\n            } else {\n                return url;\n            }\n        } else {\n            try {\n                return new URL(resourceLocation);\n            } catch (MalformedURLException var6) {\n                try {\n                    return (new File(resourceLocation)).toURI().toURL();\n                } catch (MalformedURLException var5) {\n                    throw new FileNotFoundException(\"Resource location [\" + resourceLocation + \"] is neither a URL not a well-formed file path\");\n                }\n            }\n        }\n    }\n\n    public static File getFile(String resourceLocation) throws FileNotFoundException {\n        Assert.notNull(resourceLocation, \"Resource location must not be null\");\n        if (resourceLocation.startsWith(\"classpath:\")) {\n            String path = resourceLocation.substring(\"classpath:\".length());\n            String description = \"class path resource [\" + path + \"]\";\n            ClassLoader cl = ClassUtils.getDefaultClassLoader();\n            URL url = cl != null ? 
cl.getResource(path) : ClassLoader.getSystemResource(path);\n            if (url == null) {\n                throw new FileNotFoundException(description + \" cannot be resolved to absolute file path because it does not exist\");\n            } else {\n                return getFile(url, description);\n            }\n        } else {\n            try {\n                return getFile(new URL(resourceLocation));\n            } catch (MalformedURLException var5) {\n                return new File(resourceLocation);\n            }\n        }\n    }\n\n    public static File getFile(URL resourceUrl) throws FileNotFoundException {\n        return getFile(resourceUrl, \"URL\");\n    }\n\n    public static File getFile(URL resourceUrl, String description) throws FileNotFoundException {\n        Assert.notNull(resourceUrl, \"Resource URL must not be null\");\n        if (!\"file\".equals(resourceUrl.getProtocol())) {\n            throw new FileNotFoundException(description + \" cannot be resolved to absolute file path \" + \"because it does not reside in the file system: \" + resourceUrl);\n        } else {\n            try {\n                return new File(toURI(resourceUrl).getSchemeSpecificPart());\n            } catch (URISyntaxException var3) {\n                return new File(resourceUrl.getFile());\n            }\n        }\n    }\n\n    public static File getFile(URI resourceUri) throws FileNotFoundException {\n        return getFile(resourceUri, \"URI\");\n    }\n\n    public static File getFile(URI resourceUri, String description) throws FileNotFoundException {\n        Assert.notNull(resourceUri, \"Resource URI must not be null\");\n        if (!\"file\".equals(resourceUri.getScheme())) {\n            throw new FileNotFoundException(description + \" cannot be resolved to absolute file path \" + \"because it does not reside in the file system: \" + resourceUri);\n        } else {\n            return new File(resourceUri.getSchemeSpecificPart());\n        }\n    
}\n\n    public static boolean isFileURL(URL url) {\n        String protocol = url.getProtocol();\n        return \"file\".equals(protocol) || \"vfsfile\".equals(protocol) || \"vfs\".equals(protocol);\n    }\n\n    public static boolean isJarURL(URL url) {\n        String protocol = url.getProtocol();\n        return \"jar\".equals(protocol) || \"zip\".equals(protocol) || \"vfszip\".equals(protocol) || \"wsjar\".equals(protocol);\n    }\n\n    public static boolean isJarFileURL(URL url) {\n        return \"file\".equals(url.getProtocol()) && url.getPath().toLowerCase().endsWith(\".jar\");\n    }\n\n    public static URL extractJarFileURL(URL jarUrl) throws MalformedURLException {\n        String urlFile = jarUrl.getFile();\n        int separatorIndex = urlFile.indexOf(\"!/\");\n        if (separatorIndex != -1) {\n            String jarFile = urlFile.substring(0, separatorIndex);\n\n            try {\n                return new URL(jarFile);\n            } catch (MalformedURLException var5) {\n                if (!jarFile.startsWith(\"/\")) {\n                    jarFile = \"/\" + jarFile;\n                }\n\n                return new URL(\"file:\" + jarFile);\n            }\n        } else {\n            return jarUrl;\n        }\n    }\n\n    public static URI toURI(URL url) throws URISyntaxException {\n        return toURI(url.toString());\n    }\n\n    public static URI toURI(String location) throws URISyntaxException {\n        return new URI(StringUtils.replace(location, \" \", \"%20\"));\n    }\n\n    public static void useCachesIfNecessary(URLConnection con) {\n        con.setUseCaches(con.getClass().getSimpleName().startsWith(\"JNLP\"));\n    }\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/util/RouteUtils.java",
    "content": "package com.pinecone.summer.util;\n\nimport javax.servlet.http.HttpServletRequest;\n\npublic abstract class RouteUtils {\n    private static final String[] HEADERS_TO_TRY = {\n            \"X-Forwarded-For\",\n            \"x-forwarded-for\",\n            \"Proxy-Client-IP\",\n            \"WL-Proxy-Client-IP\",\n            \"HTTP_X_FORWARDED_FOR\",\n            \"HTTP_X_FORWARDED\",\n            \"HTTP_X_CLUSTER_CLIENT_IP\",\n            \"HTTP_CLIENT_IP\",\n            \"HTTP_FORWARDED_FOR\",\n            \"HTTP_FORWARDED\",\n            \"HTTP_VIA\",\n            \"REMOTE_ADDR\",\n            \"X-Real-IP\"\n    };\n\n\n    public static String getRealRemoteAddr( HttpServletRequest request ) {\n        for ( String header : RouteUtils.HEADERS_TO_TRY ) {\n            String ip = request.getHeader(header);\n            if ( ip != null && ip.length() != 0 && !\"unknown\".equalsIgnoreCase(ip) ) {\n                return ip;\n            }\n        }\n        return request.getRemoteAddr();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Summer/src/main/java/com/pinecone/summer/util/WebUtils.java",
    "content": "package com.pinecone.summer.util;\n\nimport com.pinecone.framework.util.Assert;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.unit.LinkedMultiValueMap;\nimport com.pinecone.framework.unit.MultiValueMap;\n\nimport javax.servlet.*;\nimport javax.servlet.http.Cookie;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpSession;\nimport java.io.File;\nimport java.io.FileNotFoundException;\nimport java.util.*;\n\n\n\npublic abstract class WebUtils {\n    public static final String INCLUDE_REQUEST_URI_ATTRIBUTE = \"javax.servlet.include.request_uri\";\n    public static final String INCLUDE_CONTEXT_PATH_ATTRIBUTE = \"javax.servlet.include.context_path\";\n    public static final String INCLUDE_SERVLET_PATH_ATTRIBUTE = \"javax.servlet.include.servlet_path\";\n    public static final String INCLUDE_PATH_INFO_ATTRIBUTE = \"javax.servlet.include.path_info\";\n    public static final String INCLUDE_QUERY_STRING_ATTRIBUTE = \"javax.servlet.include.query_string\";\n    public static final String FORWARD_REQUEST_URI_ATTRIBUTE = \"javax.servlet.forward.request_uri\";\n    public static final String FORWARD_CONTEXT_PATH_ATTRIBUTE = \"javax.servlet.forward.context_path\";\n    public static final String FORWARD_SERVLET_PATH_ATTRIBUTE = \"javax.servlet.forward.servlet_path\";\n    public static final String FORWARD_PATH_INFO_ATTRIBUTE = \"javax.servlet.forward.path_info\";\n    public static final String FORWARD_QUERY_STRING_ATTRIBUTE = \"javax.servlet.forward.query_string\";\n    public static final String ERROR_STATUS_CODE_ATTRIBUTE = \"javax.servlet.error.status_code\";\n    public static final String ERROR_EXCEPTION_TYPE_ATTRIBUTE = \"javax.servlet.error.exception_type\";\n    public static final String ERROR_MESSAGE_ATTRIBUTE = \"javax.servlet.error.message\";\n    public static final String ERROR_EXCEPTION_ATTRIBUTE = \"javax.servlet.error.exception\";\n    public static final String 
ERROR_REQUEST_URI_ATTRIBUTE = \"javax.servlet.error.request_uri\";\n    public static final String ERROR_SERVLET_NAME_ATTRIBUTE = \"javax.servlet.error.servlet_name\";\n    public static final String CONTENT_TYPE_CHARSET_PREFIX = \";charset=\";\n    public static final String DEFAULT_CHARACTER_ENCODING = \"ISO-8859-1\";\n    public static final String TEMP_DIR_CONTEXT_ATTRIBUTE = \"javax.servlet.context.tempdir\";\n    public static final String HTML_ESCAPE_CONTEXT_PARAM = \"defaultHtmlEscape\";\n    public static final String RESPONSE_ENCODED_HTML_ESCAPE_CONTEXT_PARAM = \"responseEncodedHtmlEscape\";\n    public static final String WEB_APP_ROOT_KEY_PARAM = \"webAppRootKey\";\n    public static final String DEFAULT_WEB_APP_ROOT_KEY = \"webapp.root\";\n    public static final String[] SUBMIT_IMAGE_SUFFIXES = new String[]{\".x\", \".y\"};\n    public static final String SESSION_MUTEX_ATTRIBUTE = WebUtils.class.getName() + \".MUTEX\";\n\n    public WebUtils() {\n    }\n\n    public static void setWebAppRootSystemProperty(ServletContext servletContext) throws IllegalStateException {\n        Assert.notNull(servletContext, \"ServletContext must not be null\");\n        String root = servletContext.getRealPath(\"/\");\n        if (root == null) {\n            throw new IllegalStateException(\"Cannot set web app root system property when WAR file is not expanded\");\n        } else {\n            String param = servletContext.getInitParameter(\"webAppRootKey\");\n            String key = param != null ? 
param : \"webapp.root\";\n            String oldValue = System.getProperty(key);\n            if (oldValue != null && !StringUtils.pathEquals(oldValue, root)) {\n                throw new IllegalStateException(\"Web app root system property already set to different value: '\" + key + \"' = [\" + oldValue + \"] instead of [\" + root + \"] - \" + \"Choose unique values for the 'webAppRootKey' context-param in your web.xml files!\");\n            } else {\n                System.setProperty(key, root);\n                servletContext.log(\"Set web app root system property: '\" + key + \"' = [\" + root + \"]\");\n            }\n        }\n    }\n\n    public static void removeWebAppRootSystemProperty(ServletContext servletContext) {\n        Assert.notNull(servletContext, \"ServletContext must not be null\");\n        String param = servletContext.getInitParameter(\"webAppRootKey\");\n        String key = param != null ? param : \"webapp.root\";\n        System.getProperties().remove(key);\n    }\n\n    @Deprecated\n    public static boolean isDefaultHtmlEscape(ServletContext servletContext) {\n        if (servletContext == null) {\n            return false;\n        } else {\n            String param = servletContext.getInitParameter(\"defaultHtmlEscape\");\n            return Boolean.valueOf(param);\n        }\n    }\n\n    public static Boolean getDefaultHtmlEscape(ServletContext servletContext) {\n        if (servletContext == null) {\n            return null;\n        } else {\n            String param = servletContext.getInitParameter(\"defaultHtmlEscape\");\n            return StringUtils.hasText(param) ? Boolean.valueOf(param) : null;\n        }\n    }\n\n    public static Boolean getResponseEncodedHtmlEscape(ServletContext servletContext) {\n        if (servletContext == null) {\n            return null;\n        } else {\n            String param = servletContext.getInitParameter(\"responseEncodedHtmlEscape\");\n            return StringUtils.hasText(param) ? 
Boolean.valueOf(param) : null;\n        }\n    }\n\n    public static File getTempDir(ServletContext servletContext) {\n        Assert.notNull(servletContext, \"ServletContext must not be null\");\n        return (File)servletContext.getAttribute(\"javax.servlet.context.tempdir\");\n    }\n\n    public static String getRealPath(ServletContext servletContext, String path) throws FileNotFoundException {\n        Assert.notNull(servletContext, \"ServletContext must not be null\");\n        if (!path.startsWith(\"/\")) {\n            path = \"/\" + path;\n        }\n\n        String realPath = servletContext.getRealPath(path);\n        if (realPath == null) {\n            throw new FileNotFoundException(\"ServletContext resource [\" + path + \"] cannot be resolved to absolute file path - \" + \"web application archive not expanded?\");\n        } else {\n            return realPath;\n        }\n    }\n\n    public static String getSessionId(HttpServletRequest request) {\n        Assert.notNull(request, \"Request must not be null\");\n        HttpSession session = request.getSession(false);\n        return session != null ? session.getId() : null;\n    }\n\n    public static Object getSessionAttribute(HttpServletRequest request, String name) {\n        Assert.notNull(request, \"Request must not be null\");\n        HttpSession session = request.getSession(false);\n        return session != null ? 
session.getAttribute(name) : null;\n    }\n\n    public static Object getRequiredSessionAttribute(HttpServletRequest request, String name) throws IllegalStateException {\n        Object attr = getSessionAttribute(request, name);\n        if (attr == null) {\n            throw new IllegalStateException(\"No session attribute '\" + name + \"' found\");\n        } else {\n            return attr;\n        }\n    }\n\n    public static void setSessionAttribute(HttpServletRequest request, String name, Object value) {\n        Assert.notNull(request, \"Request must not be null\");\n        if (value != null) {\n            request.getSession().setAttribute(name, value);\n        } else {\n            HttpSession session = request.getSession(false);\n            if (session != null) {\n                session.removeAttribute(name);\n            }\n        }\n\n    }\n\n    public static Object getOrCreateSessionAttribute(HttpSession session, String name, Class<?> clazz) throws IllegalArgumentException {\n        Assert.notNull(session, \"Session must not be null\");\n        Object sessionObject = session.getAttribute(name);\n        if (sessionObject == null) {\n            try {\n                sessionObject = clazz.newInstance();\n            } catch (InstantiationException var5) {\n                throw new IllegalArgumentException(\"Could not instantiate class [\" + clazz.getName() + \"] for session attribute '\" + name + \"': \" + var5.getMessage());\n            } catch (IllegalAccessException var6) {\n                throw new IllegalArgumentException(\"Could not access default constructor of class [\" + clazz.getName() + \"] for session attribute '\" + name + \"': \" + var6.getMessage());\n            }\n\n            session.setAttribute(name, sessionObject);\n        }\n\n        return sessionObject;\n    }\n\n    public static Object getSessionMutex(HttpSession session) {\n        Assert.notNull(session, \"Session must not be null\");\n        Object mutex = 
session.getAttribute(SESSION_MUTEX_ATTRIBUTE);\n        if (mutex == null) {\n            mutex = session;\n        }\n\n        return mutex;\n    }\n\n    public static <T> T getNativeRequest(ServletRequest request, Class<T> requiredType) {\n        if (requiredType != null) {\n            if (requiredType.isInstance(request)) {\n                return (T) request;\n            }\n\n            if (request instanceof ServletRequestWrapper) {\n                return getNativeRequest(((ServletRequestWrapper)request).getRequest(), requiredType);\n            }\n        }\n\n        return null;\n    }\n\n    public static <T> T getNativeResponse(ServletResponse response, Class<T> requiredType) {\n        if (requiredType != null) {\n            if (requiredType.isInstance(response)) {\n                return (T) response;\n            }\n\n            if (response instanceof ServletResponseWrapper) {\n                return getNativeResponse(((ServletResponseWrapper)response).getResponse(), requiredType);\n            }\n        }\n\n        return null;\n    }\n\n    public static boolean isIncludeRequest(ServletRequest request) {\n        return request.getAttribute(\"javax.servlet.include.request_uri\") != null;\n    }\n\n    public static void exposeErrorRequestAttributes(HttpServletRequest request, Throwable ex, String servletName) {\n        exposeRequestAttributeIfNotPresent(request, \"javax.servlet.error.status_code\", 200);\n        exposeRequestAttributeIfNotPresent(request, \"javax.servlet.error.exception_type\", ex.getClass());\n        exposeRequestAttributeIfNotPresent(request, \"javax.servlet.error.message\", ex.getMessage());\n        exposeRequestAttributeIfNotPresent(request, \"javax.servlet.error.exception\", ex);\n        exposeRequestAttributeIfNotPresent(request, \"javax.servlet.error.request_uri\", request.getRequestURI());\n        exposeRequestAttributeIfNotPresent(request, \"javax.servlet.error.servlet_name\", servletName);\n    }\n\n    
private static void exposeRequestAttributeIfNotPresent(ServletRequest request, String name, Object value) {\n        if (request.getAttribute(name) == null) {\n            request.setAttribute(name, value);\n        }\n\n    }\n\n    public static void clearErrorRequestAttributes(HttpServletRequest request) {\n        request.removeAttribute(\"javax.servlet.error.status_code\");\n        request.removeAttribute(\"javax.servlet.error.exception_type\");\n        request.removeAttribute(\"javax.servlet.error.message\");\n        request.removeAttribute(\"javax.servlet.error.exception\");\n        request.removeAttribute(\"javax.servlet.error.request_uri\");\n        request.removeAttribute(\"javax.servlet.error.servlet_name\");\n    }\n\n    public static void exposeRequestAttributes(ServletRequest request, Map<String, ?> attributes) {\n        Assert.notNull(request, \"Request must not be null\");\n        Assert.notNull(attributes, \"Attributes Map must not be null\");\n        Iterator var2 = attributes.entrySet().iterator();\n\n        while(var2.hasNext()) {\n            Map.Entry<String, ?> entry = (Map.Entry)var2.next();\n            request.setAttribute((String)entry.getKey(), entry.getValue());\n        }\n\n    }\n\n    public static Cookie getCookie(HttpServletRequest request, String name) {\n        Assert.notNull(request, \"Request must not be null\");\n        Cookie[] cookies = request.getCookies();\n        if (cookies != null) {\n            Cookie[] var3 = cookies;\n            int var4 = cookies.length;\n\n            for(int var5 = 0; var5 < var4; ++var5) {\n                Cookie cookie = var3[var5];\n                if (name.equals(cookie.getName())) {\n                    return cookie;\n                }\n            }\n        }\n\n        return null;\n    }\n\n    public static boolean hasSubmitParameter(ServletRequest request, String name) {\n        Assert.notNull(request, \"Request must not be null\");\n        if 
(request.getParameter(name) != null) {\n            return true;\n        } else {\n            String[] var2 = SUBMIT_IMAGE_SUFFIXES;\n            int var3 = var2.length;\n\n            for(int var4 = 0; var4 < var3; ++var4) {\n                String suffix = var2[var4];\n                if (request.getParameter(name + suffix) != null) {\n                    return true;\n                }\n            }\n\n            return false;\n        }\n    }\n\n    public static String findParameterValue(ServletRequest request, String name) {\n        return findParameterValue(request.getParameterMap(), name);\n    }\n\n    public static String findParameterValue(Map<String, ?> parameters, String name) {\n        Object value = parameters.get(name);\n        if (value instanceof String[]) {\n            String[] values = (String[])((String[])value);\n            return values.length > 0 ? values[0] : null;\n        } else if (value != null) {\n            return value.toString();\n        } else {\n            String prefix = name + \"_\";\n            Iterator var4 = parameters.keySet().iterator();\n\n            String paramName;\n            do {\n                if (!var4.hasNext()) {\n                    return null;\n                }\n\n                paramName = (String)var4.next();\n            } while(!paramName.startsWith(prefix));\n\n            String[] var6 = SUBMIT_IMAGE_SUFFIXES;\n            int var7 = var6.length;\n\n            for(int var8 = 0; var8 < var7; ++var8) {\n                String suffix = var6[var8];\n                if (paramName.endsWith(suffix)) {\n                    return paramName.substring(prefix.length(), paramName.length() - suffix.length());\n                }\n            }\n\n            return paramName.substring(prefix.length());\n        }\n    }\n\n    public static Map<String, Object> getParametersStartingWith(ServletRequest request, String prefix) {\n        Assert.notNull(request, \"Request must not be null\");\n        
Enumeration<String> paramNames = request.getParameterNames();\n        Map<String, Object> params = new TreeMap();\n        if (prefix == null) {\n            prefix = \"\";\n        }\n\n        while(paramNames != null && paramNames.hasMoreElements()) {\n            String paramName = (String)paramNames.nextElement();\n            if (\"\".equals(prefix) || paramName.startsWith(prefix)) {\n                String unprefixed = paramName.substring(prefix.length());\n                String[] values = request.getParameterValues(paramName);\n                if (values != null && values.length != 0) {\n                    if (values.length > 1) {\n                        params.put(unprefixed, values);\n                    } else {\n                        params.put(unprefixed, values[0]);\n                    }\n                }\n            }\n        }\n\n        return params;\n    }\n\n    public static int getTargetPage(ServletRequest request, String paramPrefix, int currentPage) {\n        Enumeration paramNames = request.getParameterNames();\n\n        String paramName;\n        do {\n            if (!paramNames.hasMoreElements()) {\n                return currentPage;\n            }\n\n            paramName = (String)paramNames.nextElement();\n        } while(!paramName.startsWith(paramPrefix));\n\n        for(int i = 0; i < SUBMIT_IMAGE_SUFFIXES.length; ++i) {\n            String suffix = SUBMIT_IMAGE_SUFFIXES[i];\n            if (paramName.endsWith(suffix)) {\n                paramName = paramName.substring(0, paramName.length() - suffix.length());\n            }\n        }\n\n        return Integer.parseInt(paramName.substring(paramPrefix.length()));\n    }\n\n    public static String extractFilenameFromUrlPath(String urlPath) {\n        String filename = extractFullFilenameFromUrlPath(urlPath);\n        int dotIndex = filename.lastIndexOf(46);\n        if (dotIndex != -1) {\n            filename = filename.substring(0, dotIndex);\n        }\n\n        
return filename;\n    }\n\n    public static String extractFullFilenameFromUrlPath(String urlPath) {\n        int end = urlPath.indexOf(59);\n        if (end == -1) {\n            end = urlPath.indexOf(63);\n            if (end == -1) {\n                end = urlPath.length();\n            }\n        }\n\n        int begin = urlPath.lastIndexOf(47, end) + 1;\n        return urlPath.substring(begin, end);\n    }\n\n    public static MultiValueMap<String, String> parseMatrixVariables(String matrixVariables) {\n        MultiValueMap<String, String> result = new LinkedMultiValueMap();\n        if (!StringUtils.hasText(matrixVariables)) {\n            return result;\n        } else {\n            StringTokenizer pairs = new StringTokenizer(matrixVariables, \";\");\n\n            while(true) {\n                while(pairs.hasMoreTokens()) {\n                    String pair = pairs.nextToken();\n                    int index = pair.indexOf(61);\n                    if (index != -1) {\n                        String name = pair.substring(0, index);\n                        String rawValue = pair.substring(index + 1);\n                        String[] var7 = StringUtils.commaDelimitedListToStringArray(rawValue);\n                        int var8 = var7.length;\n\n                        for(int var9 = 0; var9 < var8; ++var9) {\n                            String value = var7[var9];\n                            result.add(name, value);\n                        }\n                    } else {\n                        result.add(pair, \"\");\n                    }\n                }\n\n                return result;\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>pinecones</artifactId>\n        <groupId>com.pinecones</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-surefire-plugin</artifactId>\n                <version>3.1.2</version>\n                <configuration>\n                    <includes>\n                        <include>**/*Tests.java</include>\n                        <include>**/*Test.java</include>\n                        <include>**/Test*.java</include>\n                    </includes>\n                </configuration>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>9</source>\n                    <target>9</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.ulf</groupId>\n    <artifactId>ulfhedinn</artifactId>\n    <version>1.2.1</version>\n    <packaging>jar</packaging>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n      
  <dependency>\n            <groupId>commons-collections</groupId>\n            <artifactId>commons-collections</artifactId>\n            <version>3.2.2</version>\n        </dependency>\n        <dependency>\n            <groupId>commons-lang</groupId>\n            <artifactId>commons-lang</artifactId>\n            <version>2.6</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-lang3</artifactId>\n            <version>3.12.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.javassist</groupId>\n            <artifactId>javassist</artifactId>\n            <version>3.29.0-GA</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.thymeleaf</groupId>\n            <artifactId>thymeleaf</artifactId>\n            <version>3.0.12.RELEASE</version>\n        </dependency>\n        <dependency>\n            <groupId>org.freemarker</groupId>\n            <artifactId>freemarker</artifactId>\n            <version>2.3.31</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.junit.jupiter</groupId>\n            <artifactId>junit-jupiter-api</artifactId>\n            <version>5.8.2</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.junit.jupiter</groupId>\n            <artifactId>junit-jupiter-engine</artifactId>\n            <version>5.0.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.assertj</groupId>\n            <artifactId>assertj-core</artifactId>\n            <version>3.24.2</version>\n            <scope>test</scope>\n        </dependency>\n    </dependencies>\n\n</project>"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/bson/ArchJSONDecompiler.java",
    "content": "package com.pinecone.ulf.util.bson;\n\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONCompilerException;\nimport com.pinecone.framework.util.json.JSONDecompiler;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.util.List;\nimport java.util.Map;\n\npublic abstract class ArchJSONDecompiler implements JSONDecompiler {\n    protected int             mnParseAt ;\n    protected InputStream     mInputStream;\n\n    public ArchJSONDecompiler( InputStream is ) {\n        this.mInputStream = is;\n        this.mnParseAt    = 0;\n    }\n\n    protected int nextByte() throws JSONCompilerException {\n        try {\n            int b = this.mInputStream.read();\n            if( b < 0 ) {\n                throw new JSONCompilerException( \"Illegal decompiled byte or IO error.\", this.mnParseAt );\n            }\n            ++this.mnParseAt;\n            return b;\n        }\n        catch ( IOException e ){\n            throw new JSONCompilerException( e, this.mnParseAt );\n        }\n    }\n\n    protected byte[] nextBytes( int length ) throws JSONCompilerException {\n        byte[] bytes = new byte[ length ];\n        try {\n            int read = this.mInputStream.read( bytes );\n            if ( read != length ) {\n                throw new JSONCompilerException( \"Unexpected end of stream.\", this.mnParseAt );\n            }\n            this.mnParseAt += length;\n            return bytes;\n        }\n        catch ( IOException e ) {\n            throw new JSONCompilerException( e, this.mnParseAt );\n        }\n    }\n\n    protected String nextString() throws JSONCompilerException {\n        int   length = Bytes.bytesToInt32LE( this.nextBytes( 4 ) );\n        byte[] bytes = this.nextBytes( length );\n        return new String( bytes );\n    }\n\n    protected short nextInt16() throws 
JSONCompilerException {\n        return Bytes.bytesToInt16LE( this.nextBytes(2) );\n    }\n\n    protected int nextInt32() throws JSONCompilerException {\n        return Bytes.bytesToInt32LE( this.nextBytes(4) );\n    }\n\n    protected long nextInt64() throws JSONCompilerException {\n        return Bytes.bytesToInt64LE( this.nextBytes(8) );\n    }\n\n    protected float nextFloat32() throws JSONCompilerException {\n        return Bytes.bytesToFloat32LE( this.nextBytes(4) );\n    }\n\n    protected double nextFloat64() throws JSONCompilerException {\n        return Bytes.bytesToFloat64LE( this.nextBytes(8) );\n    }\n\n    protected boolean nextBool() throws JSONCompilerException {\n        return this.nextByte() != 0;\n    }\n\n    protected BigInteger nextBigInteger() throws JSONCompilerException {\n        int   length = Bytes.bytesToInt32LE( this.nextBytes(4) );\n        byte[] bytes = this.nextBytes(length);\n        return new BigInteger(bytes);\n    }\n\n    protected BigDecimal nextBigDecimal() throws JSONCompilerException {\n        int   length = Bytes.bytesToInt32LE( this.nextBytes(4) );\n        byte[] bytes = nextBytes(length);\n        int    scale = Bytes.bytesToInt32LE( this.nextBytes(4) );\n        return new BigDecimal( new BigInteger(bytes), scale );\n    }\n\n    protected abstract List<Object > newJSONArray( Object parent );\n\n    protected abstract Map<String, Object > newJSONObject( Object parent );\n\n    protected Object nextJSONObject( Object parent ) throws JSONCompilerException {\n        Map<String, Object > map = this.newJSONObject( parent );\n        int length = (int) Bytes.bytesToInt64LE( this.nextBytes(8) );\n        for ( int i = 0; i < length; ++i ) {\n            Object k     = this.nextValue();\n            String key ;\n            if( !(k instanceof String) ) {\n                throw new JSONCompilerException( \"Illegal JSONObject::Key, key should be String.\", this.mnParseAt );\n            }\n            key          = 
(String) k;\n            Object value = this.nextValue( map );\n            map.put( key, value );\n        }\n        int endType = this.nextByte();\n        if ( endType != DataTypeCode.JSONOBJECT_END.getValue() ) {\n            throw new JSONCompilerException(\"Expected end of JSON object.\", this.mnParseAt);\n        }\n        return map;\n    }\n\n    protected Object nextJSONArray( Object parent ) throws JSONCompilerException {\n        List<Object > list = this.newJSONArray( parent );\n        int length = (int) Bytes.bytesToInt64LE( this.nextBytes(8) );\n        for ( int i = 0; i < length; ++i ) {\n            Object value = this.nextValue( list );\n            list.add( value );\n        }\n        int endType = this.nextByte();\n        if ( endType != DataTypeCode.JSONARRAY_END.getValue() ) {\n            throw new JSONCompilerException( \"Expected end of JSON array.\", this.mnParseAt );\n        }\n        return list;\n    }\n\n    protected Object nextUnidentifiedObject( int type ) throws JSONCompilerException {\n        throw new JSONCompilerException( \"Unidentified compiled bytecode `[0x\" + Integer.toHexString( type ).toUpperCase() + \"]`, with unknown version or damaged binary data.\", this.mnParseAt );\n    }\n\n    @Override\n    public Object nextValue( Object parent ) throws JSONCompilerException {\n        int type = this.nextByte();\n\n        try{\n            DataTypeCode typeCode = DataTypeCode.asCode( type );\n\n            switch ( typeCode ) {\n                case NULL:\n                case UNDEFINED: {\n                    return JSON.NULL;\n                }\n                case STRING: {\n                    return this.nextString();\n                }\n                case BYTE8: {\n                    return this.nextByte();\n                }\n                case INT16: {\n                    return this.nextInt16();\n                }\n                case INT32: {\n                    return this.nextInt32();\n           
     }\n                case INT64: {\n                    return this.nextInt64();\n                }\n                case FLOAT32: {\n                    return this.nextFloat32();\n                }\n                case FLOAT64: {\n                    return this.nextFloat64();\n                }\n                case BOOL: {\n                    return this.nextBool();\n                }\n                case BIG_INTEGER: {\n                    return this.nextBigInteger();\n                }\n                case BIG_DECIMAL: {\n                    return this.nextBigDecimal();\n                }\n                case JSONOBJECT: {\n                    return this.nextJSONObject( parent );\n                }\n                case JSONARRAY: {\n                    return this.nextJSONArray( parent );\n                }\n                default: {\n                    return this.nextUnidentifiedObject( type );\n                }\n            }\n        }\n        catch ( IllegalArgumentException e ) {\n            return this.nextUnidentifiedObject( type );\n        }\n    }\n\n    @Override\n    public Object nextValue() throws JSONCompilerException {\n        return this.nextValue( null );\n    }\n\n    @Override\n    public Object decompile( Object parent ) {\n        try{\n            return this.nextValue( parent );\n        }\n        catch ( JSONCompilerException e ) {\n            return null;\n        }\n    }\n\n    @Override\n    public Object decompile() {\n        return this.decompile( null );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/bson/DataTypeCode.java",
    "content": "package com.pinecone.ulf.util.bson;\n\npublic enum DataTypeCode {\n    UNDEFINED                ( 0x01, \"Undefined\"            ),\n    NULL                     ( 0x02, \"Null\"                 ),\n    BYTE8                    ( 0x03, \"Byte8\"                ),\n    INT16                    ( 0x04, \"Int16\"                ),\n    INT32                    ( 0x05, \"Int32\"                ),\n    INT64                    ( 0x06, \"Int64\"                ),\n    FLOAT32                  ( 0x07, \"Float32\"              ),\n    FLOAT64                  ( 0x08, \"Float64\"              ),\n    BOOL                     ( 0x09, \"Bool\"                 ),\n    BIG_INTEGER              ( 0x0A, \"BigInteger\"           ),\n    BIG_DECIMAL              ( 0x0B, \"BigDecimal\"           ),\n    STRING                   ( 0x0C, \"String\"               ),\n\n\n    JSONOBJECT               ( 0xFA, \"JSONObject\"           ),\n    JSONARRAY                ( 0xFB, \"JSONArray\"            ),\n\n    JSONOBJECT_END           ( 0xEA, \"JSONObject$End\"       ),\n    JSONARRAY_END            ( 0xEB, \"JSONArray$End\"        ),\n\n    SERIALIZABLE_OBJ         ( 0xFC, \"SerializableObj\"      );\n\n    private final int value;\n\n    private final String name;\n\n    DataTypeCode( int codeVal, String name ){\n        this.value = codeVal;\n        this.name  = name;\n    }\n\n    public String getName(){\n        return this.name;\n    }\n\n    public int getValue() {\n        return this.value;\n    }\n\n    public byte getByteValue() {\n        return (byte) this.value;\n    }\n\n    @Override\n    public String toString() {\n        return this.getName();\n    }\n\n    public static DataTypeCode asCode( int codeVal ) {\n        for ( DataTypeCode type : DataTypeCode.values() ) {\n            if ( type.getValue() == codeVal ) {\n                return type;\n            }\n        }\n        throw new IllegalArgumentException( \"Invalid DataTypeCode value: \" + 
codeVal );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/bson/UlfJSONCompiler.java",
    "content": "package com.pinecone.ulf.util.bson;\n\nimport com.pinecone.framework.util.Bytes;\n\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.framework.util.json.JSONCompiler;\nimport com.pinecone.framework.util.json.JSONString;\nimport com.pinecone.framework.util.json.binary.BsonTraits;\nimport com.pinecone.framework.util.json.binary.Bsonut;\n\nimport java.io.IOException;\nimport java.io.ObjectOutputStream;\nimport java.io.OutputStream;\nimport java.io.Serializable;\nimport java.math.BigDecimal;\nimport java.math.BigInteger;\nimport java.util.Collection;\nimport java.util.Map;\n\npublic class UlfJSONCompiler implements JSONCompiler {\n    public UlfJSONCompiler() { }\n\n    protected OutputStream compileUnidentifiedObject ( Object that, OutputStream outputStream ) throws IOException {\n        // Ignore them\n        return outputStream;\n    }\n\n    protected OutputStream compileUnknownAnyObject   ( Object that, OutputStream outputStream ) throws IOException {\n        if ( that != null ) {\n            try {\n                BsonTraits.invokeBsonSerialize( that, outputStream );\n            }\n            catch ( Exception e ){\n                try {\n                    outputStream.write( BsonTraits.invokeToBsonBytes( that ) );\n                }\n                catch ( Exception e1 ){\n                    return this.compileUnidentifiedObject( that, outputStream ) ;\n                }\n            }\n        }\n        else {\n            outputStream.write( DataTypeCode.NULL.getValue() );\n        }\n        return outputStream;\n    }\n\n    @Override\n    public OutputStream compile( Map that, OutputStream outputStream ) throws IOException {\n        outputStream.write( DataTypeCode.JSONOBJECT.getValue() );\n        Map<?, ?> map = (Map<?, ?>) that;\n        outputStream.write( Bytes.int64ToBytesLE( map.size() ) );\n        for ( Map.Entry<?, ?> entry : map.entrySet() ) {\n            this.compile( entry.getKey(), outputStream 
);\n            this.compile( entry.getValue(), outputStream );\n        }\n        outputStream.write( DataTypeCode.JSONOBJECT_END.getValue() );\n        return outputStream;\n    }\n\n    @Override\n    public OutputStream compile( Collection that, OutputStream outputStream ) throws IOException {\n        outputStream.write( DataTypeCode.JSONARRAY.getValue() );\n        Collection<?> collection = (Collection<?>) that;\n        outputStream.write( Bytes.int64ToBytesLE( collection.size() ) );\n        for ( Object item : collection ) {\n            this.compile( item, outputStream );\n        }\n        outputStream.write( DataTypeCode.JSONARRAY_END.getValue() );\n        return outputStream;\n    }\n\n    @Override\n    public OutputStream compile( Object[] those, OutputStream outputStream ) throws IOException {\n        outputStream.write( DataTypeCode.JSONARRAY.getValue() );\n        int length = those.length;\n        outputStream.write( Bytes.int64ToBytesLE( length ) );\n        for ( int i = 0; i < length; ++i ) {\n            this.compile( those[i], outputStream );\n        }\n        outputStream.write( DataTypeCode.JSONARRAY_END.getValue() );\n        return outputStream;\n    }\n\n    @Override\n    public OutputStream compile( Object that, OutputStream outputStream ) throws IOException {\n        if ( that != null ) {\n            if ( that == JSON.NULL ) {\n                outputStream.write( DataTypeCode.NULL.getValue() );\n            }\n            else if ( that instanceof Map ) {\n                this.compile( (Map) that, outputStream );\n            }\n            else if ( that instanceof Collection ) {\n                this.compile( (Collection) that, outputStream );\n            }\n            else if ( that instanceof String ) {\n                String str = (String) that;\n                outputStream.write( DataTypeCode.STRING.getValue() );\n                outputStream.write( Bytes.int32ToBytesLE( str.length() ) );\n                
outputStream.write( str.getBytes());\n            }\n            else if ( that.getClass().isArray() ) {\n                this.compile( (Object[]) that, outputStream );\n            }\n            else if ( that instanceof Number ) {\n                if ( that instanceof Byte ) {\n                    outputStream.write( DataTypeCode.BYTE8.getValue() );\n                    outputStream.write( (Byte) that );\n                }\n                else if ( that instanceof Short ) {\n                    outputStream.write( DataTypeCode.INT16.getValue() );\n                    outputStream.write( Bytes.int16ToBytesLE((Short) that));\n                }\n                else if ( that instanceof Integer ) {\n                    outputStream.write( DataTypeCode.INT32.getValue() );\n                    outputStream.write( Bytes.int32ToBytesLE((Integer) that));\n                }\n                else if ( that instanceof Long ) {\n                    outputStream.write( DataTypeCode.INT64.getValue() );\n                    outputStream.write( Bytes.int64ToBytesLE((Long) that));\n                }\n                else if ( that instanceof Float ) {\n                    outputStream.write( DataTypeCode.FLOAT32.getValue() );\n                    outputStream.write( Bytes.float32ToBytesLE((Float) that));\n                }\n                else if ( that instanceof Double ) {\n                    outputStream.write( DataTypeCode.FLOAT64.getValue() );\n                    outputStream.write( Bytes.float64ToBytesLE((Double) that));\n                }\n                else if ( that instanceof BigInteger ) {\n                    outputStream.write( DataTypeCode.BIG_INTEGER.getValue() );\n                    byte[] bigIntBytes = ((BigInteger) that).toByteArray();\n                    outputStream.write( Bytes.int32ToBytesLE( bigIntBytes.length ) );\n                    outputStream.write( bigIntBytes );\n                }\n                else if ( that instanceof BigDecimal ) {\n  
                  outputStream.write( DataTypeCode.BIG_DECIMAL.getValue() );\n                    BigDecimal bigDecimal = (BigDecimal) that;\n                    byte[] bigIntBytes = bigDecimal.unscaledValue().toByteArray();\n                    int scale = bigDecimal.scale();\n                    outputStream.write( Bytes.int32ToBytesLE( bigIntBytes.length ) );\n                    outputStream.write( bigIntBytes);\n                    outputStream.write( Bytes.int32ToBytesLE(scale) );\n                }\n            }\n            else if ( that instanceof Boolean ) {\n                outputStream.write( DataTypeCode.BOOL.getValue() );\n                outputStream.write( (Boolean) that ? 1 : 0 );\n            }\n            else if ( that instanceof JSONString ) {\n                String jsonString = ( (JSONString) that).toJSONString();\n                outputStream.write( DataTypeCode.STRING.getValue() );\n                outputStream.write( Bytes.int32ToBytesLE( jsonString.length() ) );\n                outputStream.write( jsonString.getBytes());\n            }\n            else if ( that instanceof Bsonut ) {\n                (( Bsonut ) that).bsonSerialize( outputStream );\n            }\n            else {\n                this.compileUnknownAnyObject( that, outputStream );\n            }\n        }\n        else {\n            outputStream.write( DataTypeCode.NULL.getValue() );\n        }\n        return outputStream;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/bson/UlfJSONDecompiler.java",
    "content": "package com.pinecone.ulf.util.bson;\n\n\nimport com.pinecone.framework.util.json.JSONArraytron;\nimport com.pinecone.framework.util.json.JSONDecompiler;\nimport com.pinecone.framework.util.json.JSONMaptron;\n\nimport java.io.InputStream;\nimport java.util.List;\nimport java.util.Map;\n\npublic class UlfJSONDecompiler extends ArchJSONDecompiler implements JSONDecompiler {\n    public UlfJSONDecompiler(InputStream is ) {\n        super( is );\n    }\n\n    @Override\n    protected Map<String, Object > newJSONObject( Object parent ) {\n        return new JSONMaptron();\n    }\n\n    @Override\n    protected List<Object > newJSONArray( Object parent ) {\n        return new JSONArraytron();\n    }\n\n    @Override\n    public Object decompile( Object parent ) {\n        return super.decompile( parent );\n    }\n\n    @Override\n    public Object decompile() {\n        return this.decompile( null );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/GUIDs.java",
    "content": "package com.pinecone.ulf.util.guid;\n\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V2;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V7;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocatorHC128V7;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i64.GUID64;\nimport com.pinecone.ulf.util.guid.i64.GUID72;\nimport com.pinecone.ulf.util.guid.i64.GuidAllocator72V2;\nimport com.pinecone.ulf.util.guid.i64.worker.WorkerIdAssigner;\n\npublic final class GUIDs {\n    public static GUID64 GUID64( String s ) {\n        return new GUID64( s );\n    }\n\n    public static GUID72 GUID72( String s ) {\n        return new GUID72( s );\n    }\n\n    public static GUID128 GUID128( String s ) {\n        UUID128 uuid128 = new UUID128(s);\n        return uuid128;\n    }\n\n    public static GUID72 Dummy72() {\n        return new GUID72();\n    }\n\n    public static UUID128 Dummy128() {\n        return new UUID128();\n    }\n\n    public static GuidAllocator newGuidAllocator( WorkerIdAssigner idAssigner ) {\n        if( idAssigner == null ) {\n            return new GuidAllocator72V2();\n        }\n        return new GuidAllocator72V2( idAssigner );\n    }\n\n    public static GuidAllocator newGuidAllocator() {\n        return newGuidAllocator( 0 );\n    }\n\n    public static GuidAllocator newGuidAllocator( int machineId ) {\n        if ( machineId <= 0 ) {\n            return new GuidAllocator128V7();\n        }\n        return new GuidAllocatorHC128V7( machineId );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/ArchGuidAllocator128.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.Arrays;\n\npublic abstract class ArchGuidAllocator128 implements GuidAllocator128 {\n    public static final int GUID_CHARS = 36;\n\n    @Override\n    public GUID parse( String hexId ) {\n        return Parser.parse( hexId );\n    }\n\n    static final class Parser {\n\n        private static final byte[] MAP;\n\n        static {\n            byte[] mapping = new byte[256];\n            Arrays.fill(mapping, (byte) -1);\n            mapping['0'] = 0;\n            mapping['1'] = 1;\n            mapping['2'] = 2;\n            mapping['3'] = 3;\n            mapping['4'] = 4;\n            mapping['5'] = 5;\n            mapping['6'] = 6;\n            mapping['7'] = 7;\n            mapping['8'] = 8;\n            mapping['9'] = 9;\n            mapping['a'] = 10;\n            mapping['b'] = 11;\n            mapping['c'] = 12;\n            mapping['d'] = 13;\n            mapping['e'] = 14;\n            mapping['f'] = 15;\n            mapping['A'] = 10;\n            mapping['B'] = 11;\n            mapping['C'] = 12;\n            mapping['D'] = 13;\n            mapping['E'] = 14;\n            mapping['F'] = 15;\n            MAP = mapping;\n        }\n\n        private static final int DASH_POSITION_1 = 8;\n        private static final int DASH_POSITION_2 = 13;\n        private static final int DASH_POSITION_3 = 18;\n        private static final int DASH_POSITION_4 = 23;\n\n\n        public static GUID parse(final String string) {\n            UUID128 neo = new UUID128();\n            parse( string, neo );\n            return neo;\n        }\n\n        public static void parse(final String string, UUID128 that) {\n\n            validate(string);\n\n            long msb = 0;\n            long lsb = 0;\n\n            for (int i = 0; i < 8; i++) {\n                msb = (msb << 4) | get(string, i);\n            }\n\n            for (int i = 9; i < 13; i++) 
{\n                msb = (msb << 4) | get(string, i);\n            }\n\n            for (int i = 14; i < 18; i++) {\n                msb = (msb << 4) | get(string, i);\n            }\n\n            for (int i = 19; i < 23; i++) {\n                lsb = (lsb << 4) | get(string, i);\n            }\n\n            for (int i = 24; i < 36; i++) {\n                lsb = (lsb << 4) | get(string, i);\n            }\n\n            that.mostSigBits  = msb;\n            that.leastSigBits = lsb;\n        }\n\n        public static boolean valid(final String guid) {\n            try {\n                parse(guid);\n                return true;\n            } catch (IllegalArgumentException e) {\n                return false;\n            }\n        }\n\n        private static long get(final String string, int i) {\n\n            final int chr = string.charAt(i);\n            if (chr > 255) {\n                throw exception(string);\n            }\n\n            final byte value = MAP[chr];\n            if (value < 0) {\n                throw exception(string);\n            }\n            return value & 0xffL;\n        }\n\n        private static RuntimeException exception(final String str) {\n            return new IllegalArgumentException(\"Invalid UUID: \" + str);\n        }\n\n        private static void validate(final String string) {\n            if (string == null || string.length() != GUID_CHARS) {\n                throw exception(string);\n            }\n            if (string.charAt(DASH_POSITION_1) != '-' || string.charAt(DASH_POSITION_2) != '-'\n                    || string.charAt(DASH_POSITION_3) != '-' || string.charAt(DASH_POSITION_4) != '-') {\n                throw exception(string);\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GUID128.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.UUID;\n\npublic interface GUID128 extends GUID {\n\n    long getMostSignificantBits();\n\n    long getLeastSignificantBits();\n\n    UUID toUUID();\n\n    int version();\n\n    int variant();\n\n    int clockSequence() ;\n\n    long node() ;\n\n    @Override\n    default int sizeof() {\n        return 16; // 128 bits = 16 bytes\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GuidAllocator;\n\npublic interface GuidAllocator128 extends GuidAllocator {\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V1.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.security.SecureRandom;\nimport java.time.Instant;\nimport java.util.Random;\nimport java.util.concurrent.ThreadLocalRandom;\nimport java.util.function.LongSupplier;\n\npublic class GuidAllocator128V1 extends ArchGuidAllocator128 implements GuidAllocator128{\n\n    private final long MASK_12 = 0x0000_0000_0000_0fffL;\n\n    private final long MASK_16 = 0x0000_0000_0000_ffffL;\n\n    private final long MULTICAST = 0x0000_0100_0000_0000L;\n\n    @Override\n    public GUID nextGUID() {\n        return this.nextGUID( System::currentTimeMillis, TLRandom::nextLong );\n    }\n\n    public GUID nextGUID( Instant instant, Random random ) {\n        return nextGUID(optional(instant), optional(random));\n    }\n\n    private GUID nextGUID( LongSupplier msec, LongSupplier random ) {\n        final long time = gregorian(msec.getAsLong());\n        final long msb = (time << 32) | ((time >>> 16) & (MASK_16 << 16)) | ((time >>> 48) & MASK_12);\n        final long lsb = random.getAsLong() | MULTICAST;\n        return version(msb, lsb, 1);\n    }\n\n    private LongSupplier optional(Instant instant) {\n        return instant == null ? System::currentTimeMillis : instant::toEpochMilli;\n    }\n\n    private LongSupplier optional(Random random) {\n        return random == null ? 
TLRandom::nextLong : random::nextLong;\n    }\n\n    private long gregorian( final long millisecons ) {\n        // 1582-10-15T00:00:00Z\n        final long factor = 10_000L;\n        final long offset = 12219292800000L;\n        return ((millisecons + offset) * factor);\n    }\n\n    GUID version(long hi, long lo, int version) {\n        // set the 4 most significant bits of the 7th byte\n        final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version\n        // set the 2 most significant bits of the 9th byte to 1 and 0\n        final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant\n        return new UUID128(msb, lsb);\n    }\n\n    static private class TLRandom {\n\n        // The JVM unique number tries to mitigate the fact that the thread\n        // local random is not seeded with a secure random seed by default.\n        // Their seeds are based on temporal data and predefined constants.\n        // Although the seeds are unique per JVM, they are not across JVMs.\n        // It helps to generate different sequences of numbers even if two\n        // ThreadLocalRandom are by chance instantiated with the same seed.\n        // Of course it doesn't better the output, but doesn't hurt either.\n         static final long JVM_UNIQUE_NUMBER = new SecureRandom().nextLong();\n\n        static private long nextLong() {\n            return ThreadLocalRandom.current().nextLong() ^ JVM_UNIQUE_NUMBER;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V2.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class GuidAllocator128V2 extends ArchGuidAllocator128 implements GuidAllocator128 {\n\n    private static final GuidAllocator128 v1 = new GuidAllocator128V1();\n\n    private final long MASK_32 = 0x0000_0000_ffff_ffffL;\n\n    private final long MASK_08 = 0x0000_0000_0000_00ffL;\n\n    @Override\n    public GUID nextGUID() {\n        return this.nextGUID((byte) 0, (int) 0);\n    }\n\n    public GUID nextGUID( byte localDomain, int localIdentifier ) {\n        return this.nextGUID( localDomain, localIdentifier, (GUID128) v1.nextGUID() );\n    }\n\n    private GUID nextGUID( byte localDomain, int localIdentifier, GUID128 guid ) {\n        final long msb = (guid.getMostSignificantBits() & MASK_32) | ((localIdentifier & MASK_32) << 32);\n        final long lsb = (guid.getLeastSignificantBits() & 0x3f00_ffff_ffff_ffffL) | ((localDomain & MASK_08) << 48);\n        return version(msb, lsb, 2);\n    }\n\n     GUID version(long hi, long lo, int version) {\n        // set the 4 most significant bits of the 7th byte\n        final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version\n        // set the 2 most significant bits of the 9th byte to 1 and 0\n        final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant\n        return new UUID128(msb, lsb);\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V3.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.nio.ByteBuffer;\nimport java.nio.charset.StandardCharsets;\nimport java.security.MessageDigest;\nimport java.security.NoSuchAlgorithmException;\nimport java.util.Objects;\n\npublic class GuidAllocator128V3 extends ArchGuidAllocator128 implements GuidAllocator128 {\n\n    private final GUID128 NIL = new UUID128(0x0000000000000000L, 0x0000000000000000L);\n\n    @Override\n    public GUID nextGUID() {\n        return this.nextGUID(this.NIL,\"\");\n    }\n\n    /**\n     * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n     * <p>\n     * Usage:\n     *\n     * <pre>{@code\n     * GUID guid = GUID.v3(Uuid.NAMESPACE_DNS, \"www.example.com\");\n     * }</pre>\n     *\n     * @param namespace a GUID (optional)\n     * @param name      a string\n     * @return a GUID\n     * @throws NullPointerException if the name is null\n     */\n    public GUID nextGUID( GUID128 namespace, String name ) {\n        return hash(3, \"MD5\", namespace, name);\n    }\n\n    /**\n     * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n     * <p>\n     * Usage:\n     *\n     * <pre>{@code\n     * GUID guid = GUID.v3(myNameSpace, myBytes);\n     * }</pre>\n     *\n     * @param namespace a GUID (optional)\n     * @param bytes     a byte array\n     * @return a GUID\n     * @throws NullPointerException if the byte array is null\n     */\n    public GUID nextGUID( GUID128 namespace, byte[] bytes ) {\n        return hash(3, \"MD5\", namespace, bytes);\n    }\n\n    private GUID hash(int version, String algorithm, GUID128 namespace, String name) {\n        Objects.requireNonNull(name, \"Null name\");\n        return hash(version, algorithm, namespace, name.getBytes(StandardCharsets.UTF_8));\n    }\n\n    private GUID hash(int version, String algorithm, GUID128 namespace, byte[] bytes) {\n\n        Objects.requireNonNull(bytes, \"Null 
bytes\");\n        MessageDigest hasher = hasher(algorithm);\n\n        if (namespace != null) {\n            ByteBuffer ns = ByteBuffer.allocate(16);\n            ns.putLong(namespace.getMostSignificantBits());\n            ns.putLong(namespace.getLeastSignificantBits());\n            hasher.update(ns.array());\n        }\n\n        hasher.update(bytes);\n        ByteBuffer hash = ByteBuffer.wrap(hasher.digest());\n\n        final long msb = hash.getLong();\n        final long lsb = hash.getLong();\n\n        return version(msb, lsb, version);\n    }\n\n    private MessageDigest hasher(String algorithm) {\n        try {\n            return MessageDigest.getInstance(algorithm);\n        } catch (NoSuchAlgorithmException e) {\n            throw new IllegalArgumentException(e.getMessage());\n        }\n    }\n\n    private GUID version(long hi, long lo, int version) {\n        // set the 4 most significant bits of the 7th byte\n        final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version\n        // set the 2 most significant bits of the 9th byte to 1 and 0\n        final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant\n        return new UUID128(msb, lsb);\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V4.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.security.SecureRandom;\nimport java.util.Objects;\nimport java.util.Random;\nimport java.util.UUID;\nimport java.util.concurrent.ThreadLocalRandom;\n\npublic class GuidAllocator128V4 extends ArchGuidAllocator128 implements GuidAllocator128 {\n\n    @Override\n    public GUID nextGUID() {\n        return this.version(TLRandom.nextLong(), TLRandom.nextLong(), 4);\n    }\n\n    /**\n     * Returns a random-based unique identifier (UUIDv4).\n     * <p>\n     * It is equivalent to {@link UUID#randomUUID()}.\n     * <p>\n     * Usage:\n     *\n     * <pre>{@code\n     * SecureRandom random = new SecureRandom();\n     * GUID guid = GUID.v4(random);\n     * }</pre>\n     *\n     * @param random a random generator\n     * @return a GUID\n     * @throws NullPointerException if the random is null\n     */\n    public GUID v4(Random random) {\n        Objects.requireNonNull(random, \"Null random\");\n        return version(random.nextLong(), random.nextLong(), 4);\n    }\n\n    private GUID version(long hi, long lo, int version) {\n        // set the 4 most significant bits of the 7th byte\n        final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version\n        // set the 2 most significant bits of the 9th byte to 1 and 0\n        final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant\n        return new UUID128(msb, lsb);\n    }\n\n    static private class TLRandom {\n\n        // The JVM unique number tries to mitigate the fact that the thread\n        // local random is not seeded with a secure random seed by default.\n        // Their seeds are based on temporal data and predefined constants.\n        // Although the seeds are unique per JVM, they are not across JVMs.\n        // It helps to generate different sequences of numbers even if two\n        // ThreadLocalRandom are by 
chance instantiated with the same seed.\n        // Of course it doesn't better the output, but doesn't hurt either.\n        static final long JVM_UNIQUE_NUMBER = new SecureRandom().nextLong();\n\n        static private long nextLong() {\n            return ThreadLocalRandom.current().nextLong() ^ JVM_UNIQUE_NUMBER;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V5.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.nio.ByteBuffer;\nimport java.nio.charset.StandardCharsets;\nimport java.security.MessageDigest;\nimport java.security.NoSuchAlgorithmException;\nimport java.util.Objects;\n\npublic class GuidAllocator128V5 extends ArchGuidAllocator128 implements GuidAllocator128 {\n\n    public final GUID128 NIL = new UUID128(0x0000000000000000L, 0x0000000000000000L);\n\n    @Override\n    public GUID nextGUID() {\n        return this.nextGUID( this.NIL, \"\" );\n    }\n\n    public GUID nextGUID( GUID128 namespace, String name ) {\n        return hash(5, \"SHA-1\", namespace, name);\n    }\n\n    /**\n     * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n     * <p>\n     * Usage:\n     *\n     * <pre>{@code\n     * GUID guid = GUID.v5(myNameSpace, myBytes);\n     * }</pre>\n     *\n     * @param namespace a GUID (optional)\n     * @param bytes     a byte array\n     * @return a GUID\n     * @throws NullPointerException if the byte array is null\n     */\n    public GUID nextGUID( GUID128 namespace, byte[] bytes ) {\n        return hash(5, \"SHA-1\", namespace, bytes);\n    }\n\n    private GUID hash( int version, String algorithm, GUID128 namespace, String name ) {\n        Objects.requireNonNull(name, \"Null name\");\n        return hash(version, algorithm, namespace, name.getBytes(StandardCharsets.UTF_8));\n    }\n\n    private GUID hash(int version, String algorithm, GUID128 namespace, byte[] bytes) {\n\n        Objects.requireNonNull(bytes, \"Null bytes\");\n        MessageDigest hasher = hasher(algorithm);\n\n        if (namespace != null) {\n            ByteBuffer ns = ByteBuffer.allocate(16);\n            ns.putLong(namespace.getMostSignificantBits());\n            ns.putLong(namespace.getLeastSignificantBits());\n            hasher.update(ns.array());\n        }\n\n        hasher.update(bytes);\n        ByteBuffer hash = 
ByteBuffer.wrap(hasher.digest());\n\n        final long msb = hash.getLong();\n        final long lsb = hash.getLong();\n\n        return version(msb, lsb, version);\n    }\n\n    private MessageDigest hasher(String algorithm) {\n        try {\n            return MessageDigest.getInstance(algorithm);\n        } catch (NoSuchAlgorithmException e) {\n            throw new IllegalArgumentException(e.getMessage());\n        }\n    }\n\n    private GUID version(long hi, long lo, int version) {\n        // set the 4 most significant bits of the 7th byte\n        final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version\n        // set the 2 most significant bits of the 9th byte to 1 and 0\n        final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant\n        return new UUID128(msb, lsb);\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V6.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.security.SecureRandom;\nimport java.time.Instant;\nimport java.util.Random;\nimport java.util.concurrent.ThreadLocalRandom;\nimport java.util.function.LongSupplier;\n\npublic class GuidAllocator128V6 extends ArchGuidAllocator128 implements GuidAllocator128 {\n    private final long MASK_12 = 0x0000_0000_0000_0fffL;\n\n    private final long MULTICAST = 0x0000_0100_0000_0000L;\n\n    @Override\n    public GUID nextGUID() {\n        return UuidCreator.getTimeOrdered();\n    }\n\n    public GUID nextSimpleGUID() {\n        return this.nextGUID( System::currentTimeMillis, TLRandom::nextLong );\n    }\n\n    /**\n     * Returns a reordered gregorian time-based unique identifier (UUIDv6).\n     * <p>\n     * The clock sequence and node bits are reset to a pseudo-random value for each\n     * new UUIDv6 generated.\n     * <p>\n     * Usage:\n     *\n     * <pre>{@code\n     * SecureRandom random = new SecureRandom();\n     * GUID guid = GUID.v6(Instant.now(), random);\n     * }</pre>\n     *\n     * @param instant an instant (optional)\n     * @param random  a random generator (optional)\n     * @return a GUID\n     */\n    public GUID nextGUID( Instant instant, Random random ) {\n        return this.nextGUID( optional(instant), optional(random) );\n    }\n\n    private GUID nextGUID( LongSupplier msec, LongSupplier random ) {\n        final long time = gregorian(msec.getAsLong());\n        final long msb = ((time & ~MASK_12) << 4) | (time & MASK_12);\n        final long lsb = random.getAsLong() | MULTICAST;\n        return version(msb, lsb, 6);\n    }\n\n    private LongSupplier optional(Instant instant) {\n        return instant == null ? System::currentTimeMillis : instant::toEpochMilli;\n    }\n\n    private LongSupplier optional(Random random) {\n        return random == null ? 
TLRandom::nextLong : random::nextLong;\n    }\n\n    private long gregorian(final long millisecons) {\n        // 1582-10-15T00:00:00Z\n        final long factor = 10_000L;\n        final long offset = 12219292800000L;\n        return ((millisecons + offset) * factor);\n    }\n\n    GUID version(long hi, long lo, int version) {\n        // set the 4 most significant bits of the 7th byte\n        final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version\n        // set the 2 most significant bits of the 9th byte to 1 and 0\n        final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant\n        return new UUID128(msb, lsb);\n    }\n\n    static private class TLRandom {\n\n        // The JVM unique number tries to mitigate the fact that the thread\n        // local random is not seeded with a secure random seed by default.\n        // Their seeds are based on temporal data and predefined constants.\n        // Although the seeds are unique per JVM, they are not across JVMs.\n        // It helps to generate different sequences of numbers even if two\n        // ThreadLocalRandom are by chance instantiated with the same seed.\n        // Of course it doesn't better the output, but doesn't hurt either.\n        static final long JVM_UNIQUE_NUMBER = new SecureRandom().nextLong();\n\n        static private long nextLong() {\n            return ThreadLocalRandom.current().nextLong() ^ JVM_UNIQUE_NUMBER;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocator128V7.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.security.SecureRandom;\nimport java.time.Instant;\nimport java.util.Random;\nimport java.util.concurrent.ThreadLocalRandom;\nimport java.util.function.LongSupplier;\n\npublic class GuidAllocator128V7 extends ArchGuidAllocator128 implements GuidAllocator128 {\n    private  final long MASK_12 = 0x0000_0000_0000_0fffL;\n\n    @Override\n    public GUID nextGUID() {\n        return UuidCreator.getTimeOrderedEpoch();\n    }\n\n    public GUID nextSimpleGUID() {\n        return this.nextGUID(System::currentTimeMillis, TLRandom::nextLong);\n    }\n\n    /**\n     * Returns a Unix epoch time-based unique identifier (UUIDv7).\n     * <p>\n     * Usage:\n     *\n     * <pre>{@code\n     * SecureRandom random = new SecureRandom();\n     * GUID guid = GUID.v7(Instant.now(), random);\n     * }</pre>\n     *\n     * @param instant an instant (optional)\n     * @param random  a random generator (optional)\n     * @return a GUID\n     */\n    public GUID nextGUID( Instant instant, Random random ) {\n        return this.nextGUID( optional(instant), optional(random) );\n    }\n\n    private GUID nextGUID( LongSupplier msec, LongSupplier random ) {\n        final long time = msec.getAsLong();\n        final long msb = (time << 16) | (TLRandom.nextLong() & MASK_12);\n        final long lsb = random.getAsLong();\n        return this.version(msb, lsb, 7);\n    }\n\n    private LongSupplier optional( Instant instant ) {\n        return instant == null ? System::currentTimeMillis : instant::toEpochMilli;\n    }\n\n    private LongSupplier optional( Random random ) {\n        return random == null ? 
TLRandom::nextLong : random::nextLong;\n    }\n\n    private long gregorian( final long millisecons ) {\n        // 1582-10-15T00:00:00Z\n        final long factor = 10_000L;\n        final long offset = 12219292800000L;\n        return ((millisecons + offset) * factor);\n    }\n\n    GUID version( long hi, long lo, int version ) {\n        // set the 4 most significant bits of the 7th byte\n        final long msb = (hi & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12; // RFC 9562 version\n        // set the 2 most significant bits of the 9th byte to 1 and 0\n        final long lsb = (lo & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L; // RFC 9562 variant\n        return new UUID128(msb, lsb);\n    }\n\n    static private class TLRandom {\n\n        // The JVM unique number tries to mitigate the fact that the thread\n        // local random is not seeded with a secure random seed by default.\n        // Their seeds are based on temporal data and predefined constants.\n        // Although the seeds are unique per JVM, they are not across JVMs.\n        // It helps to generate different sequences of numbers even if two\n        // ThreadLocalRandom are by chance instantiated with the same seed.\n        // Of course it doesn't better the output, but doesn't hurt either.\n        static final long JVM_UNIQUE_NUMBER = new SecureRandom().nextLong();\n\n        static private long nextLong() {\n            return ThreadLocalRandom.current().nextLong() ^ JVM_UNIQUE_NUMBER;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/GuidAllocatorHC128V7.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport java.util.function.Supplier;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.ulf.util.guid.i128.factory.standard.TimeOrderedEpochFactory;\n\npublic class GuidAllocatorHC128V7 extends ArchGuidAllocator128 implements GuidAllocator {\n\n    protected Logger log = LoggerFactory.getLogger(this.getClass());\n\n    protected TimeOrderedEpochFactory mUuidFactory;\n\n    protected int mnNodeId;\n\n    public GuidAllocatorHC128V7( int nodeId ) {\n        this.mnNodeId = nodeId;\n\n        this.mUuidFactory = new TimeOrderedEpochFactory() ;\n\n        this.log.info( \"[GuidAllocatorHC128V7] <nodeId: {}>, firstGuid: {}>\", nodeId, this.nextGUID() );\n    }\n\n    @Override\n    public GUID nextGUID() {\n        long xorMask = ((long) this.mnNodeId & 0xFFFFFFFFL) << 16; // 32 ~ 48\n        return this.mUuidFactory.createXorUint64LSB( xorMask );\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/UUID128.java",
    "content": "package com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\n\nimport java.util.UUID;\n\npublic class UUID128 implements GUID128 {\n    /**\n     * The most significant bits.\n     */\n    long mostSigBits;\n    /**\n     * The least significant bits.\n     */\n    long leastSigBits;\n\n    public UUID128 () {\n        this( 0, 0  );\n    }\n\n    public UUID128( long mostSignificantBits, long leastSignificantBits ) {\n        this.mostSigBits = mostSignificantBits;\n        this.leastSigBits = leastSignificantBits;\n    }\n\n    public UUID128( String hexId ) {\n        ArchGuidAllocator128.Parser.parse( hexId, this );\n    }\n\n    @Override\n    public Identification parse( String hexID ) {\n        ArchGuidAllocator128.Parser.parse( hexID, this );\n        return this;\n    }\n\n    @Override\n    public long getMostSignificantBits() {\n        return this.mostSigBits;\n    }\n\n    @Override\n    public long getLeastSignificantBits() {\n        return this.leastSigBits;\n    }\n\n    @Override\n    public String toString() {\n        return stringify( this.leastSigBits, this.mostSigBits );\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n\n    public static String stringify( long leastSigBits, long mostSigBits ) {\n        char[] uuidChars = new char[36];\n        hexDigits(uuidChars, 0, mostSigBits >>> 32, 8);\n        uuidChars[8] = '-';\n        hexDigits(uuidChars, 9, mostSigBits >>> 16, 4);\n        uuidChars[13] = '-';\n        hexDigits(uuidChars, 14, mostSigBits, 4);\n        uuidChars[18] = '-';\n        hexDigits(uuidChars, 19, leastSigBits >>> 48, 4);\n        uuidChars[23] = '-';\n        hexDigits(uuidChars, 24, leastSigBits, 12);\n        return new String(uuidChars);\n    }\n\n    private static final char[] HEX_DIGITS = {\n            '0','1','2','3','4','5','6','7',\n       
     '8','9','a','b','c','d','e','f'\n    };\n\n    private static void hexDigits( char[] dest, int offset, long val, int digits ) {\n        for (int i = offset + digits - 1, shift = 0; i >= offset; i--, shift +=4) {\n            dest[i] = HEX_DIGITS[(int)((val >>> shift) & 0xF)];\n        }\n    }\n\n    @Override\n    public UUID toUUID() {\n        return new UUID( this.mostSigBits, this.leastSigBits );\n    }\n\n    @Override\n    public int version() {\n        // Version is bits masked by 0x000000000000F000 in MS long\n        return (int)((this.mostSigBits >> 12) & 0x0f);\n    }\n\n    @Override\n    public int variant() {\n        // This field is composed of a varying number of bits.\n        // 0    -    -    Reserved for NCS backward compatibility\n        // 1    0    -    The IETF aka Leach-Salz variant (used by this class)\n        // 1    1    0    Reserved, Microsoft backward compatibility\n        // 1    1    1    Reserved for future definition.\n        return (int) ((this.leastSigBits >>> (64 - (this.leastSigBits >>> 62)))\n                & (this.leastSigBits >> 63));\n    }\n\n    @Override\n    public int clockSequence() {\n        if (version() != 1) {\n            throw new UnsupportedOperationException(\"Not a time-based GUID\");\n        }\n\n        return (int)((this.leastSigBits & 0x3FFF000000000000L) >>> 48);\n    }\n\n    @Override\n    public long node() {\n        if (version() != 1) {\n            throw new UnsupportedOperationException(\"Not a time-based UUID\");\n        }\n\n        return this.leastSigBits & 0x0000FFFFFFFFFFFFL;\n    }\n\n    @Override\n    public byte[] toBytesLE() {\n        byte[] bytes = new byte[16];\n\n        // Least significant bits first (little endian)\n        bytes[0]  = (byte) (this.leastSigBits);\n        bytes[1]  = (byte) (this.leastSigBits >> 8);\n        bytes[2]  = (byte) (this.leastSigBits >> 16);\n        bytes[3]  = (byte) (this.leastSigBits >> 24);\n        bytes[4]  = (byte) 
(this.leastSigBits >> 32);\n        bytes[5]  = (byte) (this.leastSigBits >> 40);\n        bytes[6]  = (byte) (this.leastSigBits >> 48);\n        bytes[7]  = (byte) (this.leastSigBits >> 56);\n\n        // Then most significant bits (little endian)\n        bytes[8]  = (byte) (this.mostSigBits);\n        bytes[9]  = (byte) (this.mostSigBits >> 8);\n        bytes[10] = (byte) (this.mostSigBits >> 16);\n        bytes[11] = (byte) (this.mostSigBits >> 24);\n        bytes[12] = (byte) (this.mostSigBits >> 32);\n        bytes[13] = (byte) (this.mostSigBits >> 40);\n        bytes[14] = (byte) (this.mostSigBits >> 48);\n        bytes[15] = (byte) (this.mostSigBits >> 56);\n\n        return bytes;\n    }\n\n    @Override\n    public byte[] toBytesBE() {\n        byte[] bytes = new byte[16];\n\n        // Most significant bits first (big endian)\n        bytes[0]  = (byte) (this.mostSigBits >> 56);\n        bytes[1]  = (byte) (this.mostSigBits >> 48);\n        bytes[2]  = (byte) (this.mostSigBits >> 40);\n        bytes[3]  = (byte) (this.mostSigBits >> 32);\n        bytes[4]  = (byte) (this.mostSigBits >> 24);\n        bytes[5]  = (byte) (this.mostSigBits >> 16);\n        bytes[6]  = (byte) (this.mostSigBits >> 8);\n        bytes[7]  = (byte) (this.mostSigBits);\n\n        // Then least significant bits (big endian)\n        bytes[8]  = (byte) (this.leastSigBits >> 56);\n        bytes[9]  = (byte) (this.leastSigBits >> 48);\n        bytes[10] = (byte) (this.leastSigBits >> 40);\n        bytes[11] = (byte) (this.leastSigBits >> 32);\n        bytes[12] = (byte) (this.leastSigBits >> 24);\n        bytes[13] = (byte) (this.leastSigBits >> 16);\n        bytes[14] = (byte) (this.leastSigBits >> 8);\n        bytes[15] = (byte) (this.leastSigBits);\n\n        return bytes;\n    }\n\n    @Override\n    public int hashCode() {\n        long hilo = this.mostSigBits ^ this.leastSigBits;\n        return ((int)(hilo >> 32)) ^ (int) hilo;\n    }\n\n    @Override\n    public long 
hashCode64() {\n        return this.mostSigBits ^ this.leastSigBits;\n    }\n\n    @Override\n    public int intVal() {\n        return this.hashCode();\n    }\n\n    @Override\n    public long longVal() {\n        return this.hashCode64();\n    }\n\n    @Override\n    public boolean equals(Object obj) {\n        if ( !(obj instanceof GUID128) ) {\n            return false;\n        }\n        GUID128 id = (GUID128)obj;\n        return (\n                this.mostSigBits == id.getMostSignificantBits() &&\n                this.leastSigBits == id.getLeastSignificantBits()\n        );\n    }\n\n    @Override\n    public int compareTo( Identification that ) {\n        GUID128 val;\n        if ( that instanceof GUID128 ) {\n            val = (GUID128) that;\n        }\n        else {\n            throw new IllegalArgumentException( \"Not GUID128\" );\n        }\n\n        // The ordering is intentionally set up so that the UUIDs\n        // can simply be numerically compared as two numbers\n        return (\n                this.mostSigBits < val.getMostSignificantBits() ? -1 :\n                (\n                        this.mostSigBits > val.getMostSignificantBits() ? 1 :\n                        (\n                                this.leastSigBits < val.getLeastSignificantBits() ? -1 :\n                                (\n                                        this.leastSigBits > val.getLeastSignificantBits() ? 1 :\n                                        0\n                                )\n                        )\n                )\n        );\n    }\n\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/UuidCreator.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.codec.StandardBinaryCodec;\nimport com.pinecone.ulf.util.guid.i128.codec.StandardStringCodec;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidLocalDomain;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidNamespace;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.factory.UuidFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.UuidFactory.Parameters;\nimport com.pinecone.ulf.util.guid.i128.factory.nonstandard.PrefixCombFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.nonstandard.ShortPrefixCombFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.nonstandard.ShortSuffixCombFactory;\nimport 
com.pinecone.ulf.util.guid.i128.factory.nonstandard.SuffixCombFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.standard.DceSecurityFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.standard.NameBasedMd5Factory;\nimport com.pinecone.ulf.util.guid.i128.factory.standard.NameBasedSha1Factory;\nimport com.pinecone.ulf.util.guid.i128.factory.standard.RandomBasedFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.standard.TimeBasedFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.standard.TimeOrderedEpochFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.standard.TimeOrderedFactory;\nimport com.pinecone.ulf.util.guid.i128.util.MachineId;\n\nimport java.time.Instant;\nimport java.util.Objects;\nimport java.util.UUID;\nimport java.util.concurrent.ThreadLocalRandom;\nimport java.util.concurrent.locks.ReentrantLock;\nimport java.util.function.Supplier;\n\n/**\n * Facade for everything.\n * <p>\n * All UUID types can be generated from this entry point.\n */\npublic final class UuidCreator {\n\n\t/**\n\t * Name space to be used when the name string is a fully-qualified domain name.\n\t */\n\tpublic static final UuidNamespace NAMESPACE_DNS = UuidNamespace.NAMESPACE_DNS;\n\t/**\n\t * Name space to be used when the name string is a URL.\n\t */\n\tpublic static final UuidNamespace NAMESPACE_URL = UuidNamespace.NAMESPACE_URL;\n\t/**\n\t * Name space to be used when the name string is an ISO OID.\n\t */\n\tpublic static final UuidNamespace NAMESPACE_OID = UuidNamespace.NAMESPACE_OID;\n\t/**\n\t * Name space to be used when the name string is an X.500 DN (DER or text).\n\t */\n\tpublic static final UuidNamespace NAMESPACE_X500 = UuidNamespace.NAMESPACE_X500;\n\n\t/**\n\t * The principal domain, interpreted as POSIX UID domain on POSIX systems.\n\t */\n\tpublic static final UuidLocalDomain LOCAL_DOMAIN_PERSON = UuidLocalDomain.LOCAL_DOMAIN_PERSON;\n\t/**\n\t * The group domain, interpreted as POSIX GID domain on POSIX systems.\n\t */\n\tpublic static 
final UuidLocalDomain LOCAL_DOMAIN_GROUP = UuidLocalDomain.LOCAL_DOMAIN_GROUP;\n\t/**\n\t * The organization domain, site-defined.\n\t */\n\tpublic static final UuidLocalDomain LOCAL_DOMAIN_ORG = UuidLocalDomain.LOCAL_DOMAIN_ORG;\n\n\tprivate static final GUID UUID_NIL = new UUID128(0x0000000000000000L, 0x0000000000000000L);\n\tprivate static final GUID UUID_MAX = new UUID128(0xffffffffffffffffL, 0xffffffffffffffffL);\n\n\tprivate UuidCreator() {\n\t}\n\n\t/**\n\t * Returns a Nil UUID.\n\t * <p>\n\t * Nil UUID is a special UUID that has all 128 bits set to ZERO.\n\t * <p>\n\t * The canonical string of Nil UUID is\n\t * <code>00000000-0000-0000-0000-000000000000</code>.\n\t * \n\t * @return a Nil UUID\n\t */\n\tpublic static GUID getNil() {\n\t\treturn UUID_NIL;\n\t}\n\n\t/**\n\t * Returns a Max UUID.\n\t * <p>\n\t * Max UUID is a special UUID that has all 128 bits set to ONE.\n\t * <p>\n\t * The canonical string of Max UUID is\n\t * <code>FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF</code>.\n\t * \n\t * @return a Max UUID\n\t * @since 5.0.0\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getMax() {\n\t\treturn UUID_MAX;\n\t}\n\n\t/**\n\t * Returns an array of bytes from a UUID.\n\t * \n\t * @param uuid a UUID\n\t * @return an array of bytes\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static byte[] toBytes(final GUID128 uuid) {\n\t\treturn StandardBinaryCodec.INSTANCE.encode(uuid);\n\t}\n\n\t/**\n\t * Returns a UUID from a byte array.\n\t * <p>\n\t * It also checks if the input byte array is valid.\n\t * \n\t * @param uuid a byte array\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static GUID fromBytes(byte[] uuid) {\n\t\treturn StandardBinaryCodec.INSTANCE.decode(uuid);\n\t}\n\n\t/**\n\t * Returns a string from a UUID.\n\t * <p>\n\t * It can be much faster than 
{@link UUID#toString()} in JDK 8.\n\t * \n\t * @param uuid a UUID\n\t * @return a UUID string\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static String toString(GUID128 uuid) {\n\t\treturn StandardStringCodec.INSTANCE.encode(uuid);\n\t}\n\n\t/**\n\t * Returns a UUID from a string.\n\t * <p>\n\t * It accepts strings:\n\t * <ul>\n\t * <li>With URN prefix: \"urn:uuid:\";\n\t * <li>With curly braces: '{' and '}';\n\t * <li>With upper or lower case;\n\t * <li>With or without hyphens.\n\t * </ul>\n\t * <p>\n\t * It can be much faster than {@link UUID#fromString(String)} in JDK 8.\n\t * <p>\n\t * It also can be twice as fast as {@link UUID#fromString(String)} in JDK 11.\n\t * \n\t * @param uuid a UUID string\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static GUID fromString(String uuid) {\n\t\treturn StandardStringCodec.INSTANCE.decode(uuid);\n\t}\n\n\t/**\n\t * Returns a random-based unique identifier (UUIDv4).\n\t * \n\t * @return a UUIDv4\n\t * @see RandomBasedFactory\n\t */\n\tpublic static GUID getRandomBased() {\n\t\treturn UUID4.create();\n\t}\n\n\t/**\n\t * Returns a fast random-based unique identifier (UUIDv4).\n\t * <p>\n\t * It employs {@link ThreadLocalRandom} which works very well, although not\n\t * cryptographically strong. 
It can be useful, for example, for logging.\n\t * <p>\n\t * Security-sensitive applications that require a cryptographically secure\n\t * pseudo-random generator should use {@link UuidCreator#getRandomBased()}.\n\t * \n\t * @return a UUIDv4\n\t * @see RandomBasedFactory\n\t * @see ThreadLocalRandom\n\t * @since 5.2.0\n\t */\n\tpublic static GUID getRandomBasedFast() {\n\t\treturn UUID4_FAST.create();\n\t}\n\n\t/**\n\t * Returns a time-based unique identifier (UUIDv1).\n\t * <p>\n\t * The default node identifier is a random number that is generated once at\n\t * initialization.\n\t * <p>\n\t * A custom node identifier can be provided by the system property\n\t * 'uuidcreator.node' or the environment variable 'UUIDCREATOR_NODE'.\n\t * \n\t * @return a UUIDv1\n\t * @see TimeBasedFactory\n\t */\n\tpublic static GUID getTimeBased() {\n\t\treturn UUID1.create();\n\t}\n\n\t/**\n\t * Returns a time-based unique identifier (UUIDv1).\n\t * <p>\n\t * The node identifier is a MAC address that is obtained once at initialization.\n\t * \n\t * @return a UUIDv1\n\t * @see TimeBasedFactory\n\t */\n\tpublic static GUID getTimeBasedWithMac() {\n\t\treturn UUID1_MAC.create();\n\t}\n\n\t/**\n\t * Returns a time-based unique identifier (UUIDv1).\n\t * <p>\n\t * The node identifier is a hash that is calculated once at initialization.\n\t * <p>\n\t * The hash input is a string containing host name, MAC and IP.\n\t * \n\t * @return a UUIDv1\n\t * @see TimeBasedFactory\n\t * @see MachineId\n\t */\n\tpublic static GUID getTimeBasedWithHash() {\n\t\treturn UUID1_HASH.create();\n\t}\n\n\t/**\n\t * Returns a time-based unique identifier (UUIDv1).\n\t * <p>\n\t * The node identifier is a random number that is generated with each method\n\t * invocation.\n\t * \n\t * @return a UUIDv1\n\t * @see TimeBasedFactory\n\t */\n\tpublic static GUID getTimeBasedWithRandom() {\n\t\treturn UUID1_RANDOM.create();\n\t}\n\n\t/**\n\t * Returns a time-based unique identifier (UUIDv1).\n\t * <p>\n\t * {@link 
Instant} accuracy is be limited to 1 millisecond on Linux with JDK 8.\n\t * On Windows, its accuracy may be limited to 15.625ms (64hz).\n\t * <p>\n\t * The clock sequence is a number between 0 and 16383 (2^14 - 1). If the value\n\t * passed as an argument is out of range, the result of MOD 2^14 will be used.\n\t * <p>\n\t * The node identifier is a number between 0 and 281474976710655 (2^48 - 1). If\n\t * the value passed as an argument is out of range, the result of MOD 2^48 will\n\t * be used.\n\t * <p>\n\t * Null arguments are ignored. If all arguments are null, this method works just\n\t * like method {@link UuidCreator#getTimeBased()}.\n\t * \n\t * @param instant  an alternate instant\n\t * @param clockseq an alternate clock sequence between 0 and 2^14-1\n\t * @param nodeid   an alternate node identifier between 0 and 2^48-1\n\t * @return a UUIDv1\n\t * @see TimeBasedFactory\n\t */\n\tpublic static GUID getTimeBased(Instant instant, Integer clockseq, Long nodeid) {\n\t\tTimeBasedFactory.Builder builder = TimeBasedFactory.builder();\n\t\tif (instant != null) {\n\t\t\tbuilder.withInstant(instant);\n\t\t}\n\t\tif (clockseq != null) {\n\t\t\tbuilder.withClockSeq(clockseq);\n\t\t}\n\t\tif (nodeid != null) {\n\t\t\tbuilder.withNodeId(nodeid);\n\t\t}\n\t\treturn builder.build().create();\n\t}\n\n\t/**\n\t * Returns the minimum UUIDv1 for a given instant.\n\t * <p>\n\t * The 60 bits of the timestamp are filled with the bits of the given instant\n\t * and the other 62 bits are all set to ZERO.\n\t * <p>\n\t * For example, the minimum UUIDv1 for 2022-02-22 22:22:22.222 is\n\t * `{@code e7a1c2e0-942d-11ec-8000-000000000000}`, where\n\t * `{@code e7a1c2e0-942d-_1ec}` is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a UUIDv1\n\t */\n\tpublic static GUID getTimeBasedMin(Instant instant) 
{\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tTimeBasedFactory.Builder builder = TimeBasedFactory.builder();\n\t\treturn builder.withInstant(instant).withClockSeq(0x0000L).withNodeId(0x000000000000L).build().create();\n\t}\n\n\t/**\n\t * Returns the maximum UUIDv1 for a given instant.\n\t * <p>\n\t * The 60 bits of the timestamp are filled with the bits of the given instant\n\t * and the other 62 bits are all set to ONE.\n\t * <p>\n\t * For example, the maximum UUIDv1 for 2022-02-22 22:22:22.222 is\n\t * `{@code e7a1c2e0-942d-11ec-bfff-ffffffffffff}`, where\n\t * `{@code e7a1c2e0-942d-_1ec}` is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a UUIDv1\n\t */\n\tpublic static GUID getTimeBasedMax(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tTimeBasedFactory.Builder builder = TimeBasedFactory.builder();\n\t\treturn builder.withInstant(instant).withClockSeq(0xffffL).withNodeId(0xffffffffffffL).build().create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier (UUIDv6).\n\t * <p>\n\t * The default node identifier is a random number that is generated once at\n\t * initialization.\n\t * <p>\n\t * A custom node identifier can be provided by the system property\n\t * 'uuidcreator.node' or the environment variable 'UUIDCREATOR_NODE'.\n\t * \n\t * @return a UUIDv6\n\t * @see TimeOrderedFactory\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrdered() {\n\t\treturn UUID6.create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier (UUIDv6).\n\t * <p>\n\t * The node identifier is a MAC address that is obtained once at initialization.\n\t * \n\t * @return a UUIDv6\n\t * @see 
TimeOrderedFactory\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrderedWithMac() {\n\t\treturn UUID6_MAC.create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier (UUIDv6).\n\t * <p>\n\t * The node identifier is a hash that is calculated once at initialization.\n\t * <p>\n\t * The hash input is a string containing host name, MAC and IP.\n\t * \n\t * @return a UUIDv6\n\t * @see TimeOrderedFactory\n\t * @see MachineId\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrderedWithHash() {\n\t\treturn UUID6_HASH.create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier (UUIDv6).\n\t * <p>\n\t * The node identifier is a random number that is generated with each method\n\t * invocation.\n\t * \n\t * @return a UUIDv6\n\t * @see TimeOrderedFactory\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrderedWithRandom() {\n\t\treturn UUID6_RANDOM.create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier (UUIDv6).\n\t * <p>\n\t * {@link Instant} accuracy is be limited to 1 millisecond on Linux with JDK 8.\n\t * On Windows, its accuracy may be limited to 15.625ms (64hz).\n\t * <p>\n\t * The clock sequence is a number between 0 and 16383 (2^14 - 1). If the value\n\t * passed as an argument is out of range, the result of MOD 2^14 will be used.\n\t * <p>\n\t * The node identifier is a number between 0 and 281474976710655 (2^48 - 1). If\n\t * the value passed as an argument is out of range, the result of MOD 2^48 will\n\t * be used.\n\t * <p>\n\t * Null arguments are ignored. 
If all arguments are null, this method works just\n\t * like method {@link UuidCreator#getTimeOrdered()}.\n\t * \n\t * @param instant  an alternate instant\n\t * @param clockseq an alternate clock sequence between 0 and 2^14-1\n\t * @param nodeid   an alternate node identifier between 0 and 2^48-1\n\t * @return a UUIDv6\n\t * @see TimeOrderedFactory\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrdered(Instant instant, Integer clockseq, Long nodeid) {\n\t\tTimeOrderedFactory.Builder builder = TimeOrderedFactory.builder();\n\t\tif (instant != null) {\n\t\t\tbuilder.withInstant(instant);\n\t\t}\n\t\tif (clockseq != null) {\n\t\t\tbuilder.withClockSeq(clockseq);\n\t\t}\n\t\tif (nodeid != null) {\n\t\t\tbuilder.withNodeId(nodeid);\n\t\t}\n\t\treturn builder.build().create();\n\t}\n\n\t/**\n\t * Returns the minimum UUIDv6 for a given instant.\n\t * <p>\n\t * The 60 bits of the timestamp are filled with the bits of the given instant\n\t * and the other 62 bits are all set to ZERO.\n\t * <p>\n\t * For example, the minimum UUIDv6 for 2022-02-22 22:22:22.222 is\n\t * `{@code 1ec942de-7a1c-62e0-8000-000000000000}`, where\n\t * `{@code 1ec942de-7a1c-_2e0}` is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a UUIDv6\n\t */\n\tpublic static GUID getTimeOrderedMin(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tTimeOrderedFactory.Builder builder = TimeOrderedFactory.builder();\n\t\treturn builder.withInstant(instant).withClockSeq(0x0000L).withNodeId(0x000000000000L).build().create();\n\t}\n\n\t/**\n\t * Returns the maximum UUIDv6 for a given instant.\n\t * <p>\n\t * The 60 bits of the timestamp are filled with the bits of the given 
instant\n\t * and the other 62 bits are all set to ONE.\n\t * <p>\n\t * For example, the maximum UUIDv6 for 2022-02-22 22:22:22.222 is\n\t * `{@code 1ec942de-7a1c-62e0-bfff-ffffffffffff}`, where\n\t * `{@code 1ec942de-7a1c-_2e0}` is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a UUIDv6\n\t */\n\tpublic static GUID getTimeOrderedMax(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tTimeOrderedFactory.Builder builder = TimeOrderedFactory.builder();\n\t\treturn builder.withInstant(instant).withClockSeq(0xffffL).withNodeId(0xffffffffffffL).build().create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier that uses Unix Epoch (UUIDv7).\n\t * <p>\n\t * This method produces identifiers with 3 parts: time, counter and random.\n\t * <p>\n\t * The counter bits are incremented by 1 when the time repeats.\n\t * <p>\n\t * The random bits are generated with each method invocation.\n\t * \n\t * @return a UUIDv7\n\t * @since 5.0.0\n\t * @see TimeOrderedEpochFactory\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrderedEpoch() {\n\t\treturn UUID7.create();\n\t}\n\n\t/**\n\t * Returns a fast time-ordered unique identifier that uses Unix Epoch (UUIDv7).\n\t * <p>\n\t * This method produces identifiers with 3 parts: time, counter and random.\n\t * <p>\n\t * The counter bits are incremented by 1 when the time repeats.\n\t * <p>\n\t * The random bits are generated with each method invocation.\n\t * <p>\n\t * It employs {@link ThreadLocalRandom} which works very well, although not\n\t * cryptographically strong. 
It can be useful, for example, for logging.\n\t * <p>\n\t * Security-sensitive applications that require a cryptographically secure\n\t * pseudo-random generator should use {@link UuidCreator#getTimeOrderedEpoch()}.\n\t * \n\t * @return a UUIDv7\n\t * @since 6.0.0\n\t * @see TimeOrderedEpochFactory\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrderedEpochFast() {\n\t\treturn UUID7_FAST.create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier that uses Unix Epoch (UUIDv7).\n\t * <p>\n\t * This method produces identifiers with 2 parts: time and monotonic random.\n\t * <p>\n\t * The monotonic random bits are incremented by 1 when the time repeats.\n\t * \n\t * @return a UUIDv7\n\t * @since 5.0.0\n\t * @see TimeOrderedEpochFactory\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrderedEpochPlus1() {\n\t\treturn UUID7_PLUS_1.create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier that uses Unix Epoch (UUIDv7).\n\t * <p>\n\t * This method produces identifiers with 2 parts: time and monotonic random.\n\t * <p>\n\t * The monotonic random bits are incremented by a random number between 1 and\n\t * 2^32 when the time repeats.\n\t * \n\t * @return a UUIDv7\n\t * @since 5.0.0\n\t * @see TimeOrderedEpochFactory\n\t * @see <a href=\n\t *      \"https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-04.html\">New\n\t *      UUID Formats</a>\n\t */\n\tpublic static GUID getTimeOrderedEpochPlusN() {\n\t\treturn UUID7_PLUS_N.create();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier that uses Unix Epoch (UUIDv7) for a\n\t * given instant.\n\t * <p>\n\t * This method produces identifiers with 2 parts: time and secure random.\n\t * <p>\n\t * The 48 bits of the 
time component are filled with the bits of the given\n\t * instant and the other 74 bits are random.\n\t * <p>\n\t * For example, the maximum UUIDv7 for 2022-02-22 22:22:22.222 is\n\t * `{@code 017f2387-460e-7012-b345-6789abcdef01}`, where `{@code 017f2387-460e}`\n\t * is the timestamp in hexadecimal.\n\t * <p>\n\t * The random bits are generated with each method invocation.\n\t * <p>\n\t * You can use this method to produce UUIDs with any instant you want, for\n\t * example to obfuscate the actual generation instant in a simple way. Example:\n\t * <p>\n\t * \n\t * <pre>{@code\n\t * // Shift the generation instant 1 day ahead of system clock\n\t * Instant instant = Instant.now().plus(Duration.ofDays(1));\n\t * UUID uuid = UuidCreator.getTimeOrderedEpoch(instant);\n\t * }</pre>\n\t *\n\t * @param instant a given instant\n\t * @return a UUIDv7\n\t * @since 5.3.3\n\t */\n\tpublic static GUID getTimeOrderedEpoch(Instant instant) {\n\t\treturn UUID7.create(Parameters.builder().withInstant(instant).build());\n\t}\n\n\t/**\n\t * Returns the minimum UUIDv7 for a given instant.\n\t * <p>\n\t * The 48 bits of the time component are filled with the bits of the given\n\t * instant and the other 74 bits are all set to ZERO.\n\t * <p>\n\t * For example, the minimum UUIDv7 for 2022-02-22 22:22:22.222 is\n\t * `{@code 017f2387-460e-7000-8000-000000000000}`, where `{@code 017f2387-460e}`\n\t * is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a UUIDv7\n\t */\n\tpublic static GUID getTimeOrderedEpochMin(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tfinal long time = instant.toEpochMilli();\n\t\treturn new UUID128((time << 16) | 0x7000L, 0x8000000000000000L) {\n\t\t};\n\t}\n\n\t/**\n\t * Returns the maximum UUIDv7 for a given instant.\n\t * <p>\n\t * The 48 bits of 
the time component are filled with the bits of the given\n\t * instant and the other 74 bits are all set to ONE.\n\t * <p>\n\t * For example, the maximum UUIDv7 for 2022-02-22 22:22:22.222 is\n\t * `{@code 017f2387-460e-7fff-bfff-ffffffffffff}`, where `{@code 017f2387-460e}`\n\t * is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a UUIDv7\n\t */\n\tpublic static GUID getTimeOrderedEpochMax(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tfinal long time = instant.toEpochMilli();\n\t\treturn new UUID128((time << 16) | 0x7fffL, 0xbfffffffffffffffL);\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param name a string\n\t * @return a GUIDv3\n\t * @see NameBasedMd5Factory\n\t */\n\tpublic static GUID getNameBasedMd5(String name) {\n\t\treturn UUID3.create(Parameters.builder().withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n\t * \n\t * @param name a byte array\n\t * @return a UUIDv3\n\t * @see NameBasedMd5Factory\n\t */\n\tpublic static GUID getNameBasedMd5(byte[] name) {\n\t\treturn UUID3.create(Parameters.builder().withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param namespace a custom name space UUID\n\t * @param name      a string\n\t * @return a UUIDv3\n\t * @see UuidNamespace\n\t * @see NameBasedMd5Factory\n\t */\n\tpublic static GUID getNameBasedMd5(GUID128 namespace, String name) {\n\t\treturn 
UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n\t * \n\t * @param namespace a custom name space UUID\n\t * @param name      a byte array\n\t * @return a UUIDv3\n\t * @see UuidNamespace\n\t * @see NameBasedMd5Factory\n\t */\n\tpublic static GUID getNameBasedMd5(GUID128 namespace, byte[] name) {\n\t\treturn UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param namespace a custom name space UUID in string format\n\t * @param name      a string\n\t * @return a UUIDv3\n\t * @throws InvalidUuidException if namespace is invalid\n\t * @see UuidNamespace\n\t * @see NameBasedMd5Factory\n\t */\n\tpublic static GUID getNameBasedMd5(String namespace, String name) {\n\t\treturn UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n\t * \n\t * @param namespace a custom name space UUID in string format\n\t * @param name      a byte array\n\t * @return a UUIDv3\n\t * @throws InvalidUuidException if namespace is invalid\n\t * @see UuidNamespace\n\t * @see NameBasedMd5Factory\n\t */\n\tpublic static GUID getNameBasedMd5(String namespace, byte[] name) {\n\t\treturn UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * <p>\n\t * Name spaces predefined by RFC 9562 (Appendix C):\n\t * <ul>\n\t * <li>NAMESPACE_DNS: Name string is a fully-qualified domain name;\n\t * <li>NAMESPACE_URL: Name string is a URL;\n\t * 
<li>NAMESPACE_OID: Name string is an ISO OID;\n\t * <li>NAMESPACE_X500: Name string is an X.500 DN (in DER or text format).\n\t * </ul>\n\t * \n\t * @param namespace a predefined name space enumeration\n\t * @param name      a string\n\t * @return a UUIDv3\n\t * @see UuidNamespace\n\t * @see NameBasedMd5Factory\n\t */\n\tpublic static GUID getNameBasedMd5(UuidNamespace namespace, String name) {\n\t\treturn UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses MD5 hashing (UUIDv3).\n\t * <p>\n\t * Name spaces predefined by RFC 9562 (Appendix C):\n\t * <ul>\n\t * <li>NAMESPACE_DNS: Name string is a fully-qualified domain name;\n\t * <li>NAMESPACE_URL: Name string is a URL;\n\t * <li>NAMESPACE_OID: Name string is an ISO OID;\n\t * <li>NAMESPACE_X500: Name string is an X.500 DN (in DER or text format).\n\t * </ul>\n\t * \n\t * @param namespace a predefined name space enumeration\n\t * @param name      a byte array\n\t * @return a UUIDv3\n\t * @see UuidNamespace\n\t * @see NameBasedMd5Factory\n\t */\n\tpublic static GUID getNameBasedMd5(UuidNamespace namespace, byte[] name) {\n\t\treturn UUID3.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param name a string\n\t * @return a UUIDv5\n\t * @see NameBasedSha1Factory\n\t */\n\tpublic static GUID getNameBasedSha1(String name) {\n\t\treturn UUID5.create(Parameters.builder().withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n\t * \n\t * @param name a byte array\n\t * @return a UUIDv5\n\t * @see NameBasedSha1Factory\n\t */\n\tpublic static GUID getNameBasedSha1(byte[] name) {\n\t\treturn 
UUID5.create(Parameters.builder().withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param namespace a custom name space UUID\n\t * @param name      a string\n\t * @return a UUIDv5\n\t * @see UuidNamespace\n\t * @see NameBasedSha1Factory\n\t */\n\tpublic static GUID getNameBasedSha1(GUID128 namespace, String name) {\n\t\treturn UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n\t * \n\t * @param namespace a custom name space UUID\n\t * @param name      a byte array\n\t * @return a UUIDv5\n\t * @see UuidNamespace\n\t * @see NameBasedSha1Factory\n\t */\n\tpublic static GUID getNameBasedSha1(GUID128 namespace, byte[] name) {\n\t\treturn UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param namespace a custom name space UUID in string format\n\t * @param name      a string\n\t * @return a UUIDv5\n\t * @throws InvalidUuidException if namespace is invalid\n\t * @see UuidNamespace\n\t * @see NameBasedSha1Factory\n\t */\n\tpublic static GUID getNameBasedSha1(String namespace, String name) {\n\t\treturn UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n\t * \n\t * @param namespace a custom name space UUID in string format\n\t * @param name      a byte array\n\t * @return a UUIDv5\n\t * @throws InvalidUuidException if namespace is invalid\n\t * @see UuidNamespace\n\t * @see NameBasedSha1Factory\n\t */\n\tpublic static GUID 
getNameBasedSha1(String namespace, byte[] name) {\n\t\treturn UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * <p>\n\t * Name spaces predefined by RFC 9562 (Appendix C):\n\t * <ul>\n\t * <li>NAMESPACE_DNS: Name string is a fully-qualified domain name;\n\t * <li>NAMESPACE_URL: Name string is a URL;\n\t * <li>NAMESPACE_OID: Name string is an ISO OID;\n\t * <li>NAMESPACE_X500: Name string is an X.500 DN (in DER or text format).\n\t * </ul>\n\t * \n\t * @param namespace a predefined name space enumeration\n\t * @param name      a string\n\t * @return a UUIDv5\n\t * @see UuidNamespace\n\t * @see NameBasedSha1Factory\n\t */\n\tpublic static GUID getNameBasedSha1(UuidNamespace namespace, String name) {\n\t\treturn UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a name-based unique identifier that uses SHA-1 hashing (UUIDv5).\n\t * <p>\n\t * Name spaces predefined by RFC 9562 (Appendix C):\n\t * <ul>\n\t * <li>NAMESPACE_DNS: Name string is a fully-qualified domain name;\n\t * <li>NAMESPACE_URL: Name string is a URL;\n\t * <li>NAMESPACE_OID: Name string is an ISO OID;\n\t * <li>NAMESPACE_X500: Name string is an X.500 DN (in DER or text format).\n\t * </ul>\n\t * \n\t * @param namespace a predefined name space enumeration\n\t * @param name      a byte array\n\t * @return a UUIDv5\n\t * @see UuidNamespace\n\t * @see NameBasedSha1Factory\n\t */\n\tpublic static GUID getNameBasedSha1(UuidNamespace namespace, byte[] name) {\n\t\treturn UUID5.create(Parameters.builder().withNamespace(namespace).withName(name).build());\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * \n\t * @param localDomain     a custom local domain byte\n\t * @param localIdentifier a local identifier\n\t * 
@return a UUIDv2\n\t * @see UuidLocalDomain\n\t * @see DceSecurityFactory\n\t */\n\tpublic static GUID getDceSecurity(byte localDomain, int localIdentifier) {\n\t\treturn UUID2\n\t\t\t\t.create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build());\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * \n\t * @param localDomain     a custom local domain byte\n\t * @param localIdentifier a local identifier\n\t * @return a UUIDv2\n\t * @see UuidLocalDomain\n\t * @see DceSecurityFactory\n\t */\n\tpublic static GUID getDceSecurityWithMac(byte localDomain, int localIdentifier) {\n\t\treturn UUID2_MAC\n\t\t\t\t.create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build());\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * \n\t * @param localDomain     a custom local domain byte\n\t * @param localIdentifier a local identifier\n\t * @return a UUIDv2\n\t * @see UuidLocalDomain\n\t * @see DceSecurityFactory\n\t */\n\tpublic static GUID getDceSecurityWithHash(byte localDomain, int localIdentifier) {\n\t\treturn UUID2_HASH\n\t\t\t\t.create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build());\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t *\n\t * @param localDomain     a custom local domain byte\n\t * @param localIdentifier a local identifier\n\t * @return a UUIDv2\n\t * @see UuidLocalDomain\n\t * @see DceSecurityFactory\n\t */\n\tpublic static GUID getDceSecurityWithRandom(byte localDomain, int localIdentifier) {\n\t\treturn UUID2_RANDOM\n\t\t\t\t.create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build());\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * <p>\n\t * Local domains predefined by DCE 1.1 Authentication and Security Services\n\t * (Chapter 11):\n\t * <ul>\n\t * <li>LOCAL_DOMAIN_PERSON: 0 
(interpreted as POSIX UID domain);\n\t * <li>LOCAL_DOMAIN_GROUP: 1 (interpreted as POSIX GID domain);\n\t * <li>LOCAL_DOMAIN_ORG: 2.\n\t * </ul>\n\t * \n\t * @param localDomain     a predefined local domain enumeration\n\t * @param localIdentifier a local identifier\n\t * @return a UUIDv2\n\t * @see UuidLocalDomain\n\t * @see DceSecurityFactory\n\t */\n\tpublic static GUID getDceSecurity(UuidLocalDomain localDomain, int localIdentifier) {\n\t\treturn UUID2\n\t\t\t\t.create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build());\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * <p>\n\t * Local domains predefined by DCE 1.1 Authentication and Security Services\n\t * (Chapter 11):\n\t * <ul>\n\t * <li>LOCAL_DOMAIN_PERSON: 0 (interpreted as POSIX UID domain);\n\t * <li>LOCAL_DOMAIN_GROUP: 1 (interpreted as POSIX GID domain);\n\t * <li>LOCAL_DOMAIN_ORG: 2.\n\t * </ul>\n\t * \n\t * @param localDomain     a predefined local domain enumeration\n\t * @param localIdentifier a local identifier\n\t * @return a UUIDv2\n\t * @see UuidLocalDomain\n\t * @see DceSecurityFactory\n\t */\n\tpublic static GUID getDceSecurityWithMac(UuidLocalDomain localDomain, int localIdentifier) {\n\t\treturn UUID2_MAC\n\t\t\t\t.create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build());\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * <p>\n\t * Local domains predefined by DCE 1.1 Authentication and Security Services\n\t * (Chapter 11):\n\t * <ul>\n\t * <li>LOCAL_DOMAIN_PERSON: 0 (interpreted as POSIX UID domain);\n\t * <li>LOCAL_DOMAIN_GROUP: 1 (interpreted as POSIX GID domain);\n\t * <li>LOCAL_DOMAIN_ORG: 2.\n\t * </ul>\n\t * \n\t * @param localDomain     a predefined local domain enumeration\n\t * @param localIdentifier a local identifier\n\t * @return a UUIDv2\n\t * @see UuidLocalDomain\n\t * @see DceSecurityFactory\n\t */\n\tpublic static GUID 
getDceSecurityWithHash(UuidLocalDomain localDomain, int localIdentifier) {\n\t\treturn UUID2_HASH\n\t\t\t\t.create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build());\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * <p>\n\t * Local domains predefined by DCE 1.1 Authentication and Security Services\n\t * (Chapter 11):\n\t * <ul>\n\t * <li>LOCAL_DOMAIN_PERSON: 0 (interpreted as POSIX UID domain);\n\t * <li>LOCAL_DOMAIN_GROUP: 1 (interpreted as POSIX GID domain);\n\t * <li>LOCAL_DOMAIN_ORG: 2.\n\t * </ul>\n\t * \n\t * @param localDomain     a predefined local domain enumeration\n\t * @param localIdentifier a local identifier\n\t * @return a UUIDv2\n\t * @see UuidLocalDomain\n\t * @see DceSecurityFactory\n\t */\n\tpublic static GUID getDceSecurityWithRandom(UuidLocalDomain localDomain, int localIdentifier) {\n\t\treturn UUID2_RANDOM\n\t\t\t\t.create(Parameters.builder().withLocalDomain(localDomain).withLocalIdentifier(localIdentifier).build());\n\t}\n\n\t/**\n\t * Returns a Prefix COMB GUID.\n\t * <p>\n\t * The creation millisecond is a 6 bytes PREFIX is at the MOST significant bits.\n\t * \n\t * @return a GUID\n\t * @see PrefixCombFactory\n\t * @see <a href=\"http://www.informit.com/articles/article.aspx?p=25862\">The Cost\n\t *      of GUIDs as Primary Keys</a>\n\t */\n\tpublic static GUID getPrefixComb() {\n\t\treturn COMB_PREFIX.create();\n\t}\n\n\t/**\n\t * Returns the minimum Prefix COMB GUID for a given instant.\n\t * <p>\n\t * The 48 bits of the time component are filled with the bits of the given\n\t * instant and the other 74 bits are all set to ZERO.\n\t * <p>\n\t * For example, the minimum GUID for 2022-02-22 22:22:22.222 is\n\t * `{@code 017f2387-460e-4000-8000-000000000000}`, where `{@code 017f2387-460e}`\n\t * is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` 
field.\n\t * \n\t * @param instant a given instant\n\t * @return a GUID\n\t */\n\tpublic static GUID getPrefixCombMin(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tfinal long time = instant.toEpochMilli();\n\t\treturn new UUID128((time << 16) | 0x4000L, 0x8000000000000000L);\n\t}\n\n\t/**\n\t * Returns the maximum Prefix COMB GUID for a given instant.\n\t * <p>\n\t * The 48 bits of the time component are filled with the bits of the given\n\t * instant and the other 74 bits are all set to ONE.\n\t * <p>\n\t * For example, the maximum GUID for 2022-02-22 22:22:22.222 is\n\t * `{@code 017f2387-460e-4fff-bfff-ffffffffffff}`, where `{@code 017f2387-460e}`\n\t * is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a GUID\n\t */\n\tpublic static GUID getPrefixCombMax(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tfinal long time = instant.toEpochMilli();\n\t\treturn new UUID128((time << 16) | 0x4fffL, 0xbfffffffffffffffL);\n\t}\n\n\t/**\n\t * Returns a Suffix COMB GUID.\n\t * \n\t * The creation millisecond is a 6 bytes SUFFIX is at the LEAST significant\n\t * bits.\n\t * \n\t * @return a GUID\n\t * @see SuffixCombFactory\n\t * @see <a href=\"http://www.informit.com/articles/article.aspx?p=25862\">The Cost\n\t *      of GUIDs as Primary Keys</a>\n\t */\n\tpublic static GUID getSuffixComb() {\n\t\treturn COMB_SUFFIX.create();\n\t}\n\n\t/**\n\t * Returns the minimum Suffix COMB GUID for a given instant.\n\t * <p>\n\t * The 48 bits of the time component are filled with the bits of the given\n\t * instant and the other 74 bits are all set to ZERO.\n\t * <p>\n\t * For example, the minimum GUID for 2022-02-22 22:22:22.222 is\n\t * `{@code 00000000-0000-4000-8000-017f2387460e}`, where `{@code 017f2387460e}`\n\t * is the timestamp in 
hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a GUID\n\t */\n\tpublic static GUID getSuffixCombMin(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tfinal long time = instant.toEpochMilli();\n\t\treturn new UUID128(0x0000000000004000L, 0x8000000000000000L | (time & 0x0000ffffffffffffL));\n\t}\n\n\t/**\n\t * Returns the maximum Suffix COMB GUID for a given instant.\n\t * <p>\n\t * The 48 bits of the time component are filled with the bits of the given\n\t * instant and the other 74 bits are all set to ONE.\n\t * <p>\n\t * For example, the maximum GUID for 2022-02-22 22:22:22.222 is\n\t * `{@code ffffffff-ffff-4fff-bfff-017f2387460e}`, where `{@code 017f2387460e}`\n\t * is the timestamp in hexadecimal.\n\t * <p>\n\t * It can be useful to find all records before or after a specific timestamp in\n\t * a table without a `{@code created_at}` field.\n\t * \n\t * @param instant a given instant\n\t * @return a GUID\n\t */\n\tpublic static GUID getSuffixCombMax(Instant instant) {\n\t\tObjects.requireNonNull(instant, \"Null instant\");\n\t\tfinal long time = instant.toEpochMilli();\n\t\treturn new UUID128(0xffffffffffff4fffL, 0xbfff000000000000L | (time & 0x0000ffffffffffffL));\n\t}\n\n\t/**\n\t * Returns n Short Prefix COMB GUID.\n\t * <p>\n\t * The creation minute is a 2 bytes PREFIX is at the MOST significant bits.\n\t * <p>\n\t * The prefix wraps around every ~45 days (2^16/60/24 = ~45).\n\t * \n\t * @return a GUID\n\t * @see ShortPrefixCombFactory\n\t * @see <a href=\n\t *      \"https://www.2ndquadrant.com/en/blog/sequential-uuid-generators\">Sequential\n\t *      UUID Generators</a>\n\t */\n\tpublic static GUID getShortPrefixComb() {\n\t\treturn COMB_SHORT_PREFIX.create();\n\t}\n\n\t/**\n\t * Returns a Short Suffix COMB GUID.\n\t * <p>\n\t * The creation minute is a 2 
bytes SUFFIX is at the LEAST significant bits.\n\t * <p>\n\t * The suffix wraps around every ~45 days (2^16/60/24 = ~45).\n\t * \n\t * @return a GUID\n\t * @see ShortSuffixCombFactory\n\t * @see <a href=\n\t *      \"https://www.2ndquadrant.com/en/blog/sequential-uuid-generators\">Sequential\n\t *      UUID Generators</a>\n\t */\n\tpublic static GUID getShortSuffixComb() {\n\t\treturn COMB_SHORT_SUFFIX.create();\n\t}\n\n\t// ***************************************\n\t// Lazy holders\n\t// ***************************************\n\n\tprivate static final Proxy UUID1 = new Proxy(Proxy.UUID1);\n\tprivate static final Proxy UUID1_MAC = new Proxy(Proxy.UUID1_MAC);\n\tprivate static final Proxy UUID1_HASH = new Proxy(Proxy.UUID1_HASH);\n\tprivate static final Proxy UUID1_RANDOM = new Proxy(Proxy.UUID1_RANDOM);\n\tprivate static final Proxy UUID2 = new Proxy(Proxy.UUID2);\n\tprivate static final Proxy UUID2_MAC = new Proxy(Proxy.UUID2_MAC);\n\tprivate static final Proxy UUID2_HASH = new Proxy(Proxy.UUID2_HASH);\n\tprivate static final Proxy UUID2_RANDOM = new Proxy(Proxy.UUID2_RANDOM);\n\tprivate static final Proxy UUID3 = new Proxy(Proxy.UUID3);\n\tprivate static final Proxy UUID4 = new Proxy(Proxy.UUID4);\n\tprivate static final Proxy UUID4_FAST = new Proxy(Proxy.UUID4_FAST);\n\tprivate static final Proxy UUID5 = new Proxy(Proxy.UUID5);\n\tprivate static final Proxy UUID6 = new Proxy(Proxy.UUID6);\n\tprivate static final Proxy UUID6_MAC = new Proxy(Proxy.UUID6_MAC);\n\tprivate static final Proxy UUID6_HASH = new Proxy(Proxy.UUID6_HASH);\n\tprivate static final Proxy UUID6_RANDOM = new Proxy(Proxy.UUID6_RANDOM);\n\tprivate static final Proxy UUID7 = new Proxy(Proxy.UUID7);\n\tprivate static final Proxy UUID7_FAST = new Proxy(Proxy.UUID7_FAST);\n\tprivate static final Proxy UUID7_PLUS_1 = new Proxy(Proxy.UUID7_PLUS_1);\n\tprivate static final Proxy UUID7_PLUS_N = new Proxy(Proxy.UUID7_PLUS_N);\n\tprivate static final Proxy COMB_PREFIX = new 
Proxy(Proxy.COMB_PREFIX);\n\tprivate static final Proxy COMB_SUFFIX = new Proxy(Proxy.COMB_SUFFIX);\n\tprivate static final Proxy COMB_SHORT_PREFIX = new Proxy(Proxy.COMB_SHORT_PREFIX);\n\tprivate static final Proxy COMB_SHORT_SUFFIX = new Proxy(Proxy.COMB_SHORT_SUFFIX);\n\n\tprivate static class Proxy extends UuidFactory {\n\n\t\tprivate UuidFactory factory = null;\n\t\tprivate Supplier<UuidFactory> supplier;\n\t\tprivate static final ReentrantLock lock = new ReentrantLock();\n\n\t\t// @formatter:off\n\t\tstatic final Supplier<UuidFactory> UUID1 = TimeBasedFactory::new;\n\t\tstatic final Supplier<UuidFactory> UUID1_MAC = () -> TimeBasedFactory.builder().withMacNodeId().build();\n\t\tstatic final Supplier<UuidFactory> UUID1_HASH = () -> TimeBasedFactory.builder().withHashNodeId().build();\n\t\tstatic final Supplier<UuidFactory> UUID1_RANDOM = () -> TimeBasedFactory.builder().withRandomNodeId().build();\n\t\tstatic final Supplier<UuidFactory> UUID2 = DceSecurityFactory::new;\n\t\tstatic final Supplier<UuidFactory> UUID2_MAC = () -> DceSecurityFactory.builder().withMacNodeId().build();\n\t\tstatic final Supplier<UuidFactory> UUID2_HASH = () -> DceSecurityFactory.builder().withHashNodeId().build();\n\t\tstatic final Supplier<UuidFactory> UUID2_RANDOM = () -> DceSecurityFactory.builder().withRandomNodeId().build();\n\t\tstatic final Supplier<UuidFactory> UUID3 = NameBasedMd5Factory::new;\n\t\tstatic final Supplier<UuidFactory> UUID4 = RandomBasedFactory::new;\n\t\tstatic final Supplier<UuidFactory> UUID4_FAST = () -> RandomBasedFactory.builder().withFastRandom().build();\n\t\tstatic final Supplier<UuidFactory> UUID5 = NameBasedSha1Factory::new;\n\t\tstatic final Supplier<UuidFactory> UUID6 = TimeOrderedFactory::new;\n\t\tstatic final Supplier<UuidFactory> UUID6_MAC = () -> TimeOrderedFactory.builder().withMacNodeId().build();\n\t\tstatic final Supplier<UuidFactory> UUID6_HASH = () -> TimeOrderedFactory.builder().withHashNodeId().build();\n\t\tstatic final 
Supplier<UuidFactory> UUID6_RANDOM = () -> TimeOrderedFactory.builder().withRandomNodeId().build();\n\t\tstatic final Supplier<UuidFactory> UUID7 = TimeOrderedEpochFactory::new;\n\t\tstatic final Supplier<UuidFactory> UUID7_FAST = () -> TimeOrderedEpochFactory.builder().withFastRandom().build();\n\t\tstatic final Supplier<UuidFactory> UUID7_PLUS_1 = () -> TimeOrderedEpochFactory.builder().withIncrementPlus1().build();\n\t\tstatic final Supplier<UuidFactory> UUID7_PLUS_N = () -> TimeOrderedEpochFactory.builder().withIncrementPlusN().build();\n\t\tstatic final Supplier<UuidFactory> COMB_PREFIX = PrefixCombFactory::new;\n\t\tstatic final Supplier<UuidFactory> COMB_SUFFIX = SuffixCombFactory::new;\n\t\tstatic final Supplier<UuidFactory> COMB_SHORT_PREFIX = ShortPrefixCombFactory::new;\n\t\tstatic final Supplier<UuidFactory> COMB_SHORT_SUFFIX = ShortSuffixCombFactory::new;\n\t\t// @formatter:on\n\n\t\tpublic Proxy(Supplier<UuidFactory> supplier) {\n\t\t\tthis.supplier = supplier;\n\t\t}\n\n\t\tprivate UuidFactory get() {\n\n\t\t\tif (factory != null) {\n\t\t\t\treturn factory;\n\t\t\t}\n\n\t\t\tlock.lock();\n\t\t\ttry {\n\t\t\t\tif (factory == null) {\n\t\t\t\t\tthis.factory = supplier.get();\n\t\t\t\t}\n\t\t\t\treturn this.factory;\n\t\t\t} finally {\n\t\t\t\tlock.unlock();\n\t\t\t}\n\t\t}\n\n\t\t@Override\n\t\tpublic GUID128 create() {\n\t\t\treturn this.get().create();\n\t\t}\n\n\n\n        @Override\n\t\tpublic GUID128 create(Parameters parameters) {\n\t\t\treturn get().create(parameters);\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/GuidCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\n\nimport java.util.UUID;\n\n/**\n * Interface to be implemented by all codecs of this package.\n * <p>\n * All implementations of this interface throw {@link InvalidUuidException} if\n * an invalid argument argument is given.\n * <p>\n * The {@link RuntimeException} cases that can be detected beforehand are\n * translated into an {@link InvalidUuidException}.\n * \n * @param <T> the type encoded to and decoded from.\n * @see InvalidUuidException\n */\npublic interface GuidCodec<T> {\n\n\t/**\n\t * Get a generic type from a UUID.\n\t * \n\t * @param uuid a UUID\n\t * @return a generic 
type\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tT encode(GUID128 uuid);\n\n\t/**\n\t * Get a UUID from a generic type.\n\t * \n\t * @param type a generic type\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tGUID128 decode(T type);\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/StandardBinaryCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\n\nimport java.util.UUID;\n\n/**\n * Codec for UUID binary encoding as defined in RFC 9562.\n * <p>\n * The UUID is encoded as 16 octets (bytes).\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc9562.html\">RFC 9562</a>\n */\npublic class StandardBinaryCodec implements GuidCodec<byte[]> {\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final StandardBinaryCodec INSTANCE = new StandardBinaryCodec();\n\n\t/**\n\t * Get an array of bytes from a UUID.\n\t * 
\n\t * @param uuid a UUID\n\t * @return an array of bytes\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic byte[] encode(final GUID128 uuid) {\n\n\t\tUuidValidator.validate(uuid);\n\n\t\tfinal byte[] bytes = new byte[16];\n\t\tfinal long msb = uuid.getMostSignificantBits();\n\t\tfinal long lsb = uuid.getLeastSignificantBits();\n\n\t\tbytes[0x0] = (byte) (msb >>> 56);\n\t\tbytes[0x1] = (byte) (msb >>> 48);\n\t\tbytes[0x2] = (byte) (msb >>> 40);\n\t\tbytes[0x3] = (byte) (msb >>> 32);\n\t\tbytes[0x4] = (byte) (msb >>> 24);\n\t\tbytes[0x5] = (byte) (msb >>> 16);\n\t\tbytes[0x6] = (byte) (msb >>> 8);\n\t\tbytes[0x7] = (byte) (msb);\n\n\t\tbytes[0x8] = (byte) (lsb >>> 56);\n\t\tbytes[0x9] = (byte) (lsb >>> 48);\n\t\tbytes[0xa] = (byte) (lsb >>> 40);\n\t\tbytes[0xb] = (byte) (lsb >>> 32);\n\t\tbytes[0xc] = (byte) (lsb >>> 24);\n\t\tbytes[0xd] = (byte) (lsb >>> 16);\n\t\tbytes[0xe] = (byte) (lsb >>> 8);\n\t\tbytes[0xf] = (byte) (lsb);\n\n\t\treturn bytes;\n\t}\n\n\t/**\n\t * Get a UUID from an array of bytes.\n\t * \n\t * @param bytes an array of bytes\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(final byte[] bytes) {\n\n\t\tUuidValidator.validate(bytes);\n\n\t\tlong msb = 0;\n\t\tlong lsb = 0;\n\n\t\tmsb |= (bytes[0x0] & 0xffL) << 56;\n\t\tmsb |= (bytes[0x1] & 0xffL) << 48;\n\t\tmsb |= (bytes[0x2] & 0xffL) << 40;\n\t\tmsb |= (bytes[0x3] & 0xffL) << 32;\n\t\tmsb |= (bytes[0x4] & 0xffL) << 24;\n\t\tmsb |= (bytes[0x5] & 0xffL) << 16;\n\t\tmsb |= (bytes[0x6] & 0xffL) << 8;\n\t\tmsb |= (bytes[0x7] & 0xffL);\n\n\t\tlsb |= (bytes[0x8] & 0xffL) << 56;\n\t\tlsb |= (bytes[0x9] & 0xffL) << 48;\n\t\tlsb |= (bytes[0xa] & 0xffL) << 40;\n\t\tlsb |= (bytes[0xb] & 0xffL) << 32;\n\t\tlsb |= (bytes[0xc] & 0xffL) << 24;\n\t\tlsb |= (bytes[0xd] & 0xffL) << 16;\n\t\tlsb |= (bytes[0xe] & 0xffL) << 8;\n\t\tlsb |= (bytes[0xf] & 0xffL);\n\n\t\treturn new UUID128(msb, 
lsb);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/StandardStringCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.Base16Codec;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\nimport com.pinecone.ulf.util.guid.i128.util.immutable.CharArray;\nimport com.pinecone.ulf.util.guid.i128.util.internal.JavaVersionUtil;\n\nimport java.util.UUID;\n\n/**\n * Codec for UUID canonical string as defined in RFC 9562.\n * <p>\n * In the canonical textual representation, the 16 bytes of a UUID are\n * represented as 32 hexadecimal (base-16) digits, displayed in five groups\n * separated by hyphens, in the form 
8-4-4-4-12 for a total of 36 characters (32\n * hexadecimal characters and 4 hyphens).\n * <p>\n * This codec decodes (parses) strings in these formats:\n * <ul>\n * <li>000000000000V0000000000000000000 (hexadecimal string)\n * <li>00000000-0000-0000-0000-000000000000 (THE canonical string)\n * <li>{00000000-0000-0000-0000-000000000000} (Microsoft string)\n * <li>urn:uuid:00000000-0000-0000-0000-000000000000 (URN string)\n * </ul>\n * <p>\n * The encoding and decoding processes can be much faster (7x) than\n * {@link UUID#toString()} and {@link UUID#fromString(String)} in JDK 8.\n * <p>\n * If you prefer a string representation without hyphens, use\n * {@link Base16Codec} instead of {@link StandardStringCodec}.\n * {@link Base16Codec} can be much faster (22x) than doing\n * <code>uuid.toString().replaceAll(\"-\", \"\")</code>.\n * <p>\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc9562.html\">RFC 9562</a>\n */\npublic class StandardStringCodec implements GuidCodec<String> {\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final StandardStringCodec INSTANCE = new StandardStringCodec();\n\n\tprivate static final int DASH_POSITION_1 = 8;\n\tprivate static final int DASH_POSITION_2 = 13;\n\tprivate static final int DASH_POSITION_3 = 18;\n\tprivate static final int DASH_POSITION_4 = 23;\n\n\tprivate static final int LENGTH_WITH_DASH = 36;\n\tprivate static final int LENGTH_WITHOUT_DASH = 32;\n\tprivate static final int LENGTH_WITH_URN_PREFIX = 45;\n\tprivate static final int LENGTH_WITH_CURLY_BRACES = 38;\n\n\tprivate static final byte[] MAP = Base16Codec.INSTANCE.getBase().getMap().array();\n\tprivate static final CharArray ALPHABET = Base16Codec.INSTANCE.getBase().getAlphabet();\n\n\tprivate static final String URN_PREFIX = \"urn:uuid:\";\n\tprivate static final boolean JAVA_VERSION_GREATER_THAN_8 = JavaVersionUtil.getJavaVersion() > 8;\n\n\t/**\n\t * Get a string from a UUID.\n\t * <p>\n\t * It can be much faster than {@link 
UUID#toString()} in JDK 8.\n\t * \n\t * @param uuid a UUID\n\t * @return a UUID string\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic String encode(GUID128 uuid) {\n\n\t\tUuidValidator.validate(uuid);\n\n\t\tif (JAVA_VERSION_GREATER_THAN_8) {\n\t\t\treturn uuid.toString();\n\t\t}\n\n\t\tfinal char[] chars = new char[36];\n\t\tfinal long msb = uuid.getMostSignificantBits();\n\t\tfinal long lsb = uuid.getLeastSignificantBits();\n\n\t\tchars[0x00] = ALPHABET.get((int) (msb >>> 0x3c & 0xf));\n\t\tchars[0x01] = ALPHABET.get((int) (msb >>> 0x38 & 0xf));\n\t\tchars[0x02] = ALPHABET.get((int) (msb >>> 0x34 & 0xf));\n\t\tchars[0x03] = ALPHABET.get((int) (msb >>> 0x30 & 0xf));\n\t\tchars[0x04] = ALPHABET.get((int) (msb >>> 0x2c & 0xf));\n\t\tchars[0x05] = ALPHABET.get((int) (msb >>> 0x28 & 0xf));\n\t\tchars[0x06] = ALPHABET.get((int) (msb >>> 0x24 & 0xf));\n\t\tchars[0x07] = ALPHABET.get((int) (msb >>> 0x20 & 0xf));\n\t\tchars[0x08] = '-'; // 8\n\t\tchars[0x09] = ALPHABET.get((int) (msb >>> 0x1c & 0xf));\n\t\tchars[0x0a] = ALPHABET.get((int) (msb >>> 0x18 & 0xf));\n\t\tchars[0x0b] = ALPHABET.get((int) (msb >>> 0x14 & 0xf));\n\t\tchars[0x0c] = ALPHABET.get((int) (msb >>> 0x10 & 0xf));\n\t\tchars[0x0d] = '-'; // 13\n\t\tchars[0x0e] = ALPHABET.get((int) (msb >>> 0x0c & 0xf));\n\t\tchars[0x0f] = ALPHABET.get((int) (msb >>> 0x08 & 0xf));\n\t\tchars[0x10] = ALPHABET.get((int) (msb >>> 0x04 & 0xf));\n\t\tchars[0x11] = ALPHABET.get((int) (msb & 0xf));\n\t\tchars[0x12] = '-'; // 18\n\t\tchars[0x13] = ALPHABET.get((int) (lsb >>> 0x3c & 0xf));\n\t\tchars[0x14] = ALPHABET.get((int) (lsb >>> 0x38 & 0xf));\n\t\tchars[0x15] = ALPHABET.get((int) (lsb >>> 0x34 & 0xf));\n\t\tchars[0x16] = ALPHABET.get((int) (lsb >>> 0x30 & 0xf));\n\t\tchars[0x17] = '-'; // 23\n\t\tchars[0x18] = ALPHABET.get((int) (lsb >>> 0x2c & 0xf));\n\t\tchars[0x19] = ALPHABET.get((int) (lsb >>> 0x28 & 0xf));\n\t\tchars[0x1a] = ALPHABET.get((int) (lsb >>> 0x24 & 
0xf));\n\t\tchars[0x1b] = ALPHABET.get((int) (lsb >>> 0x20 & 0xf));\n\t\tchars[0x1c] = ALPHABET.get((int) (lsb >>> 0x1c & 0xf));\n\t\tchars[0x1d] = ALPHABET.get((int) (lsb >>> 0x18 & 0xf));\n\t\tchars[0x1e] = ALPHABET.get((int) (lsb >>> 0x14 & 0xf));\n\t\tchars[0x1f] = ALPHABET.get((int) (lsb >>> 0x10 & 0xf));\n\t\tchars[0x20] = ALPHABET.get((int) (lsb >>> 0x0c & 0xf));\n\t\tchars[0x21] = ALPHABET.get((int) (lsb >>> 0x08 & 0xf));\n\t\tchars[0x22] = ALPHABET.get((int) (lsb >>> 0x04 & 0xf));\n\t\tchars[0x23] = ALPHABET.get((int) (lsb & 0xf));\n\n\t\treturn new String(chars);\n\t}\n\n\t/**\n\t * Get a UUID from a string.\n\t * <p>\n\t * It accepts strings:\n\t * <ul>\n\t * <li>With URN prefix: \"urn:uuid:\";\n\t * <li>With curly braces: '{' and '}';\n\t * <li>With upper or lower case;\n\t * <li>With or without hyphens.\n\t * </ul>\n\t * <p>\n\t * It can be much faster than {@link UUID#fromString(String)} in JDK 8.\n\t * <p>\n\t * It also can be twice as fast as {@link UUID#fromString(String)} in JDK 11.\n\t * \n\t * @param string a UUID string\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(final String string) {\n\n\t\tif (string == null) {\n\t\t\tthrow InvalidUuidException.newInstance(null);\n\t\t}\n\n\t\tfinal String modified = modify(string);\n\n\t\tif (modified.length() == LENGTH_WITH_DASH) {\n\t\t\tvalidate(modified);\n\t\t\treturn parse(modified);\n\t\t}\n\n\t\tif (modified.length() == LENGTH_WITHOUT_DASH) {\n\t\t\treturn Base16Codec.INSTANCE.decode(modified);\n\t\t}\n\n\t\tthrow InvalidUuidException.newInstance(modified);\n\t}\n\n\tprivate GUID128 parse(final String string) {\n\n\t\tlong msb = 0L;\n\t\tlong lsb = 0L;\n\n\t\tfor (int i = 0; i < 8; i++) {\n\t\t\tmsb = (msb << 4) | get(string, i);\n\t\t}\n\n\t\tfor (int i = 9; i < 13; i++) {\n\t\t\tmsb = (msb << 4) | get(string, i);\n\t\t}\n\n\t\tfor (int i = 14; i < 18; i++) {\n\t\t\tmsb = (msb << 4) | get(string, 
i);\n\t\t}\n\n\t\tfor (int i = 19; i < 23; i++) {\n\t\t\tlsb = (lsb << 4) | get(string, i);\n\t\t}\n\n\t\tfor (int i = 24; i < 36; i++) {\n\t\t\tlsb = (lsb << 4) | get(string, i);\n\t\t}\n\n\t\treturn new UUID128(msb, lsb);\n\t}\n\n\tprotected static String modify(final String string) {\n\n\t\t// UUID URN format: \"urn:uuid:00000000-0000-0000-0000-000000000000\"\n\t\tif (string.length() == LENGTH_WITH_URN_PREFIX && string.startsWith(URN_PREFIX)) {\n\t\t\treturn string.substring(URN_PREFIX.length()); // Remove the URN prefix: \"urn:uuid:\"\n\t\t}\n\n\t\t// Curly braces format: \"{00000000-0000-0000-0000-000000000000}\"\n\t\tif (string.length() == LENGTH_WITH_CURLY_BRACES && string.startsWith(\"{\") && string.endsWith(\"}\")) {\n\t\t\treturn string.substring(1, string.length() - 1); // Remove curly braces: '{' and '}'\n\t\t}\n\n\t\treturn string;\n\t}\n\n\tprivate static void validate(final String string) {\n\t\tif (string.charAt(DASH_POSITION_1) != '-' || string.charAt(DASH_POSITION_2) != '-'\n\t\t\t\t|| string.charAt(DASH_POSITION_3) != '-' || string.charAt(DASH_POSITION_4) != '-') {\n\t\t\tthrow InvalidUuidException.newInstance(string);\n\t\t}\n\t}\n\n\tprivate long get(final String string, final int i) {\n\n\t\tfinal int chr = string.charAt(i);\n\t\tif (chr > 255) {\n\t\t\tthrow InvalidUuidException.newInstance(string);\n\t\t}\n\n\t\tfinal byte value = MAP[chr];\n\t\tif (value < 0) {\n\t\t\tthrow InvalidUuidException.newInstance(string);\n\t\t}\n\n\t\treturn value & 0xffL;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/UriCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\n\nimport java.net.URI;\nimport java.util.UUID;\n\n/**\n * Codec for UUID URIs (specifically URNs).\n * <p>\n * {@link UriCodec} encodes UUID to and from an opaque {@link URI}.\n * <p>\n * The URN representation adds the prefix 'urn:uuid:' to a UUID canonical\n * representation.\n * \n * See: https://github.com/f4b6a3/uuid-creator/issues/32\n * \n * \n *  * @see InvalidUuidException\n * @see <a href=\n *      \"https://github.com/f4b6a3/uuid-creator/issues/32\">UUID URIs</a>\n * @see <a href=\n *      \"https://github.com/f4b6a3/uuid-creator/issues/66\">UriCodec.isUuidUrn(java.net.URI\n *      uri)</a>\n * @see <a href=\n *      
\"https://stackoverflow.com/questions/4913343/what-is-the-difference-between-uri-url-and-urn\">What\n *      is the difference between URI, URL and URN?</a>\n */\npublic class UriCodec implements GuidCodec<URI> {\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final UriCodec INSTANCE = new UriCodec();\n\n\t/**\n\t * Get a URI from a UUID.\n\t * \n\t * @param uuid a UUID\n\t * @return a URI\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic URI encode(GUID128 uuid) {\n\t\treturn URI.create(UrnCodec.INSTANCE.encode(uuid));\n\t}\n\n\t/**\n\t * Get a UUID from a URI.\n\t * \n\t * @param uri a URI\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(URI uri) {\n\t\tif (!isUuidUri(uri)) {\n\t\t\tthrow InvalidUuidException.newInstance(uri);\n\t\t}\n\t\treturn StandardStringCodec.INSTANCE.decode(uri.toString());\n\t}\n\n\t/**\n\t * Check if the URI is a UUID URN.\n\t * \n\t * @param uri a URI\n\t * @return true if the it's a URN\n\t */\n\tpublic static boolean isUuidUri(URI uri) {\n\t\treturn uri != null && UrnCodec.isUuidUrn(uri.toString());\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/UrnCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\n\nimport java.util.UUID;\n\n/**\n * Codec for UUID URNs.\n * <p>\n * {@link UrnCodec} encodes UUID to and from an URN.\n * <p>\n * The URN representation adds the prefix 'urn:uuid:' to a UUID canonical\n * representation.\n * \n * @see InvalidUuidException\n * @see <a href= \"https://github.com/f4b6a3/uuid-creator/issues/32\">UUID\n *      URIs</a>\n * @see <a href=\n *      \"https://github.com/f4b6a3/uuid-creator/issues/66\">UriCodec.isUuidUrn(java.net.URI\n *      uri)</a>\n * @see <a href=\n *      
\"https://stackoverflow.com/questions/4913343/what-is-the-difference-between-uri-url-and-urn\">What\n *      is the difference between URI, URL and URN?</a>\n */\npublic class UrnCodec implements GuidCodec<String> {\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final UrnCodec INSTANCE = new UrnCodec();\n\n\tprivate static final String URN_PREFIX = \"urn:uuid:\";\n\n\t/**\n\t * Get a URN string from a UUID.\n\t * \n\t * @param uuid a UUID\n\t * @return a URN string\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic String encode(GUID128 uuid) {\n\t\tUuidValidator.validate(uuid);\n\t\treturn URN_PREFIX + StandardStringCodec.INSTANCE.encode(uuid);\n\t}\n\n\t/**\n\t * Get a UUID from a URN string.\n\t * \n\t * @param urn a URN string\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(String urn) {\n\t\tif (!isUuidUrn(urn)) {\n\t\t\tthrow InvalidUuidException.newInstance(urn);\n\t\t}\n\t\treturn StandardStringCodec.INSTANCE.decode(urn);\n\t}\n\n\t/**\n\t * Check if a URN string is a UUID URN.\n\t * \n\t * @param urn a string\n\t * @return true if the it's a URN\n\t */\n\tpublic static boolean isUuidUrn(String urn) {\n\t\tfinal int stringLength = 45; // URN string length\n\t\tfinal int prefixLength = 9; // URN prefix length\n\t\tif (urn != null && urn.length() == stringLength) {\n\t\t\tString uuid = urn.substring(prefixLength);\n\t\t\treturn UuidValidator.isValid(uuid);\n\t\t}\n\t\treturn false;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base16Codec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base;\n\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.Base16Decoder;\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.Base16Encoder;\n\n/**\n * Codec for base-16 as defined in RFC-4648.\n * <p>\n * It is case insensitive, so it decodes from lower and upper case, but encodes\n * to lower case only.\n * <p>\n * It can be up to 22x faster than doing\n * <code>uuid.toString().replaceAll(\"-\", \"\")`</code>.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base16Codec extends BaseNCodec {\n\n\tprivate static final BaseN BASE_N = new BaseN(\"0-9a-f\");\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final Base16Codec INSTANCE = new Base16Codec();\n\n\t/**\n\t * 
Default constructor.\n\t */\n\tpublic Base16Codec() {\n\t\tsuper(BASE_N, new Base16Encoder(BASE_N), new Base16Decoder(BASE_N));\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base32Codec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base;\n\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.Base32Decoder;\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.Base32Encoder;\n\n/**\n * Codec for base-32 as defined in RFC-4648.\n * <p>\n * It is case insensitive, so it decodes from lower and upper case, but encodes\n * to lower case only.\n * <p>\n * This codec complies with RFC-4648, encoding a byte array sequentially. 
If you\n * need a codec that encodes integers using the remainder operator (modulus),\n * use the static factory {@link BaseNCodec#newInstance(BaseN)}.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base32Codec extends BaseNCodec {\n\n\tprivate static final BaseN BASE_N = new BaseN(\"a-z2-7\");\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final Base32Codec INSTANCE = new Base32Codec();\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic Base32Codec() {\n\t\tsuper(BASE_N, new Base32Encoder(BASE_N), new Base32Decoder(BASE_N));\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base58BtcCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base;\n\n/**\n * Codec for base-58.\n * <p>\n * It is case SENSITIVE.\n * <p>\n * It encodes using remainder operator (modulus).\n * <p>\n * The alphabet for this codec is the same used in Bitcoin (BTC).\n * \n * @see <a href=\"https://tools.ietf.org/html/draft-msporny-base58-03\">The Base58 Encoding Scheme</a>\n */\npublic final class Base58BtcCodec extends BaseNCodec {\n\n\tprivate static final BaseN BASE_N = new BaseN(\"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz\");\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final Base58BtcCodec INSTANCE = new Base58BtcCodec();\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic Base58BtcCodec() {\n\t\tsuper(BASE_N);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base62Codec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base;\n\n/**\n * Codec for base-62.\n * <p>\n * It is case SENSITIVE.\n * <p>\n * It encodes using remainder operator (modulus).\n */\npublic final class Base62Codec extends BaseNCodec {\n\n\tprivate static final BaseN BASE_N = new BaseN(\"0-9A-Za-z\");\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final Base62Codec INSTANCE = new Base62Codec();\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic Base62Codec() {\n\t\tsuper(BASE_N);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base64Codec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base;\n\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.Base64Decoder;\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.Base64Encoder;\n\n/**\n * Codec for base-64 as defined in RFC-4648.\n * <p>\n * It is case SENSITIVE.\n * <p>\n * The only difference between base-64 and base-64-url is that the second\n * substitutes the chars '+' and '/' with '-' and '_'.\n * <p>\n * This codec complies with RFC-4648, encoding a byte array sequentially. 
If you\n * need a codec that encodes integers using the remainder operator (modulus),\n * use the static factory {@link BaseNCodec#newInstance(BaseN)}.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base64Codec extends BaseNCodec {\n\n\tprivate static final BaseN BASE_N = new BaseN(\"A-Za-z0-9+/\");\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final Base64Codec INSTANCE = new Base64Codec();\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic Base64Codec() {\n\t\tsuper(BASE_N, new Base64Encoder(BASE_N), new Base64Decoder(BASE_N));\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/Base64UrlCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base;\n\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.Base64Decoder;\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.Base64Encoder;\n\n/**\n * Codec for base-64-url as defined in RFC-4648.\n * <p>\n * It is case SENSITIVE.\n * <p>\n * The only difference between base-64 and base-64-url is that the second\n * substitutes the chars '+' and '/' with '-' and '_'.\n * <p>\n * This codec complies with RFC-4648, encoding a byte array sequentially. 
If you\n * need a codec that encodes integers using the remainder operator (modulus),\n * use the static factory {@link BaseNCodec#newInstance(BaseN)}.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base64UrlCodec extends BaseNCodec {\n\n\tprivate static final BaseN BASE_N = new BaseN(\"A-Za-z0-9-_\");\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final Base64UrlCodec INSTANCE = new Base64UrlCodec();\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic Base64UrlCodec() {\n\t\tsuper(BASE_N, new Base64Encoder(BASE_N), new Base64Decoder(BASE_N));\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/BaseN.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base;\n\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.immutable.ByteArray;\nimport com.pinecone.ulf.util.guid.i128.util.immutable.CharArray;\n\nimport java.util.Arrays;\n\n/**\n * Class that represents the base-n encodings.\n */\npublic final class BaseN {\n\n\tprivate final int radix;\n\tprivate final int length;\n\tprivate final char padding;\n\tprivate final boolean sensitive;\n\n\tprivate final CharArray alphabet;\n\tprivate final ByteArray map;\n\n\t/**\n\t * The minimum radix: 2.\n\t */\n\tprotected static final int RADIX_MIN = 2;\n\t/**\n\t * The maximum radix: 64.\n\t */\n\tprotected static final int RADIX_MAX = 64;\n\n\t/**\n\t * The default alphabet for case-insensitive 
base-n.\n\t */\n\tprotected static final String ALPHABET_36 = \"0123456789abcdefghijklmnopqrstuvwxyz\";\n\t/**\n\t * The default alphabet for case-sensitive base-n.\n\t */\n\tprotected static final String ALPHABET_64 = \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-_\";\n\n\tprivate static final int UUID_BITS = 128;\n\n\t/**\n\t * Public constructor for the base-n object.\n\t * <p>\n\t * The radix is the alphabet size.\n\t * <p>\n\t * The supported alphabet sizes are from 2 to 64.\n\t * <p>\n\t * If there are mixed cases in the alphabet, the base-n is case SENSITIVE.\n\t * <p>\n\t * The encoded string length is equal to `CEIL(128 / LOG2(n))`, where n is the\n\t * radix. The encoded string is padded to fit the expected length.\n\t * <p>\n\t * The padding character is the first character of the string. For example, the\n\t * padding character for the alphabet \"abcdef0123456\" is 'a'.\n\t * <p>\n\t * The example below shows how to create a {@link BaseN} for an hypothetical\n\t * base-26 encoding that contains only letters. You only need to pass a number\n\t * 40.\n\t * \n\t * <pre>{@code\n\t * String radix = 40;\n\t * BaseN base = new BaseN(radix);\n\t * }</pre>\n\t * \n\t * <p>\n\t * If radix is greater than 36, the alphabet generated is a subset of the\n\t * character sequence \"0-9A-Za-z-_\". Otherwise it is a subset of \"0-9a-z\". In\n\t * the example above the resulting alphabet is\n\t * \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcd\" (0-9A-Za-d).\n\t * \n\t * @param radix the radix to be used\n\t */\n\tpublic BaseN(int radix) {\n\t\tthis(expand(radix));\n\t}\n\n\t/**\n\t * Public constructor for the base-n object.\n\t * <p>\n\t * The radix is the alphabet size.\n\t * <p>\n\t * The supported alphabet sizes are from 2 to 64.\n\t * <p>\n\t * If there are mixed cases in the alphabet, the base-n is case SENSITIVE.\n\t * <p>\n\t * The encoded string length is equal to `CEIL(128 / LOG2(n))`, where n is the\n\t * radix. 
The encoded string is padded to fit the expected length.\n\t * <p>\n\t * The padding character is the first character of the string. For example, the\n\t * padding character for the alphabet \"abcdef0123456\" is 'a'.\n\t * <p>\n\t * The example below shows how to create a {@link BaseN} for an hypothetical\n\t * base-26 encoding that contains only letters. You only need to pass a string\n\t * with 26 characters.\n\t * \n\t * <pre>{@code\n\t * String alphabet = \"abcdefghijklmnopqrstuvwxyz\";\n\t * BaseN base = new BaseN(alphabet);\n\t * }</pre>\n\t * \n\t * Alphabet strings similar to \"a-f0-9\" are expanded to \"abcdef0123456789\". The\n\t * same example using the string \"a-z\" instead of \"abcdefghijklmnopqrstuvwxyz\":\n\t * \n\t * <pre>{@code\n\t * String alphabet = \"a-z\";\n\t * BaseN base = new BaseN(alphabet);\n\t * }</pre>\n\t * \n\t * @param alphabet the alphabet to be used\n\t */\n\tpublic BaseN(String alphabet) {\n\n\t\t// expand the alphabet, if necessary\n\t\tString charset = alphabet.indexOf('-') >= 0 ? 
expand(alphabet) : alphabet;\n\n\t\t// check the alphabet length\n\t\tif (charset.length() < RADIX_MIN || charset.length() > RADIX_MAX) {\n\t\t\tthrow new IllegalArgumentException(\"Unsupported length: \" + charset.length());\n\t\t}\n\n\t\t// set the radix field\n\t\tthis.radix = charset.length();\n\n\t\t// set the length field\n\t\tthis.length = (int) Math.ceil(UUID_BITS / (Math.log(this.radix) / Math.log(2)));\n\n\t\t// set the padding field\n\t\tthis.padding = charset.charAt(0);\n\n\t\t// set the sensitive field\n\t\tthis.sensitive = sensitive(charset);\n\n\t\t// set the alphabet field\n\t\tthis.alphabet = CharArray.from(charset.toCharArray());\n\n\t\t// set the map field\n\t\tthis.map = map(charset, sensitive);\n\t}\n\n\t/**\n\t * Returns the radix of the base-n.\n\t * \n\t * @return the radix\n\t */\n\tpublic int getRadix() {\n\t\treturn radix;\n\t}\n\n\t/**\n\t * Returns the length of encoded UUIDs.\n\t * \n\t * @return the length\n\t */\n\tpublic int getLength() {\n\t\treturn length;\n\t}\n\n\t/**\n\t * Return the padding character.\n\t * \n\t * @return a character\n\t */\n\tpublic char getPadding() {\n\t\treturn padding;\n\t}\n\n\t/**\n\t * Informs if the base-n is case-sensitive.\n\t * \n\t * @return true if it is case-sensitive\n\t */\n\tpublic boolean isSensitive() {\n\t\treturn sensitive;\n\t}\n\n\t/**\n\t * Returns the alphabet of the base-n.\n\t * \n\t * @return the alphabet\n\t */\n\tpublic CharArray getAlphabet() {\n\t\treturn this.alphabet;\n\t}\n\n\t/**\n\t * Returns the map of the base-n.\n\t * \n\t * @return a map\n\t */\n\tpublic ByteArray getMap() {\n\t\treturn this.map;\n\t}\n\n\t/**\n\t * Checks if the UUID string is valid.\n\t * \n\t * @param uuid a UUID string\n\t * @return true if valid, false if invalid\n\t */\n\tpublic boolean isValid(String uuid) {\n\t\tif (uuid == null || uuid.length() != this.length) {\n\t\t\treturn false;\n\t\t}\n\t\tfor (int i = 0; i < this.length; i++) {\n\t\t\tif (this.map.get(uuid.charAt(i)) == -1) 
{\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t\treturn true;\n\t}\n\n\t/**\n\t * Checks if the UUID string is valid.\n\t * \n\t * @param uuid a UUID string\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic void validate(String uuid) {\n\t\tif (!isValid(uuid)) {\n\t\t\tthrow InvalidUuidException.newInstance(uuid);\n\t\t}\n\t}\n\n\tprivate static boolean sensitive(String charset) {\n\t\tString lowercase = charset.toLowerCase();\n\t\tString uppercase = charset.toUpperCase();\n\t\treturn !(charset.equals(lowercase) || charset.equals(uppercase));\n\t}\n\n\tprivate static ByteArray map(String alphabet, boolean sensitive) {\n\t\t\n\t\t// initialize the map with -1\n\t\tbyte[] mapping = new byte[256];\n\t\tArrays.fill(mapping, (byte) -1);\n\t\t\n\t\t// map the alphabets chars to values\n\t\tfor (int i = 0; i < alphabet.length(); i++) {\n\t\t\tif (sensitive) {\n\t\t\t\tmapping[alphabet.charAt(i)] = (byte) i;\n\t\t\t} else {\n\t\t\t\tmapping[alphabet.toLowerCase().charAt(i)] = (byte) i;\n\t\t\t\tmapping[alphabet.toUpperCase().charAt(i)] = (byte) i;\n\t\t\t}\n\t\t}\n\t\t\n\t\treturn ByteArray.from(mapping);\n\t}\n\n\tprivate static String expand(int radix) {\n\t\tif (radix < RADIX_MIN || radix > RADIX_MAX) {\n\t\t\tthrow new IllegalArgumentException(\"Unsupported radix: \" + radix);\n\t\t}\n\t\tif (radix > 36) {\n\t\t\treturn ALPHABET_64.substring(0, radix); // 0-9A-Za-z-_\n\t\t}\n\t\treturn ALPHABET_36.substring(0, radix); // 0-9a-z\n\t}\n\n\t/**\n\t * Expands character sequences similar to 0-9, a-z and A-Z.\n\t * \n\t * @param string a string to be expanded\n\t * @return a string\n\t */\n\tprotected static String expand(String string) {\n\n\t\tStringBuilder buffer = new StringBuilder();\n\n\t\tint i = 1;\n\t\twhile (i <= string.length()) {\n\t\t\tfinal char a = string.charAt(i - 1); // previous char\n\t\t\tif ((i < string.length() - 1) && (string.charAt(i) == '-')) {\n\t\t\t\tfinal char b = string.charAt(i + 1); // next char\n\t\t\t\tchar[] 
expanded = expand(a, b);\n\t\t\t\tif (expanded.length != 0) {\n\t\t\t\t\ti += 2; // skip\n\t\t\t\t\tbuffer.append(expanded);\n\t\t\t\t} else {\n\t\t\t\t\tbuffer.append(a);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tbuffer.append(a);\n\t\t\t}\n\t\t\ti++;\n\t\t}\n\n\t\treturn buffer.toString();\n\t}\n\n\t/**\n\t * Expands a character sequence similar to 0-9, a-z and A-Z.\n\t * \n\t * @param a the first character of the sequence\n\t * @param b the last character of the sequence\n\t * @return an expanded sequence of characters\n\t */\n\tprotected static char[] expand(char a, char b) {\n\t\tchar[] expanded = expand(a, b, '0', '9'); // digits (0-9)\n\t\tif (expanded.length == 0) {\n\t\t\texpanded = expand(a, b, 'a', 'z'); // lower case letters (a-z)\n\t\t}\n\t\tif (expanded.length == 0) {\n\t\t\texpanded = expand(a, b, 'A', 'Z'); // upper case letters (A-Z)\n\t\t}\n\t\treturn expanded;\n\t}\n\n\tprivate static char[] expand(char a, char b, char min, char max) {\n\t\tif (!isValidRange(a, b, min, max)) {\n\t\t\treturn new char[0];\n\t\t}\n\n\t\treturn fillRange(a, b);\n\t}\n\n\tprivate static boolean isValidRange(char start, char end, char min, char max) {\n\t\treturn start <= end && start >= min && end <= max;\n\t}\n\n\tprivate static char[] fillRange(char start, char end) {\n\t\tchar[] buffer = new char[(end - start) + 1];\n\t\tfor (int i = 0; i < buffer.length; i++) {\n\t\t\tbuffer[i] = (char) (start + i);\n\t\t}\n\t\treturn buffer;\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/BaseNCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.GuidCodec;\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.BaseNDecoder;\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.BaseNEncoder;\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.BaseNRemainderDecoder;\nimport com.pinecone.ulf.util.guid.i128.codec.base.function.BaseNRemainderEncoder;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\n\nimport java.util.UUID;\nimport java.util.function.Function;\n\n/**\n * Abstract class that contains the basic functionality for base-n codecs of\n * this 
package.\n */\npublic abstract class BaseNCodec implements GuidCodec<String> {\n\n\t/**\n\t * The base-n.\n\t */\n\tprotected final BaseN base;\n\n\t/**\n\t * An encoder function.\n\t */\n\tprotected final Function<GUID128, String> encoder;\n\t/**\n\t * A decoder function.\n\t */\n\tprotected final Function<String, GUID128> decoder;\n\n\t/**\n\t * A division function that returns quotient and remainder.\n\t * <p>\n\t * It MUST perform SIGNED long division.\n\t * <p>\n\t * Example:\n\t * \n\t * <pre>{@code\n\t * \n\t * CustomDivider divideBy64 = x -> new long[] { x / 64, x % 64 };\n\t * \n\t * long[] answer = divideBy64(1024);\n\t * \n\t * }</pre>\n\t */\n\t@FunctionalInterface\n\tpublic interface CustomDivider {\n\t\t/**\n\t\t * Divide a number by x.\n\t\t * \n\t\t * Returned pair of longs: [x / divider, x % divider]\n\t\t * \n\t\t * @param x the divisor\n\t\t * @return a pair of longs\n\t\t */\n\t\tlong[] divide(long x);\n\t}\n\n\t/**\n\t * @param base an object that represents the base-n encoding\n\t */\n\tprotected BaseNCodec(BaseN base) {\n\t\tthis(base, null);\n\t}\n\n\t/**\n\t * @param base    an object that represents the base-n encoding\n\t * @param divider a division function that returns quotient and remainder\n\t */\n\tprotected BaseNCodec(BaseN base, CustomDivider divider) {\n\t\tthis(base, new BaseNRemainderEncoder(base, divider), new BaseNRemainderDecoder(base));\n\t}\n\n\t/**\n\t * @param base    an object that represents the base-n encoding\n\t * @param encoder a functional encoder\n\t * @param decoder a functional decoder\n\t */\n\tprotected BaseNCodec(BaseN base, BaseNEncoder encoder, BaseNDecoder decoder) {\n\t\tthis.base = base;\n\t\tthis.encoder = encoder;\n\t\tthis.decoder = decoder;\n\t}\n\n\t/**\n\t * Static factory that returns a new instance of {@link BaseNCodec} using the\n\t * specified {@link BaseN}.\n\t * <p>\n\t * This method can be used if none of the existing concrete codecs of this\n\t * package class is desired.\n\t * <p>\n\t * 
The {@link BaseNCodec} objects provided by this method encode UUIDs using\n\t * remainder operation (modulus), a common approach to encode integers.\n\t * <p>\n\t * If you need a {@link BaseN} that is not available in this package, use the\n\t * static factories {@link BaseNCodec#newInstance(String)} or\n\t * {@link BaseNCodec#newInstance(int)}.\n\t * \n\t * @param base an object that represents the base-n encoding\n\t * @return a {@link BaseNCodec}\n\t */\n\tpublic static BaseNCodec newInstance(BaseN base) {\n\t\treturn newInstance(base, null);\n\t}\n\n\t/**\n\t * Static factory that returns a new instance of {@link BaseNCodec} using the\n\t * specified radix.\n\t * <p>\n\t * This method can be used if none of the existing concrete codecs of this\n\t * package class is desired.\n\t * <p>\n\t * The {@link BaseNCodec} objects provided by this method encode UUIDs using\n\t * remainder operator (modulus), a common approach to encode integers.\n\t * <p>\n\t * The example below shows how to create a {@link BaseNCodec} for an\n\t * hypothetical base-40 encoding that contains only letters. You only need to\n\t * pass a number 40. The {@link BaseNCodec} instantiates a {@link BaseN} object\n\t * internally. See {@link BaseN}.\n\t * \n\t * <pre>{@code\n\t * String radix = 40;\n\t * BaseNCodec codec = BaseNCodec.newInstance(radix);\n\t * }</pre>\n\t * \n\t * <p>\n\t * If radix is greater than 36, the alphabet generated is a subset of the\n\t * character sequence \"0-9A-Za-z-_\". Otherwise it is a subset of \"0-9a-z\". 
In\n\t * the example above the resulting alphabet is\n\t * \"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcd\" (0-9A-Za-d).\n\t * \n\t * @param radix the radix to be used\n\t * @return a {@link BaseNCodec}\n\t */\n\tpublic static BaseNCodec newInstance(int radix) {\n\t\treturn newInstance(radix, null);\n\t}\n\n\t/**\n\t * Static factory that returns a new instance of {@link BaseNCodec} using the\n\t * specified alphabet.\n\t * <p>\n\t * This method can be used if none of the existing concrete codecs of this\n\t * package class is desired.\n\t * <p>\n\t * The {@link BaseNCodec} objects provided by this method encode UUIDs using\n\t * remainder operator (modulus), a common approach to encode integers.\n\t * <p>\n\t * The example below shows how to create a {@link BaseNCodec} for an\n\t * hypothetical base-26 encoding that contains only letters. You only need to\n\t * pass a string with 26 characters. The {@link BaseNCodec} instantiates a\n\t * {@link BaseN} object internally. See {@link BaseN}.\n\t * \n\t * <pre>{@code\n\t * String alphabet = \"abcdefghijklmnopqrstuvwxyz\";\n\t * BaseNCodec codec = BaseNCodec.newInstance(alphabet);\n\t * }</pre>\n\t * \n\t * <p>\n\t * Alphabet strings similar to \"a-f0-9\" are expanded to \"abcdef0123456789\". 
The\n\t * same example using the string \"a-z\" instead of \"abcdefghijklmnopqrstuvwxyz\":\n\t * \n\t * <pre>{@code\n\t * String alphabet = \"a-z\";\n\t * BaseNCodec codec = BaseNCodec.newInstance(alphabet);\n\t * }</pre>\n\t * \n\t * @param alphabet the alphabet to be used\n\t * @return a {@link BaseNCodec}\n\t */\n\tpublic static BaseNCodec newInstance(String alphabet) {\n\t\treturn newInstance(alphabet, null);\n\t}\n\n\t/**\n\t * Static factory that returns a new instance of {@link BaseNCodec} using the\n\t * specified {@link BaseN} and a {@link CustomDivider}.\n\t * \n\t * @param base    an object that represents the base-n encoding\n\t * @param divider a division function that returns quotient and remainder\n\t * @return a {@link BaseNCodec}\n\t */\n\tpublic static BaseNCodec newInstance(BaseN base, CustomDivider divider) {\n\t\treturn new BaseNCodec(base, divider) {\n\t\t};\n\t}\n\n\t/**\n\t * Static factory that returns a new instance of {@link BaseNCodec} using the\n\t * specified radix and a {@link CustomDivider}.\n\t * \n\t * @param radix   the radix to be used\n\t * @param divider a division function that returns quotient and remainder\n\t * @return a {@link BaseNCodec}\n\t */\n\tpublic static BaseNCodec newInstance(int radix, CustomDivider divider) {\n\t\tBaseN base = new BaseN(radix);\n\t\treturn newInstance(base, divider);\n\t}\n\n\t/**\n\t * Static factory that returns a new instance of {@link BaseNCodec} using the\n\t * specified alphabet and a {@link CustomDivider}.\n\t * \n\t * @param alphabet the alphabet to be used\n\t * @param divider  a division function that returns quotient and remainder\n\t * @return a {@link BaseNCodec}\n\t */\n\tpublic static BaseNCodec newInstance(String alphabet, CustomDivider divider) {\n\t\tBaseN base = new BaseN(alphabet);\n\t\treturn newInstance(base, divider);\n\t}\n\n\t/**\n\t * Get the base-n encoding object.\n\t * \n\t * @return a base-n encoding object\n\t */\n\tpublic BaseN getBase() {\n\t\treturn 
this.base;\n\t}\n\n\t/**\n\t * Get an encoded string from a UUID.\n\t * \n\t * @param uuid a UUID\n\t * @return an encoded string\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic String encode(GUID128 uuid) {\n\t\ttry {\n\t\t\tUuidValidator.validate(uuid);\n\t\t\treturn encoder.apply(uuid);\n\t\t} catch (RuntimeException e) {\n\t\t\tthrow new InvalidUuidException(e.getMessage(), e);\n\t\t}\n\t}\n\n\t/**\n\t * Get a UUID from an encoded string.\n\t * \n\t * @param string the encoded string\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(String string) {\n\t\ttry {\n\t\t\tvalidate(string);\n\t\t\treturn decoder.apply(string);\n\t\t} catch (RuntimeException e) {\n\t\t\tthrow new InvalidUuidException(e.getMessage(), e);\n\t\t}\n\t}\n\n\tprotected void validate(String string) {\n\t\tif (string == null || string.length() != this.base.getLength()) {\n\t\t\tthrow InvalidUuidException.newInstance(string);\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base16Decoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\n\nimport java.util.UUID;\n\n/**\n * Function that decodes a base-16 string to a UUID.\n * <p>\n * It is case insensitive, so it decodes in lower case and upper case.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base16Decoder extends BaseNDecoder {\n\n\t/**\n\t * Constructor with a base-n.\n\t * \n\t * @param base a base-n\n\t */\n\tpublic Base16Decoder(BaseN base) {\n\t\tsuper(base);\n\t}\n\n\t@Override\n\tpublic GUID128 apply(String string) {\n\n\t\tlong msb = 0;\n\t\tlong lsb = 0;\n\n\t\tfor (int i = 0; i < 16; i++) 
{\n\t\t\tmsb = (msb << 4) | get(string, i);\n\t\t}\n\n\t\tfor (int i = 16; i < 32; i++) {\n\t\t\tlsb = (lsb << 4) | get(string, i);\n\t\t}\n\n\t\treturn new UUID128(msb, lsb);\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base16Encoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\n\nimport java.util.UUID;\n\n/**\n * Function that encodes a UUID to a base-16 string.\n * <p>\n * It encodes in lower case only.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base16Encoder extends BaseNEncoder {\n\n\tprivate static final int CHAR_LENGTH = 32;\n\n\t/**\n\t * Constructor with a base-n.\n\t * \n\t * @param base a base-n\n\t */\n\tpublic Base16Encoder(BaseN base) {\n\t\tsuper(base);\n\t}\n\n\t@Override\n\tpublic String apply(GUID128 uuid) {\n\n\t\tfinal char[] chars = new char[CHAR_LENGTH];\n\t\tfinal long msb = uuid.getMostSignificantBits();\n\t\tfinal long lsb = 
uuid.getLeastSignificantBits();\n\n\t\tchars[0x00] = get(msb >>> 0x3c & 0xf);\n\t\tchars[0x01] = get(msb >>> 0x38 & 0xf);\n\t\tchars[0x02] = get(msb >>> 0x34 & 0xf);\n\t\tchars[0x03] = get(msb >>> 0x30 & 0xf);\n\t\tchars[0x04] = get(msb >>> 0x2c & 0xf);\n\t\tchars[0x05] = get(msb >>> 0x28 & 0xf);\n\t\tchars[0x06] = get(msb >>> 0x24 & 0xf);\n\t\tchars[0x07] = get(msb >>> 0x20 & 0xf);\n\t\tchars[0x08] = get(msb >>> 0x1c & 0xf);\n\t\tchars[0x09] = get(msb >>> 0x18 & 0xf);\n\t\tchars[0x0a] = get(msb >>> 0x14 & 0xf);\n\t\tchars[0x0b] = get(msb >>> 0x10 & 0xf);\n\t\tchars[0x0c] = get(msb >>> 0x0c & 0xf);\n\t\tchars[0x0d] = get(msb >>> 0x08 & 0xf);\n\t\tchars[0x0e] = get(msb >>> 0x04 & 0xf);\n\t\tchars[0x0f] = get(msb >>> 0x00 & 0xf);\n\n\t\tchars[0x10] = get(lsb >>> 0x3c & 0xf);\n\t\tchars[0x11] = get(lsb >>> 0x38 & 0xf);\n\t\tchars[0x12] = get(lsb >>> 0x34 & 0xf);\n\t\tchars[0x13] = get(lsb >>> 0x30 & 0xf);\n\t\tchars[0x14] = get(lsb >>> 0x2c & 0xf);\n\t\tchars[0x15] = get(lsb >>> 0x28 & 0xf);\n\t\tchars[0x16] = get(lsb >>> 0x24 & 0xf);\n\t\tchars[0x17] = get(lsb >>> 0x20 & 0xf);\n\t\tchars[0x18] = get(lsb >>> 0x1c & 0xf);\n\t\tchars[0x19] = get(lsb >>> 0x18 & 0xf);\n\t\tchars[0x1a] = get(lsb >>> 0x14 & 0xf);\n\t\tchars[0x1b] = get(lsb >>> 0x10 & 0xf);\n\t\tchars[0x1c] = get(lsb >>> 0x0c & 0xf);\n\t\tchars[0x1d] = get(lsb >>> 0x08 & 0xf);\n\t\tchars[0x1e] = get(lsb >>> 0x04 & 0xf);\n\t\tchars[0x1f] = get(lsb >>> 0x00 & 0xf);\n\n\t\treturn new String(chars);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base32Decoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\n\nimport java.util.UUID;\n\n/**\n * Function that decodes a base-32 string to a UUID.\n * <p>\n * It is case insensitive, so it decodes in lower case and upper case.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base32Decoder extends BaseNDecoder {\n\n\t/**\n\t * Constructor with a base-n.\n\t * \n\t * @param base a base-n\n\t */\n\tpublic Base32Decoder(BaseN base) {\n\t\tsuper(base);\n\t}\n\n\t@Override\n\tpublic GUID128 apply(String string) {\n\n\t\tlong msb = 0;\n\t\tlong lsb = 0;\n\n\t\tfor (int i = 0; i < 12; i++) 
{\n\t\t\tmsb = (msb << 5) | get(string, i);\n\t\t}\n\n\t\tmsb = (msb << 4) | (get(string, 12) >>> 1);\n\t\tlsb = (lsb << 5) | get(string, 12);\n\n\t\tfor (int i = 13; i < 25; i++) {\n\t\t\tlsb = (lsb << 5) | get(string, i);\n\t\t}\n\n\t\tlsb = (lsb << 3) | (get(string, 25) >>> 2);\n\n\t\treturn new UUID128(msb, lsb);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base32Encoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\n\nimport java.util.UUID;\n\n/**\n * Function that encodes a UUID to a base-32 string.\n * <p>\n * It encodes in lower case only.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base32Encoder extends BaseNEncoder {\n\n\tprivate static final int CHAR_LENGTH = 26;\n\n\t/**\n\t * Constructor with a base-n.\n\t * \n\t * @param base a base-n\n\t */\n\tpublic Base32Encoder(BaseN base) {\n\t\tsuper(base);\n\t}\n\n\t@Override\n\tpublic String apply(GUID128 uuid) {\n\n\t\tfinal char[] chars = new char[CHAR_LENGTH];\n\t\tlong msb = uuid.getMostSignificantBits();\n\t\tlong lsb = 
uuid.getLeastSignificantBits();\n\n\t\tchars[0x00] = get((msb >>> 59) & 0b11111);\n\t\tchars[0x01] = get((msb >>> 54) & 0b11111);\n\t\tchars[0x02] = get((msb >>> 49) & 0b11111);\n\t\tchars[0x03] = get((msb >>> 44) & 0b11111);\n\t\tchars[0x04] = get((msb >>> 39) & 0b11111);\n\t\tchars[0x05] = get((msb >>> 34) & 0b11111);\n\t\tchars[0x06] = get((msb >>> 29) & 0b11111);\n\t\tchars[0x07] = get((msb >>> 24) & 0b11111);\n\t\tchars[0x08] = get((msb >>> 19) & 0b11111);\n\t\tchars[0x09] = get((msb >>> 14) & 0b11111);\n\t\tchars[0x0a] = get((msb >>> 9) & 0b11111);\n\t\tchars[0x0b] = get((msb >>> 4) & 0b11111);\n\n\t\tchars[0x0c] = get(((msb << 1) & 0b11111) | ((lsb >>> 63) & 0b11111));\n\n\t\tchars[0x0d] = get((lsb >>> 58) & 0b11111);\n\t\tchars[0x0e] = get((lsb >>> 53) & 0b11111);\n\t\tchars[0x0f] = get((lsb >>> 48) & 0b11111);\n\t\tchars[0x10] = get((lsb >>> 43) & 0b11111);\n\t\tchars[0x11] = get((lsb >>> 38) & 0b11111);\n\t\tchars[0x12] = get((lsb >>> 33) & 0b11111);\n\t\tchars[0x13] = get((lsb >>> 28) & 0b11111);\n\t\tchars[0x14] = get((lsb >>> 23) & 0b11111);\n\t\tchars[0x15] = get((lsb >>> 18) & 0b11111);\n\t\tchars[0x16] = get((lsb >>> 13) & 0b11111);\n\t\tchars[0x17] = get((lsb >>> 8) & 0b11111);\n\t\tchars[0x18] = get((lsb >>> 3) & 0b11111);\n\t\tchars[0x19] = get((lsb << 2) & 0b11111);\n\n\t\treturn new String(chars);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base64Decoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\n\nimport java.util.UUID;\n\n/**\n * Function that decodes a base-64 string to a UUID.\n * <p>\n * It is case SENSITIVE.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base64Decoder extends BaseNDecoder {\n\n\t/**\n\t * Constructor with a base-n.\n\t * \n\t * @param base a base-n\n\t */\n\tpublic Base64Decoder(BaseN base) {\n\t\tsuper(base);\n\t}\n\n\t@Override\n\tpublic GUID128 apply(String string) {\n\n\t\tlong msb = 0;\n\t\tlong lsb = 0;\n\n\t\tfor (int i = 0; i < 10; i++) {\n\t\t\tmsb = (msb << 6) | get(string, 
i);\n\t\t}\n\n\t\tmsb = (msb << 4) | (get(string, 10) >>> 2);\n\t\tlsb = (lsb << 6) | get(string, 10);\n\n\t\tfor (int i = 11; i < 21; i++) {\n\t\t\tlsb = (lsb << 6) | get(string, i);\n\t\t}\n\n\t\tlsb = (lsb << 2) | (get(string, 21) >>> 4);\n\n\t\treturn new UUID128(msb, lsb);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/Base64Encoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\n\nimport java.util.UUID;\n\n/**\n * Function that encodes a UUID to a base-64 string.\n * \n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc4648\">RFC-4648</a>\n */\npublic final class Base64Encoder extends BaseNEncoder {\n\n\tprivate static final int CHAR_LENGTH = 22;\n\n\t/**\n\t * Constructor with a base-n.\n\t * \n\t * @param base a base-n\n\t */\n\tpublic Base64Encoder(BaseN base) {\n\t\tsuper(base);\n\t}\n\n\t@Override\n\tpublic String apply(GUID128 uuid) {\n\n\t\tfinal char[] chars = new char[CHAR_LENGTH];\n\t\tlong msb = uuid.getMostSignificantBits();\n\t\tlong lsb = uuid.getLeastSignificantBits();\n\n\t\tchars[0x00] = 
get((msb >>> 58) & 0b111111);\n\t\tchars[0x01] = get((msb >>> 52) & 0b111111);\n\t\tchars[0x02] = get((msb >>> 46) & 0b111111);\n\t\tchars[0x03] = get((msb >>> 40) & 0b111111);\n\t\tchars[0x04] = get((msb >>> 34) & 0b111111);\n\t\tchars[0x05] = get((msb >>> 28) & 0b111111);\n\t\tchars[0x06] = get((msb >>> 22) & 0b111111);\n\t\tchars[0x07] = get((msb >>> 16) & 0b111111);\n\t\tchars[0x08] = get((msb >>> 10) & 0b111111);\n\t\tchars[0x09] = get((msb >>> 4) & 0b111111);\n\n\t\tchars[0x0a] = get(((msb << 2) & 0b111111) | ((lsb >>> 62) & 0b111111));\n\n\t\tchars[0x0b] = get((lsb >>> 56) & 0b111111);\n\t\tchars[0x0c] = get((lsb >>> 50) & 0b111111);\n\t\tchars[0x0d] = get((lsb >>> 44) & 0b111111);\n\t\tchars[0x0e] = get((lsb >>> 38) & 0b111111);\n\t\tchars[0x0f] = get((lsb >>> 32) & 0b111111);\n\t\tchars[0x10] = get((lsb >>> 26) & 0b111111);\n\t\tchars[0x11] = get((lsb >>> 20) & 0b111111);\n\t\tchars[0x12] = get((lsb >>> 14) & 0b111111);\n\t\tchars[0x13] = get((lsb >>> 8) & 0b111111);\n\t\tchars[0x14] = get((lsb >>> 2) & 0b111111);\n\t\tchars[0x15] = get((lsb << 4) & 0b111111);\n\n\t\treturn new String(chars);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/BaseNDecoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.immutable.ByteArray;\n\nimport java.util.UUID;\nimport java.util.function.Function;\n\n/**\n * Abstract function to be extended by all decoder functions of this package.\n * <p>\n * If the base-n is case insensitive, it decodes in lower case and upper case.\n */\npublic abstract class BaseNDecoder implements Function<String, GUID128> {\n\n\t/**\n\t * The base-n.\n\t */\n\tprotected final BaseN base;\n\n\t/**\n\t * The base-n map.\n\t */\n\tprotected final ByteArray 
map;\n\n\t/**\n\t * @param base an enumeration that represents the base-n encoding\n\t */\n\tpublic BaseNDecoder(BaseN base) {\n\t\tthis.base = base;\n\t\tthis.map = base.getMap();\n\t}\n\n\tprotected long get(String string, int i) {\n\n\t\tfinal int chr = string.charAt(i);\n\t\tif (chr > 255) {\n\t\t\tthrow InvalidUuidException.newInstance(string);\n\t\t}\n\n\t\tfinal byte value = map.get(chr);\n\t\tif (value < 0) {\n\t\t\tthrow InvalidUuidException.newInstance(string);\n\t\t}\n\t\treturn value & 0xffL;\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/BaseNEncoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\nimport com.pinecone.ulf.util.guid.i128.util.immutable.CharArray;\n\nimport java.util.UUID;\nimport java.util.function.Function;\n\n/**\n * Abstract function to be extended by all encoder functions of this package.\n * <p>\n * If the base-n is case insensitive, it encodes in lower case only.\n */\npublic abstract class BaseNEncoder implements Function<GUID128, String> {\n\n\t/**\n\t * The base-n.\n\t */\n\tprotected final BaseN base;\n\n\t/**\n\t * The base-n alphabet.\n\t */\n\tprotected final CharArray alphabet;\n\n\t/**\n\t * @param base an object that represents the base-n encoding\n\t 
*/\n\tpublic BaseNEncoder(BaseN base) {\n\t\tthis.base = base;\n\t\tthis.alphabet = base.getAlphabet();\n\t}\n\n\tprotected char get(final long index) {\n\t\treturn alphabet.get((int) index);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/BaseNRemainderDecoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\n\nimport java.util.UUID;\n\n/**\n * Function that decodes a base-n string to a UUID.\n * <p>\n * It decodes strings created by encoders that use remainder operator (modulus),\n * a common approach to encode integers.\n * <p>\n * The decoding process is performed using integer arithmetic.\n */\npublic final class BaseNRemainderDecoder extends BaseNDecoder {\n\n\tprivate final int multiplier;\n\n\tprivate static final long MASK = 0x00000000ffffffffL;\n\n\t/**\n\t * Constructor with a base-n.\n\t * \n\t * @param base 
a base-n\n\t */\n\tpublic BaseNRemainderDecoder(BaseN base) {\n\t\tsuper(base);\n\t\tmultiplier = base.getRadix();\n\t}\n\n\tpublic GUID128 apply(String string) {\n\n\t\tlong msb = 0;\n\t\tlong lsb = 0;\n\n\t\tlong rem = 0; // remainder\n\t\tlong[] ans; // [product, overflow]\n\n\t\tfor (int i = 0; i < base.getLength(); i++) {\n\t\t\trem = get(string, i);\n\t\t\tans = multiply(lsb, multiplier, rem);\n\t\t\tlsb = ans[0];\n\t\t\trem = ans[1];\n\t\t\tans = multiply(msb, multiplier, rem);\n\t\t\tmsb = ans[0];\n\t\t\trem = ans[1];\n\t\t}\n\n\t\tif (rem != 0) {\n\t\t\tthrow new InvalidUuidException(\"Invalid encoded string (overflow): \\\"\" + string + \"\\\"\");\n\t\t}\n\n\t\treturn new UUID128(msb, lsb);\n\t}\n\n\t// multiply a long as unsigned 64 bit integer\n\t/**\n\t * Multiply a long as unsigned 64 bit integer\n\t * \n\t * @param x          a number to be multiplied\n\t * @param multiplier a multiplier\n\t * @param rem        the reminder\n\t * @return an array of longs\n\t */\n\tprotected static long[] multiply(final long x, final long multiplier, final long rem) {\n\n\t\tlong mul;\n\t\tlong overflow;\n\t\tfinal long product1;\n\t\tfinal long product2;\n\n\t\t// multiply the last 32 bits\n\t\tmul = ((x & MASK) * multiplier) + rem;\n\t\tproduct1 = mul & MASK;\n\t\toverflow = mul >>> 32;\n\n\t\t// multiply the first 32 bits\n\t\tmul = ((x >>> 32) * multiplier) + overflow;\n\t\tproduct2 = mul & MASK;\n\t\toverflow = mul >>> 32;\n\n\t\t// prepare the answer\n\t\tfinal long[] answer = new long[2];\n\t\tanswer[0] = (product2 << 32) | (product1 & MASK);\n\t\tanswer[1] = overflow;\n\n\t\treturn answer;\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/base/function/BaseNRemainderEncoder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.base.function;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseN;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseNCodec.CustomDivider;\n\nimport java.util.UUID;\n\n/**\n * Function that encodes a UUID to a base-n string.\n * <p>\n * It encodes using remainder operator (modulus), a common approach to encode\n * integers.\n * <p>\n * The encoding process is performed using integer arithmetic.\n */\npublic final class BaseNRemainderEncoder extends BaseNEncoder {\n\n\tprivate final int length;\n\tprivate final char padding;\n\n\t/**\n\t * A custom divider for optimization.\n\t */\n\tprotected final CustomDivider divider;\n\n\tprivate static final long MASK = 0x00000000ffffffffL;\n\n\t/**\n\t * 
Constructor with a base-n.\n\t * \n\t * @param base a base-n\n\t */\n\tpublic BaseNRemainderEncoder(BaseN base) {\n\t\tthis(base, null);\n\t}\n\n\t/**\n\t * Constructor with a base-n and a custom divider.\n\t * \n\t * @param base    a base-n\n\t * @param divider a custom divider\n\t */\n\tpublic BaseNRemainderEncoder(BaseN base, CustomDivider divider) {\n\t\tsuper(base);\n\n\t\tlength = base.getLength();\n\t\tpadding = base.getPadding();\n\t\tfinal long radix = base.getRadix();\n\n\t\tif (divider != null) {\n\t\t\tthis.divider = divider;\n\t\t} else {\n\t\t\tthis.divider = x -> new long[] { x / radix, x % radix };\n\t\t}\n\t}\n\n\t@Override\n\tpublic String apply(GUID128 uuid) {\n\n\t\tlong msb = uuid.getMostSignificantBits();\n\t\tlong lsb = uuid.getLeastSignificantBits();\n\n\t\tint b = length; // buffer index\n\t\tchar[] buffer = new char[length];\n\n\t\tlong rem = 0; // remainder\n\t\tlong[] ans; // [quotient, remainder]\n\n\t\t// fill in the buffer backwards\n\t\twhile (msb != 0 || lsb != 0) {\n\t\t\trem = 0;\n\t\t\tans = divide(msb, divider, rem);\n\t\t\tmsb = ans[0]; // quotient\n\t\t\trem = ans[1]; // remainder\n\t\t\tans = divide(lsb, divider, rem);\n\t\t\tlsb = ans[0]; // quotient\n\t\t\trem = ans[1]; // remainder\n\t\t\tbuffer[--b] = alphabet.get((int) rem);\n\t\t}\n\n\t\t// complete padding\n\t\twhile (b > 0) {\n\t\t\tbuffer[--b] = padding;\n\t\t}\n\n\t\treturn new String(buffer);\n\t}\n\n\t/**\n\t * Divide a long as unsigned 64 bit integer\n\t * \n\t * @param x       a number to be divided\n\t * @param divider a custom divider\n\t * @param rem     the reminder\n\t * @return an array of longs\n\t */\n\tprotected static long[] divide(final long x, CustomDivider divider, final long rem) {\n\n\t\tlong[] div;\n\t\tlong remainder;\n\t\tfinal long quotient1;\n\t\tfinal long quotient2;\n\n\t\t// divide the first 32 bits\n\t\tdiv = divider.divide((rem << 32) | (x >>> 32));\n\t\tquotient1 = div[0];\n\t\tremainder = div[1];\n\n\t\t// divide the last 32 
bits\n\t\tdiv = divider.divide((remainder << 32) | (x & MASK));\n\t\tquotient2 = div[0];\n\t\tremainder = div[1];\n\n\t\t// prepare the answer\n\t\tfinal long[] answer = new long[2];\n\t\tanswer[0] = (quotient1 << 32) | (quotient2 & MASK);\n\t\tanswer[1] = remainder;\n\n\t\treturn answer;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/DotNetGuid1Codec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.other;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.GuidCodec;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidUtil;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\n\nimport java.util.UUID;\n\n/**\n * Codec for time-based .Net Guids.\n */\npublic class DotNetGuid1Codec implements GuidCodec<GUID128> {\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final DotNetGuid1Codec INSTANCE = new DotNetGuid1Codec();\n\n\t/**\n\t * Get a .Ned Guid from a time-based UUID (v1).\n\t * <p>\n\t * This codec converts a time-based UUID (v1) to a .Net Guid.\n\t * <p>\n\t * 
It rearranges the most significant bytes from big-endian to little-endian,\n\t * and vice-versa.\n\t * <p>\n\t * The .Net Guid stores the most significant bytes as little-endian, while the\n\t * least significant bytes are stored as big-endian (network order).\n\t * \n\t * @param uuid a UUID\n\t * @return another UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 encode(GUID128 uuid) {\n\t\tUuidValidator.validate(uuid);\n\t\tif (!UuidUtil.isTimeBased(uuid)) {\n\t\t\tthrow new InvalidUuidException(String.format(\"Not a time-based UUID: %s.\", uuid.toString()));\n\t\t}\n\t\treturn toAndFromDotNetGuid(uuid);\n\t}\n\n\t/**\n\t * Get a time-based UUID (v4) from a .Net Guid.\n\t * <p>\n\t * It rearranges the most significant bytes from big-endian to little-endian,\n\t * and vice-versa.\n\t * <p>\n\t * The .Net Guid stores the most significant bytes as little-endian, while the\n\t * least significant bytes are stored as big-endian (network order).\n\t * \n\t * @param uuid a UUID\n\t * @return another UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(GUID128 uuid) {\n\t\tUuidValidator.validate(uuid);\n\t\tGUID128 uuidv1 = toAndFromDotNetGuid(uuid);\n\t\tif (!UuidUtil.isTimeBased(uuidv1)) {\n\t\t\tthrow new InvalidUuidException(String.format(\"Not a time-based UUID: %s.\", uuidv1.toString()));\n\t\t}\n\t\treturn uuidv1;\n\t}\n\n\t/**\n\t * Convert a UUID to and from a .Net Guid.\n\t * <p>\n\t * It rearranges the most significant bytes from big-endian to little-endian,\n\t * and vice-versa.\n\t * <p>\n\t * The .Net Guid stores the most significant bytes as little-endian, while the\n\t * least significant bytes are stored as big-endian (network order).\n\t * \n\t * @see <a href=\n\t *      \"https://blogs.msdn.microsoft.com/dbrowne/2012/07/03/how-to-generate-sequential-guids-for-sql-server-in-net/\">How\n\t *      to Generate Sequential GUIDs for SQL Server in 
.NET</a>\n\t * @see <a href=\n\t *      \"http://sqlblog.com/blogs/alberto_ferrari/archive/2007/08/31/how-are-guids-sorted-by-sql-server.aspx\">How\n\t *      are GUIDs sorted by SQL Server?</a>\n\t * \n\t * @param uuid a UUID\n\t * @return another UUID\n\t */\n\tprotected static GUID128 toAndFromDotNetGuid(GUID128 uuid) {\n\n\t\tlong msb = uuid.getMostSignificantBits();\n\t\tlong lsb = uuid.getLeastSignificantBits();\n\n\t\tlong newMsb = 0x0000000000000000L;\n\t\t// high bits\n\t\tnewMsb |= (msb & 0xff000000_0000_0000L) >>> 24;\n\t\tnewMsb |= (msb & 0x00ff0000_0000_0000L) >>> 8;\n\t\tnewMsb |= (msb & 0x0000ff00_0000_0000L) << 8;\n\t\tnewMsb |= (msb & 0x000000ff_0000_0000L) << 24;\n\t\t// mid bits\n\t\tnewMsb |= (msb & 0x00000000_ff00_0000L) >>> 8;\n\t\tnewMsb |= (msb & 0x00000000_00ff_0000L) << 8;\n\t\t// low bits\n\t\tnewMsb |= (msb & 0x00000000_0000_ff00L) >>> 8;\n\t\tnewMsb |= (msb & 0x00000000_0000_00ffL) << 8;\n\n\t\treturn new UUID128(newMsb, lsb);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/DotNetGuid4Codec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.other;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.GuidCodec;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidUtil;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\n\nimport java.util.UUID;\n\nimport static com.pinecone.ulf.util.guid.i128.codec.other.DotNetGuid1Codec.toAndFromDotNetGuid;\n\n/**\n * Codec for random-based .Net Guids.\n */\npublic class DotNetGuid4Codec implements GuidCodec<GUID128> {\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final DotNetGuid4Codec INSTANCE = new DotNetGuid4Codec();\n\n\t/**\n\t * Get a .Ned Guid from a random-based UUID (v4).\n\t * <p>\n\t * It rearranges the 
most significant bytes from big-endian to little-endian,\n\t * and vice-versa.\n\t * <p>\n\t * The .Net Guid stores the most significant bytes as little-endian, while the\n\t * least significant bytes are stored as big-endian (network order).\n\t * \n\t * @param uuid a UUID\n\t * @return another UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 encode(GUID128 uuid) {\n\t\tUuidValidator.validate(uuid);\n\t\tif (!UuidUtil.isRandomBased(uuid)) {\n\t\t\tthrow new InvalidUuidException(String.format(\"Not a random-based UUID: %s.\", uuid.toString()));\n\t\t}\n\t\treturn toAndFromDotNetGuid(uuid);\n\t}\n\n\t/**\n\t * Get a random-based UUID (v4) from a .Net Guid.\n\t * <p>\n\t * It rearranges the most significant bytes from big-endian to little-endian,\n\t * and vice-versa.\n\t * <p>\n\t * The .Net Guid stores the most significant bytes as little-endian, while the\n\t * least significant bytes are stored as big-endian (network order).\n\t * \n\t * @param uuid a UUID\n\t * @return another UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(GUID128 uuid) {\n\t\tUuidValidator.validate(uuid);\n\t\tGUID128 uuidv4 = toAndFromDotNetGuid(uuid);\n\t\tif (!UuidUtil.isRandomBased(uuidv4)) {\n\t\t\tthrow new InvalidUuidException(String.format(\"Not a random-based UUID: %s.\", uuidv4.toString()));\n\t\t}\n\t\treturn uuidv4;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/NcnameCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.other;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.StandardBinaryCodec;\nimport com.pinecone.ulf.util.guid.i128.codec.GuidCodec;\nimport com.pinecone.ulf.util.guid.i128.codec.base.Base32Codec;\nimport com.pinecone.ulf.util.guid.i128.codec.base.Base64UrlCodec;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseNCodec;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\nimport com.pinecone.ulf.util.guid.i128.util.immutable.ByteArray;\nimport com.pinecone.ulf.util.guid.i128.util.immutable.CharArray;\nimport com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil;\n\nimport java.util.Arrays;\nimport java.util.UUID;\n\n/**\n * 
Codec for UUID NCNames.\n * <p>\n * A UUID NCName is a shorter string representation that conforms to the\n * constraints of various other identifiers such as NCName in XML documents.\n * <p>\n * The {@link NcnameCodec} turns a UUID into a string that does not start with\n * digits (0-9). But due to the default base-64-url encoding, it is <b>case\n * sensitive</b> and may contain '-' and '_'.\n * <p>\n * The {@link Base32Codec} can be passed to the {@link NcnameCodec} constructor\n * to generate base-32 NCNames. Due to the base-32 alphabet, it is case\n * insensitive and it contains only letters (a-zA-Z) and digits (2-7). This\n * encoding substitution can be done to avoid the characters '-' and '_' of the\n * base-64-url encoding, but it makes the NCName case insensitive.\n * <p>\n * The transformation scheme is outlined in this RFC:\n * https://tools.ietf.org/html/draft-taylor-uuid-ncname-00. The draft describes\n * schemes for base-64-url and base-32.\n * <p>\n * {@link SlugCodec} and {@link NcnameCodec} are very similar. 
The difference\n * between the two is the bit shift they do with the original UUID to transform\n * it into a string.\n * \n * @see <a href=\"https://github.com/f4b6a3/uuid-creator/issues/31\">UUID\n *      NCNames</a>\n */\npublic final class NcnameCodec implements GuidCodec<String> {\n\n\t/**\n\t * A shared immutable instance using `base64url`\n\t */\n\tpublic static final NcnameCodec INSTANCE = new NcnameCodec();\n\n\tprivate final int radix;\n\tprivate final int length;\n\tprivate final int shift;\n\tprivate final char padding;\n\tprivate final BaseNCodec codec;\n\n\tprivate static final CharArray VERSION_UPPERCASE = CharArray.from(\"ABCDEFGHIJKLMNOP\".toCharArray());\n\tprivate static final CharArray VERSION_LOWERCASE = CharArray.from(\"abcdefghijklmnop\".toCharArray());\n\n\tprivate static final ByteArray VERSION_MAP;\n\tstatic {\n\t\t\n\t\t// initialize the array with -1\n\t\tbyte[] mapping = new byte[256];\n\t\tArrays.fill(mapping, (byte) -1);\n\n\t\t// upper case for base-64\n\t\tmapping['A'] = 0x0;\n\t\tmapping['B'] = 0x1;\n\t\tmapping['C'] = 0x2;\n\t\tmapping['D'] = 0x3;\n\t\tmapping['E'] = 0x4;\n\t\tmapping['F'] = 0x5;\n\t\tmapping['G'] = 0x6;\n\t\tmapping['H'] = 0x7;\n\t\tmapping['I'] = 0x8;\n\t\tmapping['J'] = 0x9;\n\t\tmapping['K'] = 0xa;\n\t\tmapping['L'] = 0xb;\n\t\tmapping['M'] = 0xc;\n\t\tmapping['N'] = 0xd;\n\t\tmapping['O'] = 0xe;\n\t\tmapping['P'] = 0xf;\n\t\t// lower case for base-16 and base-32\n\t\tmapping['a'] = 0x0;\n\t\tmapping['b'] = 0x1;\n\t\tmapping['c'] = 0x2;\n\t\tmapping['d'] = 0x3;\n\t\tmapping['e'] = 0x4;\n\t\tmapping['f'] = 0x5;\n\t\tmapping['g'] = 0x6;\n\t\tmapping['h'] = 0x7;\n\t\tmapping['i'] = 0x8;\n\t\tmapping['j'] = 0x9;\n\t\tmapping['k'] = 0xa;\n\t\tmapping['l'] = 0xb;\n\t\tmapping['m'] = 0xc;\n\t\tmapping['n'] = 0xd;\n\t\tmapping['o'] = 0xe;\n\t\tmapping['p'] = 0xf;\n\n\t\tVERSION_MAP = ByteArray.from(mapping);\n\t}\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic NcnameCodec() 
{\n\t\tthis(Base64UrlCodec.INSTANCE);\n\t}\n\n\t/**\n\t * Constructor with a base-n codec.\n\t * \n\t * @param codec a base-n codec\n\t */\n\tpublic NcnameCodec(BaseNCodec codec) {\n\n\t\tif (!(codec instanceof Base64UrlCodec || codec instanceof Base32Codec)) {\n\t\t\tthrow new IllegalArgumentException(\"Unsupported base-n codec\");\n\t\t}\n\n\t\tthis.codec = codec;\n\t\tthis.radix = codec.getBase().getRadix();\n\t\tthis.length = codec.getBase().getLength();\n\t\tthis.padding = codec.getBase().getPadding();\n\n\t\tswitch (this.radix) {\n\t\tcase 32:\n\t\t\tthis.shift = 1;\n\t\t\tbreak;\n\t\tcase 64:\n\t\t\tthis.shift = 2;\n\t\t\tbreak;\n\t\tdefault:\n\t\t\tthis.shift = 0; // unspecified\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t/**\n\t * Get a NCName from a UUID.\n\t * \n\t * @param uuid a UUID\n\t * @return a NCName\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic String encode(GUID128 uuid) {\n\n\t\tUuidValidator.validate(uuid);\n\n\t\tint version = uuid.version();\n\t\tbyte[] bytes = StandardBinaryCodec.INSTANCE.encode(uuid);\n\t\tint[] ints = ByteUtil.toInts(bytes);\n\n\t\tint variant = (ints[2] & 0xf0000000) >>> 24;\n\n\t\tints[1] = (ints[1] & 0xffff0000) | ((ints[1] & 0x00000fff) << 4) | ((ints[2] & 0x0fffffff) >>> 24);\n\t\tints[2] = (ints[2] & 0x00ffffff) << 8 | (ints[3] >>> 24);\n\t\tints[3] = (ints[3] << 8) | variant;\n\n\t\tbytes = ByteUtil.fromInts(ints);\n\t\tbytes[15] = (byte) ((bytes[15] & 0xff) >>> this.shift);\n\n\t\tGUID128 uuuu = StandardBinaryCodec.INSTANCE.decode(bytes);\n\t\tString encoded = this.codec.encode(uuuu).substring(0, this.length - 1);\n\n\t\t// if base is 64, use upper case version, else use lower case\n\t\tchar v = this.radix == 64 ? 
VERSION_UPPERCASE.get(version) : VERSION_LOWERCASE.get(version);\n\n\t\treturn v + encoded;\n\t}\n\n\t/**\n\t * Get a UUID from a NCName.\n\t * \n\t * @param ncname a NCName\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(String ncname) {\n\n\t\tif (ncname == null || ncname.length() != this.length) {\n\t\t\tthrow new InvalidUuidException(\"Invalid UUID NCName: \\\"\" + ncname + \"\\\"\");\n\t\t}\n\n\t\t// check if the bookends are valid chars: [A-Pa-p]\n\t\tint bookend1 = (int) VERSION_MAP.get(ncname.charAt(0));\n\t\tint bookend2 = (int) VERSION_MAP.get(ncname.charAt(ncname.length() - 1));\n\t\tif (bookend1 == -1 || bookend2 == -1) {\n\t\t\tthrow new InvalidUuidException(\"Invalid UUID NCName: \\\"\" + ncname + \"\\\"\");\n\t\t}\n\n\t\tint version = bookend1 & 0xf;\n\n\t\tString substring = ncname.substring(1, ncname.length());\n\t\tGUID128 uuid = this.codec.decode(substring + padding);\n\n\t\tbyte[] bytes = StandardBinaryCodec.INSTANCE.encode(uuid);\n\t\tbytes[15] = (byte) ((bytes[15] & 0xff) << this.shift);\n\n\t\tint[] ints = ByteUtil.toInts(bytes);\n\n\t\tint variant = (ints[3] & 0xf0) << 24;\n\n\t\tints[3] >>>= 8;\n\t\tints[3] |= ((ints[2] & 0xff) << 24);\n\t\tints[2] >>>= 8;\n\t\tints[2] |= ((ints[1] & 0xf) << 24) | variant;\n\t\tints[1] = (ints[1] & 0xffff0000) | (version << 12) | ((ints[1] >>> 4) & 0xfff);\n\n\t\tbytes = ByteUtil.fromInts(ints);\n\n\t\treturn StandardBinaryCodec.INSTANCE.decode(bytes);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/SlugCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.other;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.GuidCodec;\nimport com.pinecone.ulf.util.guid.i128.codec.base.Base32Codec;\nimport com.pinecone.ulf.util.guid.i128.codec.base.Base64UrlCodec;\nimport com.pinecone.ulf.util.guid.i128.codec.base.BaseNCodec;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\n\nimport java.util.UUID;\n\n/**\n * Codec for UUID Slugs.\n * <p>\n * A UUID Slug is a shorter string representation that can be safely included in\n * URLs and file names.\n * <p>\n * The {@link SlugCodec} turns a UUID into a 
string that does not start with\n * digits (0-9). Due to the default base-64-url alphabet, it is <b>case\n * sensitive</b> and may contain '-' and '_'.\n * <p>\n * The {@link Base32Codec} can be passed to the {@link SlugCodec} constructor to\n * generate base-32 slugs. Due to the base-32 alphabet, it is case insensitive\n * and it contains only letters (a-zA-Z) and digits (2-7). This encoding\n * substitution can be done to avoid the characters '-' and '_' of the\n * base-64-url encoding, but it makes the slug case insensitive.\n * <p>\n * To turn a UUID into a slug, the version and variant nibbles are are moved to\n * the first position of the UUID byte array. The slugs generated of the same\n * UUID version show a constant letter in the first position of the base-64-url\n * string.\n * <p>\n * This is how the UUID bits are rearranged:\n * \n * <pre>{@code\n *   aaaaaaaa-bbbb-Vccc-Rddd-eeeeeeeeeeee\n *                 |    |            ^\n *   ,-------------'    |   encode   |\n *   |,-----------------'      |   decode\n *   ||                        v\n *   VRaaaaaa-aabb-bbcc-cddd-eeeeeeeeeeee\n *               shift >>|\n *\n *   V: version nibble or character\n *   R: variant nibble or character\n * }</pre>\n * \n * <p>\n * This table shows the slug prefixes for each UUID version:\n * \n * <pre>\n * VERSON  PREFIX   EXAMPLE\n *    1       G     GxA1e7vco3Ib6_mjtptP3w\n *    2       K     KryezRARVgTHLQ3zJpAXIw\n *    3       O     O9JfSS1IqIabkEWC-uXWNA\n *    4       S     S5iPSZYDt7q2w0qiIFZVwQ\n *    5       W     WY-Uv6WAY5os7Gfv4ILnvQ\n *    6       a     aMKkEoaymw0FSQNJRDL7Gw\n * </pre>\n * \n * <p>\n * If you don't like the change in the bytes layout before the encoding to\n * base-64-url, use the {@link Base64UrlCodec} instead of {@link SlugCodec} to\n * generate slugs.\n * <p>\n * {@link SlugCodec} and {@link NcnameCodec} are very similar. 
The difference\n * between the two is the bit shift they do with the original UUID to transform\n * it into a string.\n * <p>\n * In the case someone is interested in implementing this type of slug in\n * another language, the change in the bytes layout don't have to be done with\n * bit shifting. Since a base-16 character corresponds to a nibble, the layout\n * change could be easily done by moving characters instead of by shifting bits.\n * See {@code SlugCodecTest#moveCharacters()}.\n * \n * @see <a href=\"https://github.com/f4b6a3/uuid-creator/issues/30\">UUID\n *      Slugs</a>\n */\npublic final class SlugCodec implements GuidCodec<String> {\n\n\t/**\n\t * A shared immutable instance using `base64url`\n\t */\n\tpublic static final SlugCodec INSTANCE = new SlugCodec();\n\n\tprivate final int length;\n\tprivate final BaseNCodec codec;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic SlugCodec() {\n\t\tthis(Base64UrlCodec.INSTANCE);\n\t}\n\n\t/**\n\t * @param codec a base-n codec to be used (the default is base-64-url)\n\t */\n\tpublic SlugCodec(BaseNCodec codec) {\n\t\tif (codec == null) {\n\t\t\tthrow new IllegalArgumentException(\"Null codec\");\n\t\t}\n\t\tthis.codec = codec;\n\t\tthis.length = codec.getBase().getLength();\n\t}\n\n\t/**\n\t * Get a Slug from a UUID.\n\t * \n\t * @param uuid a UUID\n\t * @return a Slug\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic String encode(GUID128 uuid) {\n\n\t\tUuidValidator.validate(uuid);\n\n\t\tlong long1 = uuid.getMostSignificantBits();\n\t\tlong long2 = uuid.getLeastSignificantBits();\n\n\t\tlong msb = 0;\n\t\tlong lsb = 0;\n\n\t\tmsb |= (long1 & 0x000000000000f000L) << 48; // move version nibble to bit positions 0, 1, 2, and 3\n\t\tmsb |= (long2 & 0xf000000000000000L) >>> 4; // move variant nibble to bit positions 4, 5, 6, and 7\n\t\tmsb |= (long1 & 0xffffffffffff0000L) >>> 8;\n\t\tmsb |= (long1 & 0x0000000000000fffL) >>> 4;\n\n\t\tlsb |= (long1 & 
0x000000000000000fL) << 60;\n\t\tlsb |= (long2 & 0x0fffffffffffffffL);\n\n\t\treturn this.codec.encode(new UUID128(msb, lsb));\n\t}\n\n\t/**\n\t * Get a UUID from a Slug.\n\t * \n\t * @param slug a Slug\n\t * @return a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(String slug) {\n\n\t\tif (slug == null || slug.length() != this.length) {\n\t\t\tthrow new InvalidUuidException(\"Invalid UUID Slug: \\\"\" + slug + \"\\\"\");\n\t\t}\n\n\t\tGUID128 uuid = this.codec.decode(slug);\n\n\t\tlong long1 = uuid.getMostSignificantBits();\n\t\tlong long2 = uuid.getLeastSignificantBits();\n\n\t\tlong msb = 0;\n\t\tlong lsb = 0;\n\n\t\tmsb |= (long1 & 0xf000000000000000L) >>> 48; // move version nibble to its original position\n\t\tmsb |= (long2 & 0xf000000000000000L) >>> 60; // move variant nibble to its original position\n\t\tmsb |= (long1 & 0x00ffffffffffff00L) << 8;\n\t\tmsb |= (long1 & 0x00000000000000ffL) << 4;\n\n\t\tlsb |= (long1 & 0x0f00000000000000L) << 4;\n\t\tlsb |= (long2 & 0x0fffffffffffffffL);\n\n\t\treturn new UUID128(msb, lsb);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/codec/other/TimeOrderedCodec.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.codec.other;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.GuidCodec;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.UuidUtil;\nimport com.pinecone.ulf.util.guid.i128.util.UuidValidator;\n\nimport java.util.UUID;\n\n/**\n * Codec for time-ordered UUIDs\n * <p>\n * This codec converts time-based UUIDs (UUIDv1) to time-ordered UUIDs (UUIDv6).\n */\npublic class TimeOrderedCodec implements GuidCodec<GUID128> {\n\n\t/**\n\t * A shared immutable instance.\n\t */\n\tpublic static final TimeOrderedCodec INSTANCE = new TimeOrderedCodec();\n\n\t/**\n\t * Get a time-ordered UUID from a time-based 
UUID.\n\t * \n\t * @param uuid a time-based UUID\n\t * @return a time-ordered UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 encode(GUID128 uuid) {\n\n\t\tUuidValidator.validate(uuid);\n\n\t\tif (!UuidUtil.isTimeBased(uuid)) {\n\t\t\tthrow new InvalidUuidException(\"Not a time-based UUID: \" + uuid);\n\t\t}\n\n\t\tlong timestamp = UuidUtil.getTimestamp(uuid);\n\n\t\tlong msb = ((timestamp & 0x0ffffffffffff000L) << 4) //\n\t\t\t\t| (timestamp & 0x0000000000000fffL) //\n\t\t\t\t| 0x0000000000006000L; // set version 6\n\n\t\tlong lsb = uuid.getLeastSignificantBits();\n\n\t\treturn new UUID128(msb, lsb);\n\t}\n\n\t/**\n\t * Get a time-based UUID from a time-ordered UUID.\n\t * \n\t * @param uuid a time-ordered UUID\n\t * @return a time-based UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\t@Override\n\tpublic GUID128 decode(GUID128 uuid) {\n\n\t\tUuidValidator.validate(uuid);\n\n\t\tif (!UuidUtil.isTimeOrdered(uuid)) {\n\t\t\tthrow new InvalidUuidException(\"Not a time-ordered UUID: \" + uuid);\n\t\t}\n\n\t\tlong timestamp = UuidUtil.getTimestamp(uuid);\n\n\t\tlong timeHigh = (timestamp & 0x0fff_0000_00000000L) >>> 48;\n\t\tlong timeMid = (timestamp & 0x0000_ffff_00000000L) >>> 16;\n\t\tlong timeLow = (timestamp & 0x0000_0000_ffffffffL) << 32;\n\t\tlong version = 0x0000000000001000L; // Set version 1\n\n\t\t// Combine the parts to form the Most Significant Bits (MSB)\n\t\tlong msb = timeHigh | timeMid | timeLow | version;\n\n\t\tlong lsb = uuid.getLeastSignificantBits();\n\n\t\treturn new UUID128(msb, lsb);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/enums/UuidLocalDomain.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.enums;\n\npublic enum UuidLocalDomain {\n\n\t/**\n\t * The principal domain, interpreted as POSIX UID domain on POSIX systems.\n\t */\n\tLOCAL_DOMAIN_PERSON((byte) 0),\n\t/**\n\t * The group domain, interpreted as POSIX GID domain on POSIX systems.\n\t */\n\tLOCAL_DOMAIN_GROUP((byte) 1),\n\t/**\n\t * The organization domain, site-defined.\n\t */\n\tLOCAL_DOMAIN_ORG((byte) 2);\n\n\tprivate final byte value;\n\n\tUuidLocalDomain(byte value) {\n\t\tthis.value = value;\n\t}\n\n\t/**\n\t * Get the byte value.\n\t * \n\t * @return a byte\n\t */\n\tpublic byte getValue() {\n\t\treturn this.value;\n\t}\n\n\t/**\n\t * Get the enum value.\n\t * \n\t * @param value a byte.\n\t * @return the enum\n\t */\n\tpublic static UuidLocalDomain getLocalDomain(byte value) {\n\t\tfor 
(UuidLocalDomain domain : UuidLocalDomain.values()) {\n\t\t\tif (domain.getValue() == value) {\n\t\t\t\treturn domain;\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/enums/UuidNamespace.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.enums;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\n\npublic enum UuidNamespace {\n\n\t/**\n\t * Name space to be used when the name string is a fully-qualified domain name.\n\t */\n\tNAMESPACE_DNS(new UUID128(0x6ba7b8109dad11d1L, 0x80b400c04fd430c8L)),\n\t/**\n\t * Name space to be used when the name string is a URL.\n\t */\n\tNAMESPACE_URL(new UUID128(0x6ba7b8119dad11d1L, 0x80b400c04fd430c8L)),\n\t/**\n\t * Name space to be used when the name string is an ISO OID.\n\t */\n\tNAMESPACE_OID(new UUID128(0x6ba7b8129dad11d1L, 0x80b400c04fd430c8L)),\n\t/**\n\t * Name space to be used when the name string is an X.500 DN (DER or text).\n\t */\n\tNAMESPACE_X500(new 
UUID128(0x6ba7b8149dad11d1L, 0x80b400c04fd430c8L));\n\n\tprivate final GUID128 value;\n\n\tUuidNamespace(GUID128 value) {\n\t\tthis.value = value;\n\t}\n\n\t/**\n\t * Get the UUID value\n\t * \n\t * @return a UUID\n\t */\n\tpublic GUID128 getValue() {\n\t\treturn this.value;\n\t}\n\n\t/**\n\t * Get the enum value.\n\t * \n\t * @param value a UUID.\n\t * @return the enum\n\t */\n\tpublic static UuidNamespace getNamespace(GUID value) {\n\t\tfor (UuidNamespace namespace : UuidNamespace.values()) {\n\t\t\tif (namespace.getValue().equals(value)) {\n\t\t\t\treturn namespace;\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/enums/UuidVariant.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.enums;\n\n\npublic enum UuidVariant {\n\n\t/**\n\t * Reserved for NCS backward compatibility.\n\t */\n\tVARIANT_RESERVED_NCS(0),\n\t/**\n\t * The variant specified in RFC 9562.\n\t */\n\tVARIANT_STANDARD(2),\n\t/**\n\t * Reserved for Microsoft Corporation backward compatibility.\n\t */\n\tVARIANT_RESERVED_MICROSOFT(6),\n\t/**\n\t * Reserved for future definition.\n\t */\n\tVARIANT_RESERVED_FUTURE(7);\n\n\tprivate final int value;\n\n\tUuidVariant(int value) {\n\t\tthis.value = value;\n\t}\n\n\t/**\n\t * Get the number value.\n\t * \n\t * @return a number\n\t */\n\tpublic int getValue() {\n\t\treturn this.value;\n\t}\n\n\t/**\n\t * Get the enum value.\n\t * \n\t * @param value a number.\n\t * @return the enum\n\t */\n\tpublic static UuidVariant getVariant(int 
value) {\n\t\tfor (UuidVariant variant : UuidVariant.values()) {\n\t\t\tif (variant.getValue() == value) {\n\t\t\t\treturn variant;\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/enums/UuidVersion.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.enums;\n\npublic enum UuidVersion {\n\n\t/**\n\t * An unknown version.\n\t */\n\tVERSION_UNKNOWN(0),\n\t/**\n\t * The time-based version with gregorian epoch specified in RFC 9562.\n\t */\n\tVERSION_TIME_BASED(1),\n\t/**\n\t * The DCE Security version, with embedded POSIX UIDs.\n\t */\n\tVERSION_DCE_SECURITY(2),\n\t/**\n\t * The name-based version specified in RFC 9562 that uses MD5 hashing.\n\t */\n\tVERSION_NAME_BASED_MD5(3),\n\t/**\n\t * The randomly or pseudo-randomly generated version specified in RFC 9562.\n\t */\n\tVERSION_RANDOM_BASED(4),\n\t/**\n\t * The name-based version specified in RFC 9562 that uses SHA-1 hashing.\n\t */\n\tVERSION_NAME_BASED_SHA1(5),\n\t/**\n\t * The time-ordered version with gregorian epoch proposed by Peabody and Davis.\n\t 
*/\n\tVERSION_TIME_ORDERED(6),\n\t/**\n\t * The time-ordered version with Unix epoch proposed by Peabody and Davis.\n\t */\n\tVERSION_TIME_ORDERED_EPOCH(7),\n\t/**\n\t * The custom or free-form version proposed by Peabody and Davis.\n\t */\n\tVERSION_CUSTOM(8);\n\n\tprivate final int value;\n\n\tUuidVersion(int value) {\n\t\tthis.value = value;\n\t}\n\n\t/**\n\t * Get the number value.\n\t * \n\t * @return a number\n\t */\n\tpublic int getValue() {\n\t\treturn this.value;\n\t}\n\n\t/**\n\t * Get the enum value.\n\t * \n\t * @param value a number.\n\t * @return the enum\n\t */\n\tpublic static UuidVersion getVersion(int value) {\n\t\tfor (UuidVersion version : UuidVersion.values()) {\n\t\t\tif (version.getValue() == value) {\n\t\t\t\treturn version;\n\t\t\t}\n\t\t}\n\t\treturn null;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/exception/InvalidUuidException.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.exception;\n\nimport java.util.Arrays;\n\n/**\n * Runtime exception to be used when an invalid UUID is received as argument.\n */\npublic final class InvalidUuidException extends RuntimeException {\n\n\tprivate static final long serialVersionUID = 1L;\n\n\t/**\n\t * Default constructor with a message.\n\t * \n\t * @param message a message\n\t */\n\tpublic InvalidUuidException(String message) {\n\t\tsuper(message);\n\t}\n\n\t/**\n\t * Default constructor with a message and the cause.\n\t * \n\t * @param message a message\n\t * @param cause   the cause\n\t */\n\tpublic InvalidUuidException(String message, Throwable cause) {\n\t\tsuper(message, cause);\n\t}\n\n\t/**\n\t * Factory method for creating a runtime exception.\n\t * \n\t * @param obj an object that can, 
for example, a string of a char array.\n\t * @return a runtime exception\n\t */\n\tpublic static InvalidUuidException newInstance(Object obj) {\n\n\t\tString string;\n\t\tif (obj == null) {\n\t\t\tstring = null;\n\t\t} else if (obj instanceof char[]) {\n\t\t\tstring = String.valueOf((char[]) obj);\n\t\t} else if (obj.getClass().isArray()) {\n\t\t\tstring = Arrays.toString((byte[]) obj);\n\t\t} else {\n\t\t\tstring = String.valueOf(obj);\n\t\t}\n\n\t\tif (string != null) {\n\t\t\tstring = \"\\\"\" + string + \"\\\"\";\n\t\t}\n\n\t\treturn new InvalidUuidException(\"Invalid UUID: \" + string);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/AbstCombFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory;\n\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\n\nimport java.time.Clock;\nimport java.time.Instant;\nimport java.util.function.LongSupplier;\nimport java.util.function.Supplier;\n\n/**\n * Abstract Factory for creating COMB GUIDs.\n * <p>\n * COMB GUIDs combine a creation time and random bytes.\n */\npublic abstract class AbstCombFactory extends AbstRandomBasedFactory {\n\n\t/**\n\t * The instant function.\n\t */\n\tprotected Supplier<Instant> instantFunction;\n\n\t/**\n\t * Constructor whith a version number and a builder.\n\t * \n\t * @param version a version number\n\t * @param builder a builder\n\t */\n\tprotected AbstCombFactory(UuidVersion version, Builder<?, ?> builder) {\n\t\tsuper(version, builder);\n\t\tthis.instantFunction = 
builder.getInstantFunction();\n\t}\n\n\t/**\n\t * Abstract builder for creating a COMB factory.\n\t *\n\t * @param <T> factory type\n\t * @param <B> builder type\n\t * @see AbstRandomBasedFactory.Builder\n\t */\n\tpublic abstract static class Builder<T, B extends Builder<T, B>> extends AbstRandomBasedFactory.Builder<T, B> {\n\n\t\t/**\n\t\t * The instant function.\n\t\t */\n\t\tprotected Supplier<Instant> instantFunction;\n\n\t\t/**\n\t\t * Get the instant function.\n\t\t * \n\t\t * @return the builder\n\t\t */\n\t\tprotected Supplier<Instant> getInstantFunction() {\n\t\t\tif (this.instantFunction == null) {\n\t\t\t\tthis.instantFunction = () -> Instant.now();\n\t\t\t}\n\t\t\treturn this.instantFunction;\n\t\t}\n\n\t\t/**\n\t\t * Set the clock.\n\t\t * \n\t\t * @param clock a clock\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withClock(Clock clock) {\n\t\t\tif (clock != null) {\n\t\t\t\tthis.instantFunction = () -> clock.instant();\n\t\t\t}\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the time function.\n\t\t * \n\t\t * The time is the number of milliseconds since 1970-01-01T00:00:00Z.\n\t\t * \n\t\t * @param timeFunction a function\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withTimeFunction(LongSupplier timeFunction) {\n\t\t\tthis.instantFunction = () -> Instant.ofEpochMilli(timeFunction.getAsLong());\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the instant function.\n\t\t * \n\t\t * @param instantFunction a function\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withInstantFunction(Supplier<Instant> instantFunction) {\n\t\t\tthis.instantFunction = instantFunction;\n\t\t\treturn (B) this;\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/AbstNameBasedFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidNamespace;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil;\n\nimport java.security.MessageDigest;\nimport java.security.NoSuchAlgorithmException;\nimport java.util.Objects;\nimport java.util.UUID;\n\nimport static com.pinecone.ulf.util.guid.i128.enums.UuidVersion.VERSION_NAME_BASED_MD5;\nimport static com.pinecone.ulf.util.guid.i128.enums.UuidVersion.VERSION_NAME_BASED_SHA1;\n\n/**\n * Abstract factory for creating name-based unique identifiers (UUIDv3 and\n * 
UUIDv5).\n * \n * The name space is optional for compatibility with the JDK's UUID method for\n * generating UUIDv3, which is {@link UUID#nameUUIDFromBytes(byte[])}.\n * \n * @see UuidNamespace\n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc9562.html\">RFC 9562</a>\n */\npublic abstract class AbstNameBasedFactory extends UuidFactory {\n\n\t/**\n\t * The namespace (optional).\n\t */\n\tprotected byte[] namespace; // can be null\n\t/**\n\t * The hash algorithm.\n\t */\n\tprotected final String algorithm; // MD5 or SHA-1\n\n\t/**\n\t * The MD5 algorithm.\n\t */\n\tprotected static final String ALGORITHM_MD5 = \"MD5\";\n\t/**\n\t * The SHA-1 algorithm.\n\t */\n\tprotected static final String ALGORITHM_SHA1 = \"SHA-1\";\n\n\t/**\n\t * Protected constructor that receives the message digest algorithm and an\n\t * optional name space.\n\t * \n\t * @param version   the version number (3 or 5)\n\t * @param algorithm a message digest algorithm (MD5 or SHA-1)\n\t * @param namespace a name space byte array (null or 16 bytes)\n\t */\n\tprotected AbstNameBasedFactory(UuidVersion version, String algorithm, byte[] namespace) {\n\t\tsuper(version);\n\n\t\tif (!VERSION_NAME_BASED_MD5.equals(version) && !VERSION_NAME_BASED_SHA1.equals(version)) {\n\t\t\tthrow new IllegalArgumentException(\"Invalid UUID version\");\n\t\t}\n\n\t\tif (ALGORITHM_MD5.equals(algorithm) || ALGORITHM_SHA1.equals(algorithm)) {\n\t\t\tthis.algorithm = algorithm;\n\t\t} else {\n\t\t\tthrow new IllegalArgumentException(\"Invalid message digest algorithm\");\n\t\t}\n\n\t\tif (namespace != null) {\n\t\t\tif (namespace.length == 16) {\n\t\t\t\t// must be 16 bytes length\n\t\t\t\tthis.namespace = namespace;\n\t\t\t} else {\n\t\t\t\tthrow new IllegalArgumentException(\"Invalid namespace\");\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Returns a name-based UUID.\n\t * \n\t * @param name a byte array\n\t * @return a name-based UUID\n\t * @throws NullPointerException if name is null\n\t */\n\tpublic GUID128 create(final 
byte[] name) {\n\t\treturn (GUID128) this.create(this.namespace, nameBytes(name));\n\t}\n\n\t/**\n\t * Returns a name-based UUID.\n\t * \n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param name a string\n\t * @return a name-based UUID\n\t * @throws NullPointerException if name is null\n\t */\n\tpublic GUID128 create(final String name) {\n\t\treturn (GUID128) this.create(this.namespace, nameBytes(name));\n\t}\n\n\t/**\n\t * Returns a name-based UUID.\n\t * \n\t * @param namespace a name space UUID\n\t * @param name      a byte array\n\t * @return a name-based UUID\n\t * @throws IllegalArgumentException if name is null\n\t */\n\tpublic GUID128 create(final GUID128 namespace, final byte[] name) {\n\t\treturn (GUID128) this.create(namespaceBytes(namespace), nameBytes(name));\n\t}\n\n\t/**\n\t * Returns a name-based UUID.\n\t * \n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param namespace a name space UUID\n\t * @param name      a string\n\t * @return a name-based UUID\n\t * @throws NullPointerException if name is null\n\t */\n\tpublic GUID128 create(final GUID128 namespace, final String name) {\n\t\treturn (GUID128) this.create(namespaceBytes(namespace), nameBytes(name));\n\t}\n\n\t/**\n\t * Returns a name-based UUID.\n\t * \n\t * @param namespace a name space string\n\t * @param name      a byte array\n\t * @return a name-based UUID\n\t * @throws NullPointerException if name is null\n\t * @throws InvalidUuidException if the name space is invalid\n\t * @see InvalidUuidException\n\t */\n\tpublic GUID128 create(final String namespace, final byte[] name) {\n\t\treturn (GUID128) this.create(namespaceBytes(namespace), nameBytes(name));\n\t}\n\n\t/**\n\t * Returns a name-based UUID.\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param namespace a name space string\n\t * @param name      a string\n\t * @return a name-based UUID\n\t * @throws 
NullPointerException if name is null\n\t * @throws InvalidUuidException if the name space is invalid\n\t * @see InvalidUuidException\n\t */\n\tpublic GUID128 create(final String namespace, final String name) {\n\t\treturn (GUID128) this.create(namespaceBytes(namespace), nameBytes(name));\n\t}\n\n\t/**\n\t * Returns a name-based UUID.\n\t * \n\t * @param namespace a name space enumeration\n\t * @param name      a byte array\n\t * @return a name-based UUID\n\t * @throws NullPointerException if name is null\n\t */\n\tpublic GUID128 create(final UuidNamespace namespace, final byte[] name) {\n\t\treturn (GUID128) this.create(namespaceBytes(namespace), nameBytes(name));\n\t}\n\n\t/**\n\t * Returns a name-based UUID.\n\t * <p>\n\t * The name string is encoded into a sequence of bytes using UTF-8.\n\t * \n\t * @param namespace a name space enumeration\n\t * @param name      a string\n\t * @return a name-based UUID\n\t * @throws NullPointerException if name is null\n\t */\n\tpublic GUID128 create(final UuidNamespace namespace, final String name) {\n\t\treturn (GUID128) this.create(namespaceBytes(namespace), nameBytes(name));\n\t}\n\n\n\t@Override\n\tpublic GUID128 create() {\n\t\treturn create(Parameters.builder().build());\n\t}\n\n    @Override\n\tpublic GUID128 create(Parameters parameters) {\n\t\treturn (GUID128) this.create( parameters.getNamespace(), parameters.getName() );\n\t}\n\n\tprivate Object create( final byte[] namespace, final byte[] name ) {\n\n\t\tObjects.requireNonNull(name, \"Null name\");\n\n\t\tMessageDigest hasher;\n\n\t\ttry {\n\t\t\thasher = MessageDigest.getInstance(this.algorithm);\n\t\t} catch (NoSuchAlgorithmException e) {\n\t\t\tthrow new IllegalArgumentException(e.getMessage());\n\t\t}\n\n\t\tif (namespace != null) {\n\t\t\t// Prepend the name space\n\t\t\thasher.update(namespace);\n\t\t}\n\n\t\t// Compute the hash of the name\n\t\tfinal byte[] hash = hasher.digest(name);\n\n\t\tfinal long msb = ByteUtil.toNumber(hash, 0, 8);\n\t\tfinal long lsb 
= ByteUtil.toNumber(hash, 8, 16);\n\n        return this.toUuid(msb, lsb);\n\t}\n\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/AbstRandomBasedFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.function.RandomFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.impl.DefaultRandomFunction;\nimport com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil;\n\nimport java.security.SecureRandom;\nimport java.util.Objects;\nimport java.util.Random;\nimport java.util.UUID;\nimport java.util.concurrent.ThreadLocalRandom;\nimport java.util.concurrent.locks.ReentrantLock;\nimport java.util.function.IntFunction;\nimport java.util.function.LongSupplier;\n\n/**\n * Abstract factory for creating random-based unique identifiers (UUIDv4).\n * \n * @see RandomFunction\n */\npublic abstract 
class AbstRandomBasedFactory extends UuidFactory {\n\n\t/**\n\t * The random generator.\n\t */\n\tprotected final IRandom random;\n\n\t/**\n\t * The number of bytes of a UUID.\n\t */\n\tprotected static final int UUID_BYTES = 16;\n\n\t/**\n\t * The reentrant lock for synchronization.\n\t */\n\tprotected final ReentrantLock lock = new ReentrantLock();\n\n\t/**\n\t * Constructor with a version number and a builder\n\t * \n\t * @param version a version number\n\t * @param builder a builder\n\t */\n\tprotected AbstRandomBasedFactory(UuidVersion version, Builder<?, ?> builder) {\n\t\tsuper(version);\n\t\tthis.random = builder.getRandom();\n\t}\n\n\t@Override\n\tpublic GUID128 create(Parameters parameters) {\n\t\treturn create(); // ignore parameters\n\t}\n\n\t/**\n\t * Abstract builder for creating a random-based factory.\n\t *\n\t * @param <T> factory type\n\t * @param <B> builder type\n\t */\n\tprotected abstract static class Builder<T, B extends Builder<T, B>> {\n\n\t\t/**\n\t\t * A random generator.\n\t\t */\n\t\tprotected IRandom random;\n\n\t\t/**\n\t\t * Get the random generator.\n\t\t * \n\t\t * @return a random generator\n\t\t */\n\t\tprotected IRandom getRandom() {\n\t\t\tif (this.random == null) {\n\t\t\t\tthis.random = new SafeRandom(new DefaultRandomFunction());\n\t\t\t}\n\t\t\treturn this.random;\n\t\t}\n\n\t\t/**\n\t\t * Set the random generator with a fast algorithm.\n\t\t * \n\t\t * Use it to replace the {@link DefaultRandomFunction} with\n\t\t * {@link ThreadLocalRandom}.\n\t\t * \n\t\t * @return the generator\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withFastRandom() {\n\t\t\tthis.random = new FastRandom();\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the random generator with a safe algorithm.\n\t\t * \n\t\t * Use it to replace the {@link DefaultRandomFunction} with\n\t\t * {@link SecureRandom}.\n\t\t * \n\t\t * @return the generator\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withSafeRandom() 
{\n\t\t\tthis.random = new SafeRandom();\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the random generator.\n\t\t * \n\t\t * @param random a random\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withRandom(Random random) {\n\t\t\tif (random != null) {\n\t\t\t\tif (random instanceof SecureRandom) {\n\t\t\t\t\tthis.random = new SafeRandom(random);\n\t\t\t\t} else {\n\t\t\t\t\tthis.random = new FastRandom(random);\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set a random function which returns random numbers.\n\t\t * \n\t\t * @param randomFunction a function\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withRandomFunction(LongSupplier randomFunction) {\n\t\t\tthis.random = new FastRandom(randomFunction);\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Finishes the factory building.\n\t\t * \n\t\t * @return the build factory\n\t\t */\n\t\tpublic abstract T build();\n\t}\n\n\t/**\n\t * Interface for random generator.\n\t */\n\tprotected interface IRandom {\n\n\t\t/**\n\t\t * Return a random number.\n\t\t * \n\t\t * @return a number\n\t\t */\n\t\tlong nextLong();\n\n\t\t/**\n\t\t * Return a random number.\n\t\t *\n\t\t * @param length the byte array length\n\t\t * @return a number\n\t\t */\n\t\tlong nextLong(int length);\n\n\t\t/**\n\t\t * Return a random array of bytes.\n\t\t * \n\t\t * @param length the byte array length\n\t\t * @return an array\n\t\t */\n\t\tbyte[] nextBytes(int length);\n\t}\n\n\t/**\n\t * A long random generator.\n\t */\n\tprotected static final class FastRandom implements IRandom {\n\n\t\tprivate final LongSupplier randomFunction;\n\n\t\t/**\n\t\t * Default constructor.\n\t\t */\n\t\tpublic FastRandom() {\n\t\t\tthis(newFastFunction(null));\n\t\t}\n\n\t\t/**\n\t\t * Constructor with a random.\n\t\t * \n\t\t * @param random a random\n\t\t */\n\t\tpublic FastRandom(Random random) 
{\n\t\t\tthis(newFastFunction(Objects.requireNonNull(random)));\n\t\t}\n\n\t\t/**\n\t\t * Constructor with a function which returns random numbers.\n\t\t * \n\t\t * @param randomFunction a function\n\t\t */\n\t\tpublic FastRandom(LongSupplier randomFunction) {\n\t\t\tthis.randomFunction = Objects.requireNonNull(randomFunction);\n\t\t}\n\n\t\t@Override\n\t\tpublic long nextLong() {\n\t\t\treturn randomFunction.getAsLong();\n\t\t}\n\n\t\t@Override\n\t\tpublic long nextLong(int length) {\n\t\t\tbyte[] bytes = nextBytes(length);\n\t\t\treturn ByteUtil.toNumber(bytes);\n\t\t}\n\n\t\t@Override\n\t\tpublic byte[] nextBytes(int length) {\n\n\t\t\tint shift = 0;\n\t\t\tlong random = 0;\n\t\t\tfinal byte[] bytes = new byte[length];\n\n\t\t\tfor (int i = 0; i < length; i++) {\n\t\t\t\tif (shift < Byte.SIZE) {\n\t\t\t\t\tshift = Long.SIZE;\n\t\t\t\t\trandom = randomFunction.getAsLong();\n\t\t\t\t}\n\t\t\t\tshift -= Byte.SIZE; // 56, 48, 42...\n\t\t\t\tbytes[i] = (byte) (random >>> shift);\n\t\t\t}\n\n\t\t\treturn bytes;\n\t\t}\n\n\t\t/**\n\t\t * Returns a new random function.\n\t\t * \n\t\t * @param random a random\n\t\t * @return a function\n\t\t */\n\t\tprivate static LongSupplier newFastFunction(Random random) {\n\t\t\tif (random != null) {\n\t\t\t\treturn () -> random.nextLong();\n\t\t\t}\n\t\t\treturn () -> ThreadLocalRandom.current().nextLong();\n\t\t}\n\t}\n\n\t/**\n\t * A byte random generator.\n\t */\n\tprotected static final class SafeRandom implements IRandom {\n\n\t\tprivate final IntFunction<byte[]> randomFunction;\n\n\t\t/**\n\t\t * Default constructor.\n\t\t */\n\t\tpublic SafeRandom() {\n\t\t\tthis(newSafeFunction(null));\n\t\t}\n\n\t\t/**\n\t\t * Constructor with a random.\n\t\t * \n\t\t * @param random a random\n\t\t */\n\t\tpublic SafeRandom(Random random) {\n\t\t\tthis(newSafeFunction(Objects.requireNonNull(random)));\n\t\t}\n\n\t\t/**\n\t\t * Constructor with a function which returns random numbers.\n\t\t * \n\t\t * @param randomFunction a function\n\t\t 
*/\n\t\tpublic SafeRandom(IntFunction<byte[]> randomFunction) {\n\t\t\tthis.randomFunction = Objects.requireNonNull(randomFunction);\n\t\t}\n\n\t\t@Override\n\t\tpublic long nextLong() {\n\t\t\tbyte[] bytes = this.randomFunction.apply(Long.BYTES);\n\t\t\treturn ByteUtil.toNumber(bytes);\n\t\t}\n\n\t\tpublic long nextLong(int length) {\n\t\t\tbyte[] bytes = nextBytes(length);\n\t\t\treturn ByteUtil.toNumber(bytes);\n\t\t}\n\n\t\t@Override\n\t\tpublic byte[] nextBytes(int length) {\n\t\t\treturn this.randomFunction.apply(length);\n\t\t}\n\n\t\t/**\n\t\t * Returns a new random function.\n\t\t * \n\t\t * @param random a random\n\t\t * @return a function\n\t\t */\n\t\tprivate static IntFunction<byte[]> newSafeFunction(Random random) {\n\t\t\tfinal Random entropy = random != null ? random : new SecureRandom();\n\t\t\treturn (final int length) -> {\n\t\t\t\tfinal byte[] bytes = new byte[length];\n\t\t\t\tentropy.nextBytes(bytes);\n\t\t\t\treturn bytes;\n\t\t\t};\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/AbstTimeBasedFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.function.ClockSeqFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.TimeFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.impl.DefaultClockSeqFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.impl.DefaultNodeIdFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.impl.DefaultTimeFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.impl.HashNodeIdFunction;\nimport 
com.pinecone.ulf.util.guid.i128.factory.function.impl.MacNodeIdFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.impl.RandomNodeIdFunction;\nimport com.pinecone.ulf.util.guid.i128.factory.function.impl.WindowsTimeFunction;\nimport com.pinecone.ulf.util.guid.i128.util.UuidTime;\nimport com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil;\nimport com.pinecone.ulf.util.guid.i128.util.internal.SettingsUtil;\n\nimport java.time.Instant;\nimport java.util.UUID;\nimport java.util.concurrent.locks.ReentrantLock;\n\n/**\n * Abstract factory for creating time-based unique identifiers (UUIDv1, UUIDv2\n * and UUIDv6).\n * <p>\n * The time stamp has 100-nanoseconds resolution, starting from 1582-10-15,\n * which is a date known as Gregorian Epoch. The the time stamp rolls over\n * around AD 5235 (1582 + 2^60 / 365.25 / 24 / 60 / 60 / 10000000).\n * <p>\n * The node identifier can be:\n * <ul>\n * <li>A MAC address;\n * <li>A hash of host name, MAC and IP;\n * <li>A random number that always changes;\n * <li>A specific number chosen by someone.\n * </ul>\n * <p>\n * The node identifier used by this factory can be controlled by defining a\n * system property <code>'uuidcreator.node'</code> or an environment variable\n * <code>'UUIDCREATOR_NODE'</code>. The system property has preference over the\n * environment variable.\n * <p>\n * Options accepted by the system property and the environment variable:\n * <ul>\n * <li>The string \"mac\" for using the MAC address;\n * <li>The string \"hash\" for using a hash of host name, MAC and IP;\n * <li>The string \"random\" for using a random number that always changes;\n * <li>The string representation of a specific number between 0 and 2^48-1.\n * </ul>\n * <p>\n * If a property or variable is defined, all UUIDs generated by this factory\n * will be based on it.\n * <p>\n * Otherwise, if no property or variable is defined, a random node identifier is\n * generated once at instantiation. 
This is the default.\n * <p>\n * Example of system property definition:\n * \n * <pre>{@code\n * # Append to VM arguments\n * -Duuidcreator.node=\"mac\"\n * }</pre>\n * <p>\n * Example of environment variable definition:\n * \n * <pre>{@code\n * # Append to ~/.profile\n * export UUIDCREATOR_NODE=\"mac\"\n * }</pre>\n *\n * @see TimeFunction\n * @see NodeIdFunction\n * @see ClockSeqFunction\n * @see <a href=\"https://www.rfc-editor.org/rfc/rfc9562.html\">RFC 9562</a>\n */\npublic abstract class AbstTimeBasedFactory extends UuidFactory {\n\n\t/**\n\t * The time function.\n\t */\n\tprotected TimeFunction timeFunction;\n\t/**\n\t * The node function.\n\t */\n\tprotected NodeIdFunction nodeidFunction;\n\t/**\n\t * The clock sequence function.\n\t */\n\tprotected ClockSeqFunction clockseqFunction;\n\n\tprivate static final String NODE_MAC = \"mac\";\n\tprivate static final String NODE_HASH = \"hash\";\n\tprivate static final String NODE_RANDOM = \"random\";\n\n\t/**\n\t * The reentrant lock for synchronization.\n\t */\n\tprotected final ReentrantLock lock = new ReentrantLock();\n\n\tprivate static final long EPOCH_TIMESTAMP = TimeFunction.toUnixTimestamp(UuidTime.EPOCH_GREG);\n\n\t/**\n\t * A protected constructor that receives a builder object.\n\t * \n\t * @param version the version number (1, 2 or 6)\n\t * @param builder a builder object\n\t */\n\tprotected AbstTimeBasedFactory(UuidVersion version, Builder<?, ?> builder) {\n\t\tsuper(version);\n\t\tthis.timeFunction = builder.getTimeFunction();\n\t\tthis.nodeidFunction = builder.getNodeIdFunction();\n\t\tthis.clockseqFunction = builder.getClockSeqFunction();\n\t}\n\n\t/**\n\t * Returns a time-based UUID.\n\t * \n\t * @return a time-based UUID\n\t */\n\t@Override\n\tpublic GUID128 create() {\n\t\tlock.lock();\n\t\ttry {\n\n\t\t\t// Get the time stamp\n\t\t\tfinal long timestamp = TimeFunction.toExpectedRange(this.timeFunction.getAsLong() - EPOCH_TIMESTAMP);\n\n\t\t\t// Get the node identifier\n\t\t\tfinal long 
nodeIdentifier = NodeIdFunction.toExpectedRange(this.nodeidFunction.getAsLong());\n\n\t\t\t// Get the clock sequence\n\t\t\tfinal long clockSequence = ClockSeqFunction.toExpectedRange(this.clockseqFunction.applyAsLong(timestamp));\n\n\t\t\t// Format the most significant bits\n\t\t\tfinal long msb = this.formatMostSignificantBits(timestamp);\n\n\t\t\t// Format the least significant bits\n\t\t\tfinal long lsb = this.formatLeastSignificantBits(nodeIdentifier, clockSequence);\n\n\t\t\treturn new UUID128(msb, lsb);\n\n\t\t} finally {\n\t\t\tlock.unlock();\n\t\t}\n\t}\n\n\t/**\n\t * Returns a time-based UUID.\n\t * \n\t * @return a time-based UUID\n\t */\n\t@Override\n\tpublic GUID128 create(Parameters parameters) {\n\t\treturn create(); // ignore arguments\n\t}\n\n\t/**\n\t * Returns the most significant bits of the UUID.\n\t * <p>\n\t * It implements the algorithm for generating UUIDv1.\n\t * \n\t * @param timestamp the number of 100-nanoseconds since 1970-01-01 (Unix epoch)\n\t * @return the MSB\n\t */\n\tprotected long formatMostSignificantBits(final long timestamp) {\n\t\treturn ((timestamp & 0x0fff_0000_00000000L) >>> 48) //\n\t\t\t\t| ((timestamp & 0x0000_ffff_00000000L) >>> 16) //\n\t\t\t\t| ((timestamp & 0x0000_0000_ffffffffL) << 32) //\n\t\t\t\t| 0x0000000000001000L; // apply version 1\n\t}\n\n\t/**\n\t * Returns the least significant bits of the UUID.\n\t * \n\t * @param nodeIdentifier a node identifier\n\t * @param clockSequence  a clock sequence\n\t * @return the LSB\n\t */\n\tprotected long formatLeastSignificantBits(final long nodeIdentifier, final long clockSequence) {\n\t\treturn ((((clockSequence << 48) | (nodeIdentifier & 0x0000ffffffffffffL)) //\n\t\t\t\t& 0x3fffffffffffffffL) // clear variant bits\n\t\t\t\t| 0x8000000000000000L); // apply variant bits\n\t}\n\n\t/**\n\t * Select the node identifier function.\n\t * \n\t * This method reads the system property 'uuidcreator.node' and the environment\n\t * variable 'UUIDCREATOR_NODE' to decide what node 
identifier function must be\n\t * used.\n\t * \n\t * 1. If it finds the string \"mac\", the generator will use the MAC address.\n\t * \n\t * 2. If it finds the string \"hash\", the generator will use the system data\n\t * hash.\n\t * \n\t * 3. If it finds the string \"random\", the generator will use a random number\n\t * that always changes.\n\t * \n\t * 4. If it finds the string representation of a specific number in octal,\n\t * hexadecimal or decimal format, the generator will use the number represented.\n\t * \n\t * 5. Else, a random number will be used by the generator.\n\t * \n\t * @return a node function\n\t */\n\tprotected static NodeIdFunction selectNodeIdFunction() {\n\n\t\tString string = SettingsUtil.getProperty(SettingsUtil.PROPERTY_NODE);\n\n\t\tif (NODE_MAC.equalsIgnoreCase(string)) {\n\t\t\treturn new MacNodeIdFunction();\n\t\t}\n\n\t\tif (NODE_HASH.equalsIgnoreCase(string)) {\n\t\t\treturn new HashNodeIdFunction();\n\t\t}\n\n\t\tif (NODE_RANDOM.equalsIgnoreCase(string)) {\n\t\t\treturn new RandomNodeIdFunction();\n\t\t}\n\n\t\tLong number = SettingsUtil.getNodeIdentifier();\n\t\tif (number != null) {\n\t\t\tfinal long nodeid = NodeIdFunction.toExpectedRange(number);\n\t\t\treturn () -> nodeid;\n\t\t}\n\n\t\treturn new DefaultNodeIdFunction();\n\t}\n\n\t/**\n\t * Select the time function.\n\t * \n\t * If the operating system is WINDOWS, it returns a function that is more\n\t * efficient for its typical time granularity (15.6ms). 
Otherwise, it returns\n\t * the default time function.\n\t * \n\t * @return a time function\n\t */\n\tprotected static TimeFunction selectTimeFunction() {\n\n\t\t// check if the operating system is WINDOWS\n\t\tfinal String os = System.getProperty(\"os.name\");\n\t\tif (os != null && os.toLowerCase().startsWith(\"win\")) {\n\t\t\treturn new WindowsTimeFunction();\n\t\t}\n\n\t\treturn new DefaultTimeFunction();\n\t}\n\n\t/**\n\t * Abstract builder for creating a time-based factory.\n\t */\n\tpublic abstract static class Builder<T, B extends Builder<T, B>> {\n\n\t\t/**\n\t\t * The time function.\n\t\t */\n\t\tprotected TimeFunction timeFunction;\n\t\t/**\n\t\t * The node function.\n\t\t */\n\t\tprotected NodeIdFunction nodeidFunction;\n\t\t/**\n\t\t * The clock sequence function.\n\t\t */\n\t\tprotected ClockSeqFunction clockseqFunction;\n\n\t\t/**\n\t\t * Get the time function.\n\t\t * \n\t\t * @return a function\n\t\t */\n\t\tprotected TimeFunction getTimeFunction() {\n\t\t\tif (this.timeFunction == null) {\n\t\t\t\tthis.timeFunction = selectTimeFunction();\n\t\t\t}\n\t\t\treturn this.timeFunction;\n\t\t}\n\n\t\t/**\n\t\t * Get the node function.\n\t\t * \n\t\t * @return a function\n\t\t */\n\t\tprotected NodeIdFunction getNodeIdFunction() {\n\t\t\tif (this.nodeidFunction == null) {\n\t\t\t\tthis.nodeidFunction = selectNodeIdFunction();\n\t\t\t}\n\t\t\treturn this.nodeidFunction;\n\t\t}\n\n\t\t/**\n\t\t * Get the clock sequence function.\n\t\t * \n\t\t * @return a function\n\t\t */\n\t\tprotected ClockSeqFunction getClockSeqFunction() {\n\t\t\tif (this.clockseqFunction == null) {\n\t\t\t\tthis.clockseqFunction = new DefaultClockSeqFunction();\n\t\t\t}\n\t\t\treturn this.clockseqFunction;\n\t\t}\n\n\t\t/**\n\t\t * Set the time function.\n\t\t * \n\t\t * @param timeFunction a function\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withTimeFunction(TimeFunction timeFunction) {\n\t\t\tthis.timeFunction = 
timeFunction;\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the node function\n\t\t * \n\t\t * @param nodeidFunction a function\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withNodeIdFunction(NodeIdFunction nodeidFunction) {\n\t\t\tthis.nodeidFunction = nodeidFunction;\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the clock sequence function\n\t\t * \n\t\t * @param clockseqFunction a function\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withClockSeqFunction(ClockSeqFunction clockseqFunction) {\n\t\t\tthis.clockseqFunction = clockseqFunction;\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the fixed instant.\n\t\t * \n\t\t * @param instant an instant\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withInstant(Instant instant) {\n\t\t\tfinal long timestamp = TimeFunction.toUnixTimestamp(instant);\n\t\t\tthis.timeFunction = () -> timestamp;\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the fixed clock sequence.\n\t\t * \n\t\t * @param clockseq a clock sequence\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withClockSeq(long clockseq) {\n\t\t\tfinal long clockSequence = ClockSeqFunction.toExpectedRange(clockseq);\n\t\t\tthis.clockseqFunction = x -> clockSequence;\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set a fixed clock sequence.\n\t\t * \n\t\t * @param clockseq a clock sequence\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withClockSeq(byte[] clockseq) {\n\t\t\tfinal long clockSequence = ClockSeqFunction.toExpectedRange(ByteUtil.toNumber(clockseq));\n\t\t\tthis.clockseqFunction = x -> clockSequence;\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set a fixed node.\n\t\t * \n\t\t * @param nodeid a node\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withNodeId(long nodeid) 
{\n\t\t\tfinal long nodeIdentifier = NodeIdFunction.toExpectedRange(nodeid);\n\t\t\tthis.nodeidFunction = () -> nodeIdentifier;\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set a fixed node\n\t\t * \n\t\t * @param nodeid a node\n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withNodeId(byte[] nodeid) {\n\t\t\tfinal long nodeIdentifier = NodeIdFunction.toExpectedRange(ByteUtil.toNumber(nodeid));\n\t\t\tthis.nodeidFunction = () -> nodeIdentifier;\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the node function to MAC strategy.\n\t\t * \n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withMacNodeId() {\n\t\t\tthis.nodeidFunction = new MacNodeIdFunction();\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the node function to hash strategy.\n\t\t * \n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withHashNodeId() {\n\t\t\tthis.nodeidFunction = new HashNodeIdFunction();\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Set the node function to random strategy.\n\t\t * \n\t\t * @return the builder\n\t\t */\n\t\t@SuppressWarnings(\"unchecked\")\n\t\tpublic B withRandomNodeId() {\n\t\t\tthis.nodeidFunction = new RandomNodeIdFunction();\n\t\t\treturn (B) this;\n\t\t}\n\n\t\t/**\n\t\t * Finish the factory building.\n\t\t * \n\t\t * @return the built factory\n\t\t */\n\t\tpublic abstract T build();\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/UuidFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.codec.StandardBinaryCodec;\nimport com.pinecone.ulf.util.guid.i128.codec.StandardStringCodec;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidLocalDomain;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidNamespace;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\n\nimport java.nio.charset.StandardCharsets;\nimport java.time.Instant;\nimport java.util.Arrays;\nimport java.util.Objects;\nimport java.util.UUID;\n\n/**\n * Abstract factory that is base for all UUID factories.\n */\npublic 
abstract class UuidFactory {\n\n\t/**\n\t * Version number.\n\t */\n\tprotected final UuidVersion version;\n\n\t/**\n\t * Version bit mask.\n\t */\n\tprotected final long versionMask;\n\n\t/**\n\t * Default Constructor.\n\t * <p>\n\t * The version used is {@link UuidVersion#VERSION_UNKNOWN}.\n\t */\n\tpublic UuidFactory() {\n\t\tthis.version = UuidVersion.VERSION_UNKNOWN;\n\t\tthis.versionMask = (long) version.getValue() << 12;\n\t}\n\n\t/**\n\t * Constructor with a version number.\n\t * \n\t * @param version a version number\n\t */\n\tpublic UuidFactory(UuidVersion version) {\n\t\tthis.version = version;\n\t\tthis.versionMask = (long) version.getValue() << 12;\n\t}\n\n\t/**\n\t * Returns the version number for this factory.\n\t * \n\t * @return the version number\n\t */\n\tpublic UuidVersion getVersion() {\n\t\treturn this.version;\n\t}\n\n\t/**\n\t * Create a UUID\n\t * \n\t * @return a UUID\n\t */\n\tpublic abstract GUID128 create();\n\n\t/**\n\t * Creates a UUID using parameters.\n\t * \n\t * @param parameters parameters object\n\t * @return a UUID\n\t */\n\tpublic abstract GUID128 create(Parameters parameters);\n\n\t/**\n\t * Parameters object to be used with a {@link UuidFactory#create(Parameters)}.\n\t */\n\tpublic static class Parameters {\n\n\t\t/**\n\t\t * Instant to be used.\n\t\t */\n\t\tprivate final Instant instant;\n\n\t\t/**\n\t\t * Name space byte array.\n\t\t */\n\t\tprivate final byte[] namespace;\n\n\t\t/**\n\t\t * Name byte array.\n\t\t */\n\t\tprivate final byte[] name;\n\n\t\t/**\n\t\t * Local domain byte.\n\t\t */\n\t\tprivate final byte localDomain;\n\n\t\t/**\n\t\t * Local identifier number.\n\t\t */\n\t\tprivate final int localIdentifier;\n\n\t\t/**\n\t\t * Constructor using a builder.\n\t\t * \n\t\t * @param builder a builder\n\t\t */\n\t\tpublic Parameters(Builder builder) {\n\t\t\tObjects.requireNonNull(builder);\n\t\t\tthis.instant = builder.instant;\n\t\t\tthis.namespace = builder.namespace;\n\t\t\tthis.name = 
builder.name;\n\t\t\tthis.localDomain = builder.localDomain;\n\t\t\tthis.localIdentifier = builder.localIdentifier;\n\t\t}\n\n\t\tpublic Instant getInstant() {\n\t\t\treturn this.instant;\n\t\t}\n\n\t\t/**\n\t\t * Get the name space bytes.\n\t\t * \n\t\t * @return a byte array\n\t\t */\n\t\tpublic byte[] getNamespace() {\n\t\t\treturn this.namespace;\n\t\t}\n\n\t\t/**\n\t\t * Get the name bytes.\n\t\t * \n\t\t * @return a byte array\n\t\t */\n\t\tpublic byte[] getName() {\n\t\t\treturn this.name;\n\t\t}\n\n\t\t/**\n\t\t * Get the local domain.\n\t\t * \n\t\t * @return the local domain\n\t\t */\n\t\tpublic byte getLocalDomain() {\n\t\t\treturn this.localDomain;\n\t\t}\n\n\t\t/**\n\t\t * Get the local identifier.\n\t\t * \n\t\t * @return the local identifier\n\t\t */\n\t\tpublic int getLocalIdentifier() {\n\t\t\treturn this.localIdentifier;\n\t\t}\n\n\t\t/**\n\t\t * Returns a new builder.\n\t\t * \n\t\t * @return a builder\n\t\t */\n\t\tpublic static Builder builder() {\n\t\t\treturn new Builder();\n\t\t}\n\n\t\t/**\n\t\t * Parameters builder.\n\t\t */\n\t\tpublic static class Builder {\n\n\t\t\t/**\n\t\t\t * Instant to be used.\n\t\t\t */\n\t\t\tprivate Instant instant;\n\n\t\t\t/**\n\t\t\t * Name space byte array.\n\t\t\t */\n\t\t\tprivate byte[] namespace = null;\n\n\t\t\t/**\n\t\t\t * Name byte array.\n\t\t\t */\n\t\t\tprivate byte[] name = null;\n\n\t\t\t/**\n\t\t\t * Local domain byte.\n\t\t\t */\n\t\t\tprivate byte localDomain;\n\n\t\t\t/**\n\t\t\t * Local identifier number.\n\t\t\t */\n\t\t\tprivate int localIdentifier;\n\n\t\t\tprivate Builder() {\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the instant provided.\n\t\t\t * \n\t\t\t * @param instant an instant\n\t\t\t * @return the builder\n\t\t\t */\n\t\t\tpublic Builder withInstant(Instant instant) {\n\t\t\t\tthis.instant = instant;\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the name space UUID.\n\t\t\t * \n\t\t\t * @param namespace a name space\n\t\t\t * @return the builder\n\t\t\t */\n\t\t\tpublic 
Builder withNamespace(GUID128 namespace) {\n\t\t\t\tthis.namespace = namespaceBytes(namespace);\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the name space string.\n\t\t\t * \n\t\t\t * @param namespace a name space\n\t\t\t * @return the builder\n\t\t\t */\n\t\t\tpublic Builder withNamespace(String namespace) {\n\t\t\t\tthis.namespace = namespaceBytes(namespace);\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the name space enum.\n\t\t\t * \n\t\t\t * @param namespace a name space\n\t\t\t * @return the builder\n\t\t\t */\n\t\t\tpublic Builder withNamespace(UuidNamespace namespace) {\n\t\t\t\tthis.namespace = namespaceBytes(namespace);\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the name byte array.\n\t\t\t * \n\t\t\t * It makes a copy of the input byte array.\n\t\t\t * \n\t\t\t * @param name a name\n\t\t\t * @return the builder\n\t\t\t */\n\t\t\tpublic Builder withName(byte[] name) {\n\t\t\t\tthis.name = nameBytes(name);\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the name string.\n\t\t\t * \n\t\t\t * The string is encoded into UTF-8 byte array.\n\t\t\t * \n\t\t\t * @param name a name\n\t\t\t * @return the builder\n\t\t\t */\n\t\t\tpublic Builder withName(String name) {\n\t\t\t\tthis.name = nameBytes(name);\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the local domain.\n\t\t\t * \n\t\t\t * @param localDomain the local domain\n\t\t\t * @return the builder\n\t\t\t */\n\t\t\tpublic Builder withLocalDomain(UuidLocalDomain localDomain) {\n\t\t\t\tthis.localDomain = localDomain.getValue();\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the local domain.\n\t\t\t * \n\t\t\t * @param localDomain the local domain\n\t\t\t * @return the builder\n\t\t\t */\n\t\t\tpublic Builder withLocalDomain(byte localDomain) {\n\t\t\t\tthis.localDomain = localDomain;\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Use the local identifier.\n\t\t\t * \n\t\t\t * @param localIdentifier the local identifier\n\t\t\t * 
@return the builder\n\t\t\t */\n\t\t\tpublic Builder withLocalIdentifier(int localIdentifier) {\n\t\t\t\tthis.localIdentifier = localIdentifier;\n\t\t\t\treturn this;\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * Finishes the parameters build.\n\t\t\t * \n\t\t\t * @return the build parameters.\n\t\t\t */\n\t\t\tpublic Parameters build() {\n\t\t\t\treturn new Parameters(this);\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Returns a copy of the input byte array.\n\t * \n\t * @param name a name string\n\t * @return a byte array\n\t * @throws IllegalArgumentException if the input is null\n\t */\n\tprotected static byte[] nameBytes(byte[] name) {\n\t\tObjects.requireNonNull(name, \"Null name\");\n\t\treturn Arrays.copyOf(name, name.length);\n\t}\n\n\t/**\n\t * Converts a name string into a byte array.\n\t * \n\t * @param name a name string\n\t * @return a byte array\n\t * @throws IllegalArgumentException if the input is null\n\t */\n\tprotected static byte[] nameBytes(String name) {\n\t\tObjects.requireNonNull(name, \"Null name\");\n\t\treturn name.getBytes(StandardCharsets.UTF_8);\n\t}\n\n\t/**\n\t * Converts a name space enumeration into a byte array.\n\t * \n\t * @param namespace a name space enumeration\n\t * @return a byte array\n\t */\n\tprotected static byte[] namespaceBytes(UuidNamespace namespace) {\n\t\tif (namespace != null) {\n\t\t\treturn namespaceBytes(namespace.getValue());\n\t\t}\n\t\treturn null; // the name space can be null\n\t}\n\n\t/**\n\t * Converts a name space UUID into a byte array.\n\t * \n\t * @param namespace a name space UUID\n\t * @return a byte array\n\t */\n\tprotected static byte[] namespaceBytes(GUID128 namespace) {\n\t\tif (namespace != null) {\n\t\t\treturn StandardBinaryCodec.INSTANCE.encode(namespace);\n\t\t}\n\t\treturn null; // the name space can be null\n\t}\n\n\t/**\n\t * Converts a name space string into a byte array.\n\t * \n\t * @param namespace a name space string\n\t * @return a byte array\n\t * @throws InvalidUuidException if the name space is 
invalid\n\t */\n\tprotected static byte[] namespaceBytes(String namespace) {\n\t\tif (namespace != null) {\n\t\t\treturn StandardBinaryCodec.INSTANCE.encode(StandardStringCodec.INSTANCE.decode(namespace));\n\t\t}\n\t\treturn null; // the name space can be null\n\t}\n\n\t/**\n\t * Creates a UUID from a pair of numbers.\n\t * <p>\n\t * It applies the version and variant numbers to the resulting UUID.\n\t * \n\t * @param msb the most significant bits\n\t * @param lsb the least significant bits\n\t * @return a UUID\n\t */\n\tprotected GUID128 toUuid(final long msb, final long lsb) {\n\t\tfinal long msb0 = (msb & 0xffffffffffff0fffL) | this.versionMask; // set version\n\t\tfinal long lsb0 = (lsb & 0x3fffffffffffffffL) | 0x8000000000000000L; // set variant\n\t\treturn new UUID128(msb0, lsb0);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/ClockSeqFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function;\n\nimport com.pinecone.ulf.util.guid.i128.util.internal.RandomUtil;\n\nimport java.util.function.LongUnaryOperator;\n\n/**\n * Function that must return a number between 0 and 16383 (2^14-1).\n * <p>\n * It receives as argument a number of 100-nanoseconds since 1970-01-01 (Unix\n * epoch).\n * <p>\n * Example:\n * \n * <pre>{@code\n * // A function that returns new random clock sequences\n * ClockSeqFunction f = t -> ClockSeqFunction.getRandom();\n * }</pre>\n * \n */\n@FunctionalInterface\npublic interface ClockSeqFunction extends LongUnaryOperator {\n\n\t/**\n\t * Returns a new random clock sequence in the range 0 to 16383 (2^14-1).\n\t * \n\t * @return a number in the range 0 to 16383 (2^14-1)\n\t */\n\tstatic long getRandom() {\n\t\treturn 
toExpectedRange(RandomUtil.newSecureRandom().nextLong());\n\t}\n\n\t/**\n\t * Clears the leading bits so that the resulting number is within the range 0 to\n\t * 16383 (2^14-1).\n\t * <p>\n\t * The result is equivalent to {@code n % 2^14}.\n\t * \n\t * @param clockseq a clock sequence\n\t * @return a number in the range 0 to 16383 (2^14-1).\n\t */\n\tstatic long toExpectedRange(final long clockseq) {\n\t\treturn clockseq & 0x0000000000003fffL;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/NodeIdFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function;\n\nimport com.pinecone.ulf.util.guid.i128.util.internal.RandomUtil;\n\nimport java.util.function.LongSupplier;\n\n/**\n * Function that must return a number between 0 and 2^48-1.\n * <p>\n * Example:\n * \n * <pre>{@code\n * // A function that returns new random multicast node identifiers\n * NodeIdFunction f = () -> NodeIdFunction.getMulticastRandom();\n * }</pre>\n * \n */\n@FunctionalInterface\npublic interface NodeIdFunction extends LongSupplier {\n\n\t/**\n\t * Returns a new random node identifier.\n\t * \n\t * @return a number in the range 0 to 2^48-1.\n\t */\n\tstatic long getRandom() {\n\t\treturn toExpectedRange(RandomUtil.newSecureRandom().nextLong());\n\t}\n\n\t/**\n\t * Return a new random multicast node identifier.\n\t * \n\t * 
@return a number in the range 0 to 2^48-1.\n\t */\n\tstatic long getMulticastRandom() {\n\t\treturn toMulticast(getRandom());\n\t}\n\n\t/**\n\t * Clears the leading bits so that the resulting number is in the range 0 to\n\t * 2^48-1.\n\t * <p>\n\t * The result is equivalent to {@code n % 2^48}.\n\t * \n\t * @param nodeid the node identifier\n\t * @return a number in the range 0 to 2^48-1.\n\t */\n\tstatic long toExpectedRange(final long nodeid) {\n\t\treturn nodeid & 0x0000_ffffffffffffL;\n\t}\n\n\t/**\n\t * Sets the multicast bit of a node identifier.\n\t * <p>\n\t * It also clears leading bits so that the resulting number is within the range\n\t * 0 to 2^48-1.\n\t * \n\t * @param nodeid the node identifier\n\t * @return a node identifier with the multicast bit set\n\t */\n\tstatic long toMulticast(long nodeid) {\n\t\treturn (nodeid & 0x0000_ffffffffffffL) | 0x0000_010000000000L;\n\t}\n\n\t/**\n\t * Checks if the multicast bit of a node identifier is set.\n\t * \n\t * @param nodeid a node identifier\n\t * @return true if the node identifier is multicast\n\t */\n\tstatic boolean isMulticast(long nodeid) {\n\t\treturn (nodeid & 0x0000_010000000000L) == 0x0000_010000000000L;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/RandomFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function;\n\nimport java.util.function.IntFunction;\n\n/**\n * Function that must return an array of bytes with the given length.\n */\n@FunctionalInterface\npublic interface RandomFunction extends IntFunction<byte[]> {\n\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/TimeFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function;\n\nimport com.pinecone.ulf.util.guid.i128.util.UuidTime;\n\nimport java.time.Instant;\nimport java.util.function.LongSupplier;\n\n/**\n * Function that must return a number of 100-nanoseconds since 1970-01-01 (Unix\n * epoch).\n * <p>\n * Example:\n * \n * <pre>{@code\n * // A function that returns `Instant.now()` as a number of 100ns\n * TimeFunction f = () -> TimeFunction.toUnixTimestamp(Instant.now());\n * }</pre>\n * \n * <p>\n * In JDK 8, {@link Instant#now()} has millisecond precision, in spite of\n * {@link Instant} has nanoseconds resolution. 
In JDK 9+, {@link Instant#now()}\n * has microsecond precision.\n * \n * @see <a href=\"https://stackoverflow.com/questions/1712205\">Current time in\n *      microseconds in java</a>\n * @see <a href=\"https://bugs.openjdk.java.net/browse/JDK-8068730\">Increase the\n *      precision of the implementation of java.time.Clock.systemUTC()</a>\n */\n@FunctionalInterface\npublic interface TimeFunction extends LongSupplier {\n\n\t/**\n\t * Converts an instant to a number of 100-nanoseconds since 1970-01-01 (Unix\n\t * epoch).\n\t * \n\t * @param instant an instant\n\t * @return a number of 100-nanoseconds since 1970-01-01 (Unix epoch)\n\t */\n\tstatic long toUnixTimestamp(final Instant instant) {\n\t\treturn UuidTime.toUnixTimestamp(instant);\n\t}\n\n\t/**\n\t * Clears the leading bits so that the resulting number is in the range 0 to\n\t * 2^60-1.\n\t * <p>\n\t * The result is equivalent to {@code n % 2^60}.\n\t * \n\t * @param timestamp a number of 100-nanoseconds since 1970-01-01 (Unix epoch)\n\t * @return a number in the range 0 to 2^60-1.\n\t */\n\tstatic long toExpectedRange(final long timestamp) {\n\t\treturn timestamp & 0x0_fffffffffffffffL;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/DefaultClockSeqFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function.impl;\n\nimport com.pinecone.ulf.util.guid.i128.factory.function.ClockSeqFunction;\n\nimport java.util.SplittableRandom;\nimport java.util.concurrent.atomic.AtomicInteger;\n\n/**\n * Function that returns a clock sequence.\n * \n * @see ClockSeqFunction\n */\npublic final class DefaultClockSeqFunction implements ClockSeqFunction {\n\n\tprivate AtomicInteger sequence;\n\tprivate long lastTimestamp = -1;\n\n\t/**\n\t * The pool of clock sequence numbers.\n\t */\n\tprotected static final ClockSeqPool POOL = new ClockSeqPool();\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic DefaultClockSeqFunction() {\n\t\tfinal int initial = POOL.random();\n\t\tthis.sequence = new AtomicInteger(initial);\n\t}\n\n\t@Override\n\tpublic long applyAsLong(final 
long timestamp) {\n\t\tif (timestamp > this.lastTimestamp) {\n\t\t\tthis.lastTimestamp = timestamp;\n\t\t\treturn this.sequence.get();\n\t\t}\n\t\tthis.lastTimestamp = timestamp;\n\t\treturn this.next();\n\t}\n\n\t/**\n\t * Get the next random clock sequence number.\n\t * \n\t * @return a number\n\t */\n\tpublic int next() {\n\t\tif (this.sequence.incrementAndGet() > ClockSeqPool.POOL_MAX) {\n\t\t\tthis.sequence.set(ClockSeqPool.POOL_MIN);\n\t\t}\n\t\treturn this.sequence.updateAndGet(POOL::take);\n\t}\n\n\t/**\n\t * Nested class that manages a pool of 16384 clock sequence values.\n\t * <p>\n\t * The pool is implemented as an array of 2048 bytes (16384 bits). Each bit of\n\t * the array corresponds to a clock sequence value.\n\t * <p>\n\t * It is used to avoid that two time-based factories use the same clock sequence\n\t * at same time in a class loader.\n\t */\n\tstatic final class ClockSeqPool {\n\n\t\tprivate final byte[] pool = new byte[2048];\n\t\tprivate static final int POOL_SIZE = 16384; // 2^14 = 16384\n\n\t\t/**\n\t\t * The minimum pool size, which is zero.\n\t\t */\n\t\tpublic static final int POOL_MIN = 0x00000000;\n\n\t\t/**\n\t\t * The maximum pool size, which is 16383 (2^14-1).\n\t\t */\n\t\tpublic static final int POOL_MAX = 0x00003fff; // 2^14-1 = 16383\n\n\t\t/**\n\t\t * Take a value from the pool.\n\t\t * <p>\n\t\t * If the value to be taken is already in use, it is incremented until a free\n\t\t * value is found and returned.\n\t\t * <p>\n\t\t * In the case that all pool values are in use, the pool is cleared and the last\n\t\t * incremented value is returned.\n\t\t * <p>\n\t\t * It does nothing to negative arguments.\n\t\t * \n\t\t * @param take value to be taken from the pool\n\t\t * @return the value to be borrowed if not used\n\t\t */\n\t\tpublic synchronized int take(final int take) {\n\t\t\tint value = take;\n\t\t\tfor (int i = 0; i < POOL_SIZE; i++) {\n\t\t\t\tif (setBit(value)) {\n\t\t\t\t\treturn value;\n\t\t\t\t}\n\t\t\t\tvalue = 
++value % POOL_SIZE;\n\t\t\t}\n\t\t\tclearPool();\n\t\t\tsetBit(value);\n\t\t\treturn value;\n\t\t}\n\n\t\t/**\n\t\t * Take a random value from the pool.\n\t\t * \n\t\t * @return the random value to be borrowed if not used\n\t\t */\n\t\tpublic synchronized int random() {\n\t\t\t// Choose a random number between 0 and 16383\n\t\t\tint random = Math.abs(new SplittableRandom().nextInt()) % POOL_SIZE;\n\t\t\treturn this.take(random);\n\t\t}\n\n\t\t/**\n\t\t * Set a bit from the byte array that represents the pool.\n\t\t * <p>\n\t\t * This operation corresponds to setting a value as used.\n\t\t * <p>\n\t\t * It returns false if the value is not free.\n\t\t * <p>\n\t\t * It does nothing to negative arguments.\n\t\t * \n\t\t * @param value the value to be taken from the pool\n\t\t * @return true if success\n\t\t */\n\t\tprivate synchronized boolean setBit(int value) {\n\n\t\t\tif (value < 0) {\n\t\t\t\treturn false;\n\t\t\t}\n\n\t\t\tfinal int byteIndex = value / 8;\n\t\t\tfinal int bitIndex = value % 8;\n\n\t\t\tfinal int mask = (0x00000001 << bitIndex);\n\t\t\tfinal boolean clear = (pool[byteIndex] & mask) == 0;\n\n\t\t\tif (clear) {\n\t\t\t\tpool[byteIndex] = (byte) (pool[byteIndex] | mask);\n\t\t\t\treturn true;\n\t\t\t}\n\n\t\t\treturn false;\n\t\t}\n\n\t\t/**\n\t\t * Check if a value is used out of the pool.\n\t\t * \n\t\t * @param value a value to be checked in the pool\n\t\t * @return true if the value is used\n\t\t */\n\t\tpublic synchronized boolean isUsed(int value) {\n\n\t\t\tfinal int byteIndex = value / 8;\n\t\t\tfinal int bitIndex = value % 8;\n\n\t\t\tfinal int mask = (0x00000001 << bitIndex);\n\t\t\tboolean clear = (pool[byteIndex] & mask) == 0;\n\n\t\t\treturn !clear;\n\t\t}\n\n\t\t/**\n\t\t * Check if a value is free in the pool.\n\t\t * \n\t\t * @param value a value to be checked in the pool\n\t\t * @return true if the value is free\n\t\t */\n\t\tpublic synchronized boolean isFree(int value) {\n\t\t\treturn !this.isUsed(value);\n\t\t}\n\n\t\t/**\n\t\t 
* Count the used values out of the pool\n\t\t * \n\t\t * @return the count of used values\n\t\t */\n\t\tpublic synchronized int countUsed() {\n\t\t\tint counter = 0;\n\t\t\tfor (int i = 0; i < POOL_SIZE; i++) {\n\t\t\t\tif (this.isUsed(i)) {\n\t\t\t\t\tcounter++;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn counter;\n\t\t}\n\n\t\t/**\n\t\t * Count the free values in the pool.\n\t\t * \n\t\t * @return the count of free values\n\t\t */\n\t\tpublic synchronized int countFree() {\n\t\t\treturn POOL_SIZE - this.countUsed();\n\t\t}\n\n\t\t/**\n\t\t * Clear all bits of the byte array that represents the pool.\n\t\t * <p>\n\t\t * This corresponds to marking all pool values as free\n\t\t */\n\t\tpublic synchronized void clearPool() {\n\t\t\tfor (int i = 0; i < pool.length; i++) {\n\t\t\t\tpool[i] = 0;\n\t\t\t}\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/DefaultNodeIdFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function.impl;\n\nimport com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction;\n\n/**\n * Function that returns a final random multicast node identifier.\n * <p>\n * The random value is generated once during instantiation.\n * \n * @see NodeIdFunction\n */\npublic final class DefaultNodeIdFunction implements NodeIdFunction {\n\n\tprivate final long nodeIdentifier;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic DefaultNodeIdFunction() {\n\t\tthis.nodeIdentifier = NodeIdFunction.getMulticastRandom();\n\t}\n\n\t@Override\n\tpublic long getAsLong() {\n\t\treturn this.nodeIdentifier;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/DefaultRandomFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function.impl;\n\nimport com.pinecone.ulf.util.guid.i128.factory.function.RandomFunction;\nimport com.pinecone.ulf.util.guid.i128.util.internal.RandomUtil;\n\n/**\n * Function that returns an array of bytes with the given length.\n * \n * @see RandomFunction\n * @see RandomUtil\n */\npublic final class DefaultRandomFunction implements RandomFunction {\n\n\t@Override\n\tpublic byte[] apply(final int length) {\n\t\treturn RandomUtil.nextBytes(length);\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/DefaultTimeFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function.impl;\n\nimport com.pinecone.ulf.util.guid.i128.factory.function.TimeFunction;\n\nimport java.time.Clock;\nimport java.util.SplittableRandom;\n\nimport static com.pinecone.ulf.util.guid.i128.util.UuidTime.TICKS_PER_MILLI;\n\n/**\n * Function that returns a number of 100-nanoseconds since 1970-01-01 (Unix\n * epoch).\n * <p>\n * It can advance 1ms or more ahead of system clock on heavy load.\n * \n * @see TimeFunction\n */\npublic final class DefaultTimeFunction implements TimeFunction {\n\n\tprivate final Clock clock;\n\n\tprivate long lastTime = -1;\n\n\t// let go up to 1 second ahead of system clock\n\tprivate static final long advanceMax = 1_000L;\n\n\t// start the counter with a random number between 0 and 9,999\n\tprivate long counter = 
Math.abs(new SplittableRandom().nextLong()) % TICKS_PER_MILLI;\n\t// start the counter limit with a number between 10,000 and 19,999\n\tprivate long counterMax = counter + TICKS_PER_MILLI;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic DefaultTimeFunction() {\n\t\tthis.clock = Clock.systemUTC();\n\t}\n\n\t/**\n\t * Default constructor with a {@link Clock} instance.\n\t * \n\t * @param clock a clock\n\t */\n\tpublic DefaultTimeFunction(Clock clock) {\n\t\tthis.clock = clock;\n\t}\n\n\t@Override\n\tpublic long getAsLong() {\n\n\t\tcounter++; // always increment\n\n\t\t// get current system time\n\t\tlong time = clock.millis();\n\n\t\t// is it not too much ahead of system clock?\n\t\tif (advanceMax > Math.abs(lastTime - time)) {\n\t\t\ttime = Math.max(lastTime, time);\n\t\t}\n\n\t\t// check time change\n\t\tif (time == lastTime) {\n\t\t\t// if the time repeats,\n\t\t\t// check the counter limit\n\t\t\tif (counter >= counterMax) {\n\t\t\t\ttime++; // must go ahead of system clock\n\t\t\t\t// reset to a number between 0 and 9,999\n\t\t\t\tcounter = counter % TICKS_PER_MILLI;\n\t\t\t\t// reset to a number between 10,000 and 19,999\n\t\t\t\tcounterMax = counter + TICKS_PER_MILLI;\n\t\t\t}\n\t\t} else {\n\t\t\t// reset to a number between 0 and 9,999\n\t\t\tcounter = counter % TICKS_PER_MILLI;\n\t\t\t// reset to a number between 10,000 and 19,999\n\t\t\tcounterMax = counter + TICKS_PER_MILLI;\n\t\t}\n\n\t\t// save time for the next call\n\t\tlastTime = time;\n\n\t\t// simulate a high resolution clock\n\t\treturn (time * TICKS_PER_MILLI) + counter;\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/HashNodeIdFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function.impl;\n\nimport com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction;\nimport com.pinecone.ulf.util.guid.i128.util.MachineId;\nimport com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil;\n\n/**\n * Function that returns a hash of host name, MAC and IP.\n * <p>\n * The hash is calculated once during instantiation.\n * \n * @see NodeIdFunction\n * @see MachineId\n */\npublic final class HashNodeIdFunction implements NodeIdFunction {\n\n\tprivate final long nodeIdentifier;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic HashNodeIdFunction() {\n\t\tfinal byte[] hash = MachineId.getMachineHash();\n\t\tfinal long number = ByteUtil.toNumber(hash, 0, 6);\n\t\tthis.nodeIdentifier = 
NodeIdFunction.toMulticast(number);\n\t}\n\n\t@Override\n\tpublic long getAsLong() {\n\t\treturn this.nodeIdentifier;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/MacNodeIdFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function.impl;\n\nimport com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction;\nimport com.pinecone.ulf.util.guid.i128.util.internal.NetworkUtil;\n\nimport java.net.NetworkInterface;\nimport java.net.SocketException;\n\nimport static com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil.toNumber;\n\n/**\n * Function that returns a MAC address.\n * <p>\n * The MAC address is obtained once during instantiation.\n * <p>\n * If no MAC address is found, it returns a random multicast node identifier.\n * \n * @see NodeIdFunction\n */\npublic final class MacNodeIdFunction implements NodeIdFunction {\n\n\tprivate final long nodeIdentifier;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic MacNodeIdFunction() {\n\t\tthis.nodeIdentifier = 
getHardwareAddress();\n\t}\n\n\t@Override\n\tpublic long getAsLong() {\n\t\treturn this.nodeIdentifier;\n\t}\n\n\tprivate long getHardwareAddress() {\n\n\t\ttry {\n\t\t\tNetworkInterface nic = NetworkUtil.nic();\n\t\t\tif (nic != null) {\n\t\t\t\treturn toNumber(nic.getHardwareAddress());\n\t\t\t}\n\t\t} catch (SocketException e) {\n\t\t\treturn NodeIdFunction.getMulticastRandom();\n\t\t}\n\n\t\treturn NodeIdFunction.getMulticastRandom();\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/RandomNodeIdFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function.impl;\n\nimport com.pinecone.ulf.util.guid.i128.factory.function.NodeIdFunction;\nimport com.pinecone.ulf.util.guid.i128.util.internal.RandomUtil;\n\n/**\n * Function that returns a new random multicast node identifier.\n * <p>\n * The random value is generated with each new invocation.\n * \n * @see NodeIdFunction\n */\npublic final class RandomNodeIdFunction implements NodeIdFunction {\n\n\t@Override\n\tpublic long getAsLong() {\n\t\treturn NodeIdFunction.toMulticast(RandomUtil.nextLong());\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/function/impl/WindowsTimeFunction.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.function.impl;\n\nimport com.pinecone.ulf.util.guid.i128.factory.function.TimeFunction;\n\nimport java.time.Clock;\nimport java.util.SplittableRandom;\n\nimport static com.pinecone.ulf.util.guid.i128.util.UuidTime.TICKS_PER_MILLI;\n\n/**\n * Function that returns a number of 100-nanoseconds since 1970-01-01 (Unix\n * epoch).\n * <p>\n * This function is for WINDOWS systems.\n * <p>\n * On WINDOWS, the typical system time granularity is 15.625ms due to a default\n * 64Hz timer frequency.\n * <p>\n * It can advance 16ms or more ahead of system clock on heavy load.\n * \n * @see TimeFunction\n */\npublic final class WindowsTimeFunction implements TimeFunction {\n\n\tprivate final Clock clock;\n\n\tprivate long lastTime = -1;\n\n\t// let go up to 1 second 
ahead of system clock\n\tprivate static final long advanceMax = 1_000L;\n\n\t// arbitrary granularity greater than 15ms\n\tprivate static final long GRANULARITY = 16;\n\tprivate static final long TICKS_PER_GRANULARITY = TICKS_PER_MILLI * GRANULARITY;\n\n\t// start the counter with a random number between 0 and 159,999\n\tprivate long counter = Math.abs(new SplittableRandom().nextLong()) % TICKS_PER_GRANULARITY;\n\t// start the counter limit with a number between 160,000 and 319,999\n\tprivate long counterMax = counter + TICKS_PER_GRANULARITY;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic WindowsTimeFunction() {\n\t\tthis.clock = Clock.systemUTC();\n\t}\n\n\t/**\n\t * Constructor with a clock.\n\t * \n\t * @param clock a clock\n\t */\n\tpublic WindowsTimeFunction(Clock clock) {\n\t\tthis.clock = clock;\n\t}\n\n\t@Override\n\tpublic long getAsLong() {\n\n\t\tcounter++; // always increment\n\n\t\t// get calculated system time\n\t\tlong time = calculatedMillis();\n\n\t\t// is it not too much ahead of system clock?\n\t\tif (advanceMax > Math.abs(lastTime - time)) {\n\t\t\ttime = Math.max(lastTime, time);\n\t\t}\n\n\t\t// check time change\n\t\tif (time == lastTime) {\n\t\t\t// if the time repeats,\n\t\t\t// check the counter limit\n\t\t\tif (counter >= counterMax) {\n\t\t\t\ttime += GRANULARITY; // let it go forwards\n\t\t\t\t// reset to a number between 0 and 159,999\n\t\t\t\tcounter = counter % TICKS_PER_GRANULARITY;\n\t\t\t\t// reset to a number between 160,000 and 319,999\n\t\t\t\tcounterMax = counter + TICKS_PER_GRANULARITY;\n\t\t\t}\n\t\t} else {\n\t\t\t// reset to a number between 0 and 159,999\n\t\t\tcounter = counter % TICKS_PER_GRANULARITY;\n\t\t\t// reset to a number between 160,000 and 319,999\n\t\t\tcounterMax = counter + TICKS_PER_GRANULARITY;\n\t\t}\n\n\t\t// save time for the next call\n\t\tlastTime = time;\n\n\t\t// simulate a high resolution clock\n\t\treturn (time * TICKS_PER_MILLI) + counter;\n\t}\n\n\t/**\n\t * Returns the calculated time in 
milliseconds.\n\t * \n\t * It can be 16ms ahead of system time due to time granularity.\n\t * \n\t * @return the calculated time\n\t */\n\tprivate long calculatedMillis() {\n\t\tfinal long time = clock.millis();\n\t\treturn time + GRANULARITY - (time % GRANULARITY);\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/nonstandard/PrefixCombFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.nonstandard;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory;\n\nimport java.time.Clock;\nimport java.util.Random;\nimport java.util.UUID;\nimport java.util.function.LongSupplier;\n\n/**\n * Concrete factory for creating Prefix COMB GUIDs.\n * <p>\n * A Prefix COMB GUID is a UUID that combines a creation time with random bits.\n * <p>\n * The creation millisecond is a 6 bytes PREFIX at the MOST significant bits.\n * <p>\n * The created UUID is a UUIDv4 for compatibility with RFC 9562.\n * \n * @see <a href=\"http://www.informit.com/articles/article.aspx?p=25862\">The Cost\n *      of 
GUIDs as Primary Keys</a>\n */\npublic final class PrefixCombFactory extends AbstCombFactory {\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic PrefixCombFactory() {\n\t\tthis(builder());\n\t}\n\n\t/**\n\t * Constructor with a clock.\n\t * \n\t * @param clock a clock\n\t */\n\tpublic PrefixCombFactory(Clock clock) {\n\t\tthis(builder().withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a random.\n\t * \n\t * @param random a random generator\n\t */\n\tpublic PrefixCombFactory(Random random) {\n\t\tthis(builder().withRandom(random));\n\t}\n\n\t/**\n\t * Constructor with a random and a clock.\n\t * \n\t * @param random a random\n\t * @param clock  a clock\n\t */\n\tpublic PrefixCombFactory(Random random, Clock clock) {\n\t\tthis(builder().withRandom(random).withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a function which return random numbers.\n\t * \n\t * @param randomFunction a function\n\t */\n\tpublic PrefixCombFactory(LongSupplier randomFunction) {\n\t\tthis(builder().withRandomFunction(randomFunction));\n\t}\n\n\t/**\n\t * Constructor with a function which a function which return random numbers and\n\t * a clock.\n\t * \n\t * @param randomFunction a function\n\t * @param clock          a clock\n\t */\n\tpublic PrefixCombFactory(LongSupplier randomFunction, Clock clock) {\n\t\tthis(builder().withRandomFunction(randomFunction).withClock(clock));\n\t}\n\n\tprivate PrefixCombFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_RANDOM_BASED, builder);\n\t}\n\n\t/**\n\t * Builder of factories.\n\t */\n\tpublic static class Builder extends AbstCombFactory.Builder<PrefixCombFactory, Builder> {\n\t\t@Override\n\t\tpublic PrefixCombFactory build() {\n\t\t\treturn new PrefixCombFactory(this);\n\t\t}\n\t}\n\n\t/**\n\t * Returns a new builder.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Returns a Prefix COMB GUID.\n\t * \n\t * @return a UUIDv4\n\t */\n\t@Override\n\tpublic 
GUID128 create() {\n\t\tlock.lock();\n\t\ttry {\n\t\t\tfinal long time = instantFunction.get().toEpochMilli();\n\t\t\tfinal long long1 = this.random.nextLong(2);\n\t\t\tfinal long long2 = this.random.nextLong(8);\n\t\t\treturn make(time, long1, long2);\n\t\t} finally {\n\t\t\tlock.unlock();\n\t\t}\n\t}\n\n\tprivate GUID128 make(final long time, final long long1, final long long2) {\n\t\treturn toUuid((time << 16) | (long1 & 0x000000000000ffffL), long2);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/nonstandard/ShortPrefixCombFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.nonstandard;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory;\n\nimport java.time.Clock;\nimport java.util.Random;\nimport java.util.UUID;\nimport java.util.function.LongSupplier;\n\n/**\n * Concrete factory for creating Short Prefix COMB GUIDs.\n * <p>\n * A Short Prefix COMB GUID is a UUID that combines a creation time with random\n * bits.\n * <p>\n * The creation minute is a 2 bytes PREFIX at the MOST significant bits.\n * <p>\n * The prefix wraps around every ~45 days (2^16/60/24 = ~45).\n * <p>\n * The created UUID is a UUIDv4 for compatibility with RFC 9562.\n * \n * @see <a 
href=\n *      \"https://www.2ndquadrant.com/en/blog/sequential-uuid-generators/\">Sequential\n *      UUID Generators</a>\n */\npublic final class ShortPrefixCombFactory extends AbstCombFactory {\n\n\t/**\n\t * Interval in milliseconds.\n\t */\n\tprotected final int interval;\n\n\t/**\n\t * Default interval of 60 seconds in milliseconds.\n\t */\n\tprotected static final int DEFAULT_INTERVAL = 60_000;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic ShortPrefixCombFactory() {\n\t\tthis(builder());\n\t}\n\n\t/**\n\t * Constructor with a clock.\n\t * \n\t * @param clock a clock\n\t */\n\tpublic ShortPrefixCombFactory(Clock clock) {\n\t\tthis(builder().withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a random.\n\t * \n\t * @param random a random generator\n\t */\n\tpublic ShortPrefixCombFactory(Random random) {\n\t\tthis(builder().withRandom(random));\n\t}\n\n\t/**\n\t * Constructor with a random and a clock.\n\t * \n\t * @param random a random\n\t * @param clock  a clock\n\t */\n\tpublic ShortPrefixCombFactory(Random random, Clock clock) {\n\t\tthis(builder().withRandom(random).withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a function which return random numbers.\n\t * \n\t * @param randomFunction a function\n\t */\n\tpublic ShortPrefixCombFactory(LongSupplier randomFunction) {\n\t\tthis(builder().withRandomFunction(randomFunction));\n\t}\n\n\t/**\n\t * Constructor with a function which a function which return random numbers and\n\t * a clock.\n\t * \n\t * @param randomFunction a function\n\t * @param clock          a clock\n\t */\n\tpublic ShortPrefixCombFactory(LongSupplier randomFunction, Clock clock) {\n\t\tthis(builder().withRandomFunction(randomFunction).withClock(clock));\n\t}\n\n\tprivate ShortPrefixCombFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_RANDOM_BASED, builder);\n\t\tthis.interval = builder.getInterval();\n\t}\n\n\t/**\n\t * A builder of factories.\n\t */\n\tpublic static class Builder extends 
AbstCombFactory.Builder<ShortPrefixCombFactory, Builder> {\n\n\t\tprivate Integer interval;\n\n\t\t/**\n\t\t * Get the interval in milliseconds.\n\t\t * \n\t\t * @return the interval in milliseconds.\n\t\t */\n\t\tprotected int getInterval() {\n\t\t\tif (this.interval == null) {\n\t\t\t\tthis.interval = DEFAULT_INTERVAL;\n\t\t\t}\n\t\t\treturn this.interval;\n\t\t}\n\n\t\t/**\n\t\t * Set the interval in milliseconds.\n\t\t * \n\t\t * @param interval the interval in milliseconds\n\t\t * @return the builder\n\t\t */\n\t\tpublic Builder withInterval(int interval) {\n\t\t\tthis.interval = interval;\n\t\t\treturn this;\n\t\t}\n\n\t\t@Override\n\t\tpublic ShortPrefixCombFactory build() {\n\t\t\treturn new ShortPrefixCombFactory(this);\n\t\t}\n\t}\n\n\t/**\n\t * Returns a new builder.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Returns a Short Prefix COMB GUID.\n\t * \n\t * @return a UUIDv4\n\t */\n\t@Override\n\tpublic GUID128 create() {\n\t\tlock.lock();\n\t\ttry {\n\t\t\tfinal long time = instantFunction.get().toEpochMilli() / interval;\n\t\t\tfinal long long1 = this.random.nextLong(6);\n\t\t\tfinal long long2 = this.random.nextLong(8);\n\t\t\treturn make(time, long1, long2);\n\t\t} finally {\n\t\t\tlock.unlock();\n\t\t}\n\t}\n\n\tprivate GUID128 make(final long time, final long long1, final long long2) {\n\t\treturn toUuid((time << 48) | (long1 & 0x0000ffffffffffffL), long2);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/nonstandard/ShortSuffixCombFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.nonstandard;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory;\n\nimport java.time.Clock;\nimport java.util.Random;\nimport java.util.UUID;\nimport java.util.function.LongSupplier;\n\n/**\n * Concrete factory for creating Short Suffix COMB GUIDs.\n * <p>\n * A Short Suffix COMB GUID is a UUID that combines a creation time with random\n * bits.\n * <p>\n * The creation minute is a 2 bytes SUFFIX at the LEAST significant bits.\n * <p>\n * The suffix wraps around every ~45 days (2^16/60/24 = ~45).\n * <p>\n * The created UUID is a UUIDv4 for compatibility with RFC 9562.\n * \n * @see <a 
href=\n *      \"https://www.2ndquadrant.com/en/blog/sequential-uuid-generators/\">Sequential\n *      UUID Generators</a>\n */\npublic final class ShortSuffixCombFactory extends AbstCombFactory {\n\n\t/**\n\t * Interval in milliseconds.\n\t */\n\tprotected final int interval;\n\n\t/**\n\t * Default interval of 60 seconds in milliseconds.\n\t */\n\tprotected static final int DEFAULT_INTERVAL = 60_000;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic ShortSuffixCombFactory() {\n\t\tthis(builder());\n\t}\n\n\t/**\n\t * Constructor with a clock.\n\t * \n\t * @param clock a clock\n\t */\n\tpublic ShortSuffixCombFactory(Clock clock) {\n\t\tthis(builder().withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a random.\n\t * \n\t * @param random a random generator\n\t */\n\tpublic ShortSuffixCombFactory(Random random) {\n\t\tthis(builder().withRandom(random));\n\t}\n\n\t/**\n\t * Constructor with a random and a clock.\n\t * \n\t * @param random a random\n\t * @param clock  a clock\n\t */\n\tpublic ShortSuffixCombFactory(Random random, Clock clock) {\n\t\tthis(builder().withRandom(random).withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a function which return random numbers.\n\t * \n\t * @param randomFunction a function\n\t */\n\tpublic ShortSuffixCombFactory(LongSupplier randomFunction) {\n\t\tthis(builder().withRandomFunction(randomFunction));\n\t}\n\n\t/**\n\t * Constructor with a function which a function which return random numbers and\n\t * a clock.\n\t * \n\t * @param randomFunction a function\n\t * @param clock          a clock\n\t */\n\tpublic ShortSuffixCombFactory(LongSupplier randomFunction, Clock clock) {\n\t\tthis(builder().withRandomFunction(randomFunction).withClock(clock));\n\t}\n\n\tprivate ShortSuffixCombFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_RANDOM_BASED, builder);\n\t\tthis.interval = builder.getInterval();\n\t}\n\n\t/**\n\t * Builder of factories.\n\t */\n\tpublic static class Builder extends 
AbstCombFactory.Builder<ShortSuffixCombFactory, Builder> {\n\n\t\tprivate Integer interval;\n\n\t\t/**\n\t\t * Get the interval in milliseconds.\n\t\t * \n\t\t * @return the interval in milliseconds.\n\t\t */\n\t\tprotected int getInterval() {\n\t\t\tif (this.interval == null) {\n\t\t\t\tthis.interval = DEFAULT_INTERVAL;\n\t\t\t}\n\t\t\treturn this.interval;\n\t\t}\n\n\t\t/**\n\t\t * Set the interval in milliseconds.\n\t\t * \n\t\t * @param interval the interval in milliseconds\n\t\t * @return the builder\n\t\t */\n\t\tpublic Builder withInterval(int interval) {\n\t\t\tthis.interval = interval;\n\t\t\treturn this;\n\t\t}\n\n\t\t@Override\n\t\tpublic ShortSuffixCombFactory build() {\n\t\t\treturn new ShortSuffixCombFactory(this);\n\t\t}\n\t}\n\n\t/**\n\t * Returns a new builder.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Returns a Short Suffix COMB GUID.\n\t * \n\t * @return a UUIDv4\n\t */\n\t@Override\n\tpublic GUID128 create() {\n\t\tlock.lock();\n\t\ttry {\n\t\t\tfinal long time = instantFunction.get().toEpochMilli() / interval;\n\t\t\tfinal long long1 = this.random.nextLong(8);\n\t\t\tfinal long long2 = this.random.nextLong(6);\n\t\t\treturn make(time, long1, long2);\n\t\t} finally {\n\t\t\tlock.unlock();\n\t\t}\n\t}\n\n\tprivate GUID128 make(final long time, final long long1, final long long2) {\n\t\treturn toUuid(long1,\n\t\t\t\t(((long2 & 0x0000ffff00000000L) << 16) | (time & 0xffffL) << 32) | (long2 & 0x00000000ffffffffL));\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/nonstandard/SuffixCombFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.nonstandard;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstRandomBasedFactory;\n\nimport java.time.Clock;\nimport java.util.Random;\nimport java.util.UUID;\nimport java.util.function.LongSupplier;\n\n/**\n * Concrete factory for creating Suffix COMB GUIDs.\n * <p>\n * A Suffix COMB GUID is a UUID that combines a creation time with random bits.\n * <p>\n * The creation millisecond is a 6 bytes SUFFIX at the LEAST significant bits.\n * <p>\n * The created UUID is a UUIDv4 for compatibility with RFC 9562.\n * \n * @see 
AbstCombFactory\n * @see AbstRandomBasedFactory\n * @see <a href=\"http://www.informit.com/articles/article.aspx?p=25862\">The Cost\n *      of GUIDs as Primary Keys</a>\n */\npublic final class SuffixCombFactory extends AbstCombFactory {\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic SuffixCombFactory() {\n\t\tthis(builder());\n\t}\n\n\t/**\n\t * Constructor with a clock.\n\t * \n\t * @param clock a clock\n\t */\n\tpublic SuffixCombFactory(Clock clock) {\n\t\tthis(builder().withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a random.\n\t * \n\t * @param random a random generator\n\t */\n\tpublic SuffixCombFactory(Random random) {\n\t\tthis(builder().withRandom(random));\n\t}\n\n\t/**\n\t * Constructor with a random and a clock.\n\t * \n\t * @param random a random\n\t * @param clock  a clock\n\t */\n\tpublic SuffixCombFactory(Random random, Clock clock) {\n\t\tthis(builder().withRandom(random).withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a function which return random numbers.\n\t * \n\t * @param randomFunction a function\n\t */\n\tpublic SuffixCombFactory(LongSupplier randomFunction) {\n\t\tthis(builder().withRandomFunction(randomFunction));\n\t}\n\n\t/**\n\t * Constructor with a function which a function which return random numbers and\n\t * a clock.\n\t * \n\t * @param randomFunction a function\n\t * @param clock          a clock\n\t */\n\tpublic SuffixCombFactory(LongSupplier randomFunction, Clock clock) {\n\t\tthis(builder().withRandomFunction(randomFunction).withClock(clock));\n\t}\n\n\tprivate SuffixCombFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_RANDOM_BASED, builder);\n\t}\n\n\t/**\n\t * Builder of factories.\n\t */\n\tpublic static class Builder extends AbstCombFactory.Builder<SuffixCombFactory, Builder> {\n\t\t@Override\n\t\tpublic SuffixCombFactory build() {\n\t\t\treturn new SuffixCombFactory(this);\n\t\t}\n\t}\n\n\t/**\n\t * Returns a new builder.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder 
builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Returns a Suffix COMB GUID.\n\t * \n\t * @return a UUIDv4\n\t */\n\t@Override\n\tpublic GUID128 create() {\n\t\tlock.lock();\n\t\ttry {\n\t\t\tfinal long time = instantFunction.get().toEpochMilli();\n\t\t\tfinal long long1 = this.random.nextLong(8);\n\t\t\tfinal long long2 = this.random.nextLong(2);\n\t\t\treturn make(time, long1, long2);\n\t\t} finally {\n\t\t\tlock.unlock();\n\t\t}\n\t}\n\n\tprivate GUID128 make(final long time, final long long1, final long long2) {\n\t\treturn toUuid(long1, (long2 << 48) | (time & 0x0000ffffffffffffL));\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/DceSecurityFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.standard;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidLocalDomain;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstTimeBasedFactory;\n\nimport java.util.UUID;\nimport java.util.concurrent.atomic.AtomicInteger;\n\n/**\n * \n * Concrete factory for creating DCE Security unique identifiers (UUIDv2).\n * \n * @see UuidLocalDomain\n * @see <a href=\n *      \"https://pubs.opengroup.org/onlinepubs/9696989899/chap5.htm#tagcjh_08_02_01_01\">DCE\n *      Security UUIDs</a>\n */\npublic final class DceSecurityFactory extends AbstTimeBasedFactory {\n\n\tprivate AtomicInteger counter;\n\n\tprivate final 
byte localDomain;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic DceSecurityFactory() {\n\t\tthis(builder());\n\t}\n\n\tprivate DceSecurityFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_DCE_SECURITY, builder);\n\t\tthis.localDomain = builder.localDomain;\n\t\tthis.counter = new AtomicInteger();\n\t}\n\n\t/**\n\t * Returns a builder of DCE Security factory.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Concrete builder for creating a DCE Security factory.\n\t * \n\t * @see AbstTimeBasedFactory.Builder\n\t */\n\tpublic static class Builder extends AbstTimeBasedFactory.Builder<DceSecurityFactory, Builder> {\n\n\t\tprivate byte localDomain;\n\n\t\t/**\n\t\t * Set the local domain.\n\t\t * \n\t\t * @param localDomain the local domain\n\t\t * @return the builder\n\t\t */\n\t\tpublic Builder withLocalDomain(UuidLocalDomain localDomain) {\n\t\t\tthis.localDomain = localDomain.getValue();\n\t\t\treturn this;\n\t\t}\n\n\t\t/**\n\t\t * Set the local domain.\n\t\t * \n\t\t * @param localDomain the local domain\n\t\t * @return the builder\n\t\t */\n\t\tpublic Builder withLocalDomain(byte localDomain) {\n\t\t\tthis.localDomain = localDomain;\n\t\t\treturn this;\n\t\t}\n\n\t\t@Override\n\t\tpublic DceSecurityFactory build() {\n\t\t\treturn new DceSecurityFactory(this);\n\t\t}\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * <p>\n\t * A DCE Security UUID is a modified UUIDv1.\n\t * <p>\n\t * Steps of creation:\n\t * <ol>\n\t * <li>Create a Time-based UUIDv1;\n\t * <li>Replace the least significant 8 bits of the clock sequence with the local\n\t * domain;\n\t * <li>Replace the least significant 32 bits of the time stamp with the local\n\t * identifier.\n\t * </ol>\n\t * \n\t * @param localDomain     a local domain\n\t * @param localIdentifier a local identifier\n\t * @return a DCE Security UUID\n\t */\n\tpublic GUID128 create(byte localDomain, int 
localIdentifier) {\n\n\t\t// Create a UUIDv1\n\t\tGUID128 uuid = super.create();\n\n\t\t// Embed the local domain bits\n\t\tfinal long lsb = embedLocalDomain(uuid.getLeastSignificantBits(), localDomain, this.counter.incrementAndGet());\n\n\t\t// Embed the local identifier bits\n\t\tfinal long msb = emgedLocalIdentifier(uuid.getMostSignificantBits(), localIdentifier);\n\n\t\treturn toUuid(msb, lsb);\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * \n\t * @param localDomain     a local domain\n\t * @param localIdentifier a local identifier\n\t * @return a DCE Security UUID\n\t */\n\tpublic GUID128 create(UuidLocalDomain localDomain, int localIdentifier) {\n\t\treturn create(localDomain.getValue(), localIdentifier);\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * <p>\n\t * The local domain is local domain used by this method defined by builder:\n\t * \n\t * <pre>{@code\n\t * DceSecurityFactory factory = DceSecurityFactory.builder().withLocalDomain(UuidLocalDomain).build();\n\t * }</pre>\n\t * \n\t * @param localIdentifier a local identifier\n\t * @return a UUIDv2\n\t */\n\tpublic GUID128 create(int localIdentifier) {\n\t\treturn create(this.localDomain, localIdentifier);\n\t}\n\n\t/**\n\t * Always throws an exception.\n\t * <p>\n\t * Overrides the method {@link AbstTimeBasedFactory#create()} to throw an\n\t * exception instead of returning a UUID.\n\t * \n\t * @throws UnsupportedOperationException always\n\t */\n\t@Override\n\tpublic GUID128 create() {\n\t\tthrow new UnsupportedOperationException(\"Unsuported operation for DCE Security UUID factory\");\n\t}\n\n\t/**\n\t * Returns a DCE Security unique identifier (UUIDv2).\n\t * \n\t * @return a UUIDv2\n\t */\n\t@Override\n\tpublic GUID128 create(Parameters parameters) {\n\t\treturn create(parameters.getLocalDomain(), parameters.getLocalIdentifier());\n\t}\n\n\t/**\n\t * Embeds the local identifier in into the most significant bits.\n\t * \n\t * @param msb        
     the MSB\n\t * @param localIdentifier the local identifier\n\t * @return the updated MSB\n\t */\n\tprivate static long emgedLocalIdentifier(long msb, int localIdentifier) {\n\t\treturn (msb & 0x00000000ffffffffL) // clear time_low bits\n\t\t\t\t| ((localIdentifier & 0x00000000ffffffffL) << 32);\n\t}\n\n\t/**\n\t * Embeds the local domain bits in the least significant bits.\n\t * \n\t * @param lsb         the LSB\n\t * @param localDomain a local domain\n\t * @param counter     a counter value\n\t * @return the updated LSB\n\t */\n\tprivate static long embedLocalDomain(long lsb, byte localDomain, long counter) {\n\t\treturn (lsb & 0x0000ffffffffffffL) // clear clock_seq bits\n\t\t\t\t| ((localDomain & 0x00000000000000ffL) << 48) //\n\t\t\t\t| ((counter & 0x00000000000000ffL) << 56);\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/NameBasedMd5Factory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.standard;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidNamespace;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstNameBasedFactory;\n\nimport java.util.UUID;\n\n/**\n * Concrete factory for creating name-based unique identifiers using MD5 hashing\n * (UUIDv3).\n * \n * @see AbstNameBasedFactory\n */\npublic final class NameBasedMd5Factory extends AbstNameBasedFactory {\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic NameBasedMd5Factory() {\n\t\tthis((byte[]) null);\n\t}\n\n\t/**\n\t * Constructor with a namespace.\n\t * \n\t * @param namespace a namespace\n\t */\n\tpublic NameBasedMd5Factory(GUID128 namespace) 
{\n\t\tthis(namespaceBytes(namespace));\n\t}\n\n\t/**\n\t * Constructor with a namespace.\n\t * \n\t * @param namespace a namespace\n\t */\n\tpublic NameBasedMd5Factory(String namespace) {\n\t\tthis(namespaceBytes(namespace));\n\t}\n\n\t/**\n\t * Constructor with a namespace.\n\t * \n\t * @param namespace a namespace\n\t */\n\tpublic NameBasedMd5Factory(UuidNamespace namespace) {\n\t\tthis(namespaceBytes(namespace));\n\t}\n\n\tprivate NameBasedMd5Factory(byte[] namespace) {\n\t\tsuper(UuidVersion.VERSION_NAME_BASED_MD5, ALGORITHM_MD5, namespace);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/NameBasedSha1Factory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.standard;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidNamespace;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstNameBasedFactory;\n\nimport java.util.UUID;\n\n/**\n * Concrete factory for creating name-based unique identifiers using SHA-1\n * hashing (UUIDv5).\n * \n * @see AbstNameBasedFactory\n */\npublic final class NameBasedSha1Factory extends AbstNameBasedFactory {\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic NameBasedSha1Factory() {\n\t\tthis((byte[]) null);\n\t}\n\n\t/**\n\t * Constructor with a namespace.\n\t * \n\t * @param namespace a namespace\n\t */\n\tpublic NameBasedSha1Factory(GUID128 namespace) 
{\n\t\tthis(namespaceBytes(namespace));\n\t}\n\n\t/**\n\t * Constructor with a namespace.\n\t * \n\t * @param namespace a namespace\n\t */\n\tpublic NameBasedSha1Factory(String namespace) {\n\t\tthis(namespaceBytes(namespace));\n\t}\n\n\t/**\n\t * Constructor with a namespace.\n\t * \n\t * @param namespace a namespace\n\t */\n\tpublic NameBasedSha1Factory(UuidNamespace namespace) {\n\t\tthis(namespaceBytes(namespace));\n\t}\n\n\tprivate NameBasedSha1Factory(byte[] namespace) {\n\t\tsuper(UuidVersion.VERSION_NAME_BASED_SHA1, ALGORITHM_SHA1, namespace);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/RandomBasedFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.standard;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstRandomBasedFactory;\nimport com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil;\n\nimport java.util.Random;\nimport java.util.UUID;\nimport java.util.function.LongSupplier;\n\n/**\n * Concrete factory for creating random-based unique identifiers (UUIDv4).\n */\npublic final class RandomBasedFactory extends AbstRandomBasedFactory {\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic RandomBasedFactory() {\n\t\tthis(builder());\n\t}\n\n\t/**\n\t * Constructor with a {@link Random} instance.\n\t * \n\t * @param random a {@link Random} 
instance\n\t */\n\tpublic RandomBasedFactory(Random random) {\n\t\tthis(builder().withRandom(random));\n\t}\n\n\t/**\n\t * Constructor with a function which returns random number.\n\t * \n\t * @param randomSupplier a function\n\t */\n\tpublic RandomBasedFactory(LongSupplier randomSupplier) {\n\t\tthis(builder().withRandomFunction(randomSupplier));\n\t}\n\n\tprivate RandomBasedFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_RANDOM_BASED, builder);\n\t}\n\n\t/**\n\t * Concrete builder for creating a random-based factory.\n\t * \n\t * @see AbstRandomBasedFactory.Builder\n\t */\n\tpublic static class Builder extends AbstRandomBasedFactory.Builder<RandomBasedFactory, Builder> {\n\t\t@Override\n\t\tpublic RandomBasedFactory build() {\n\t\t\treturn new RandomBasedFactory(this);\n\t\t}\n\t}\n\n\t/**\n\t * Returns a builder of random-based factory.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Returns a random-based UUID.\n\t * \n\t * ### RFC 9562 - 4.4. 
Algorithms for Creating a UUID from Truly Random or\n\t * Pseudo-Random Numbers\n\t * \n\t * (1) Set the two most significant bits (bits 6 and 7) of the\n\t * clock_seq_hi_and_reserved to zero and one, respectively.\n\t * \n\t * (2) Set the four most significant bits (bits 12 through 15) of the\n\t * time_hi_and_version field to the 4-bit version number from Section 4.1.3.\n\t * \n\t * (3) Set all the other bits to randomly (or pseudo-randomly) chosen values.\n\t * \n\t * @return a random-based UUID\n\t */\n\t@Override\n\tpublic GUID128 create() {\n\t\tlock.lock();\n\t\ttry {\n\t\t\tif (this.random instanceof SafeRandom) {\n\t\t\t\tfinal byte[] bytes = this.random.nextBytes(16);\n\t\t\t\tfinal long msb = ByteUtil.toNumber(bytes, 0, 8);\n\t\t\t\tfinal long lsb = ByteUtil.toNumber(bytes, 8, 16);\n\t\t\t\treturn toUuid(msb, lsb);\n\t\t\t} else {\n\t\t\t\tfinal long msb = this.random.nextLong();\n\t\t\t\tfinal long lsb = this.random.nextLong();\n\t\t\t\treturn toUuid(msb, lsb);\n\t\t\t}\n\t\t} finally {\n\t\t\tlock.unlock();\n\t\t}\n\t}\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/TimeBasedFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.standard;\n\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstTimeBasedFactory;\n\n/**\n * Concrete factory for creating time-based unique identifiers (UUIDv1).\n * \n * @see AbstTimeBasedFactory\n */\npublic final class TimeBasedFactory extends AbstTimeBasedFactory {\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic TimeBasedFactory() {\n\t\tthis(builder());\n\t}\n\n\tprivate TimeBasedFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_TIME_BASED, builder);\n\t}\n\n\t/**\n\t * Returns a builder of time-based factory.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Concrete builder for creating a time-based 
factory.\n\t * \n\t * @see AbstTimeBasedFactory.Builder\n\t */\n\tpublic static class Builder extends AbstTimeBasedFactory.Builder<TimeBasedFactory, Builder> {\n\t\t@Override\n\t\tpublic TimeBasedFactory build() {\n\t\t\treturn new TimeBasedFactory(this);\n\t\t}\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/TimeOrderedEpochFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.standard;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstCombFactory;\nimport com.pinecone.ulf.util.guid.i128.factory.nonstandard.PrefixCombFactory;\n\nimport java.time.Clock;\nimport java.time.Instant;\nimport java.util.Objects;\nimport java.util.Random;\nimport java.util.UUID;\nimport java.util.concurrent.locks.ReentrantLock;\nimport java.util.function.Function;\nimport java.util.function.LongSupplier;\nimport java.util.function.Supplier;\n\n/**\n * Concrete factory for creating Unix epoch time-ordered unique identifiers\n * (UUIDv7).\n * <p>\n * UUIDv7 is a new UUID version proposed by Peabody 
and Davis. It is similar to\n * Prefix COMB GUID and ULID.\n * <p>\n * This factory creates 3 types:\n * <ul>\n * <li><b>Type 1 (default)</b>: this type is divided in 3 components, namely\n * time, counter and random. The counter component is incremented by 1 when the\n * time repeats. The random component is always randomized.\n * <li><b>Type 2 (plus 1)</b>: this type is divided in 2 components, namely time\n * and monotonic random. The monotonic random component is incremented by 1 when\n * the time repeats. This type of UUID is like a Monotonic ULID. It can be much\n * faster than the other types.\n * <li><b>Type 3 (plus n)</b>: this type is also divided in 2 components, namely\n * time and monotonic random. The monotonic random component is incremented by a\n * random positive integer between 1 and 2^32. This type of UUID is also like a\n * Monotonic ULID, but with a random increment instead of 1.\n * </ul>\n * <p>\n * If the underlying runtime provides enough clock precision, the microseconds\n * are also injected in the UUID, specifically in the {@code rand_a} field,\n * which is the name RFC 9562 gives to the 12 bits right after the milliseconds\n * field, from left to right. Otherwise, these 12 bits are randomly generated.\n * <p>\n * In JDK 11, we get 1 microsecond precision. However, in JDK 8, the maximum\n * precision we can get is 1 millisecond. 
On Windows, it is even worse because\n * the default precision is 15.625ms, due to the system clock's refresh rate of\n * 64Hz.\n * \n * @since 5.0.0\n * @see PrefixCombFactory\n * @see <a href=\"https://github.com/ulid/spec\">ULID Specification</a>\n * @see <a href=\n *      \"https://tools.ietf.org/html/draft-peabody-dispatch-new-uuid-format\">New\n *      UUID formats</a>\n * @see <a href=\"https://datatracker.ietf.org/wg/uuidrev/documents/\">Revise\n *      Universally Unique Identifier Definitions (uuidrev)</a>\n */\npublic final class TimeOrderedEpochFactory extends AbstCombFactory {\n\n\tprivate final UuidFunction uuidFunction;\n\n\tprivate static final int INCREMENT_TYPE_DEFAULT = 0; // add 2^48 to `rand_b`\n\tprivate static final int INCREMENT_TYPE_PLUS_1 = 1; // just add 1 to `rand_b`\n\tprivate static final int INCREMENT_TYPE_PLUS_N = 2; // add a random n to `rand_b`, where 1 <= n <= 2^32\n\n\tprivate static final long INCREMENT_MAX_DEFAULT = 0xffffffffL; // 2^32-1\n\n\tprivate static final long versionBits = 0x000000000000f000L;\n\tprivate static final long variantBits = 0xc000000000000000L;\n\tprivate static final long upper16Bits = 0xffff000000000000L;\n\tprivate static final long upper48Bits = 0xffffffffffff0000L;\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic TimeOrderedEpochFactory() {\n\t\tthis(builder());\n\t}\n\n\t/**\n\t * Constructor with a clock.\n\t * \n\t * @param clock a clock\n\t */\n\tpublic TimeOrderedEpochFactory(Clock clock) {\n\t\tthis(builder().withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a random.\n\t * \n\t * @param random a random\n\t */\n\tpublic TimeOrderedEpochFactory(Random random) {\n\t\tthis(builder().withRandom(random));\n\t}\n\n\t/**\n\t * Constructor with a random and a clock.\n\t * \n\t * @param random a random\n\t * @param clock  a clock\n\t */\n\tpublic TimeOrderedEpochFactory(Random random, Clock clock) {\n\t\tthis(builder().withRandom(random).withClock(clock));\n\t}\n\n\t/**\n\t * Constructor with a 
function which return random numbers.\n\t * \n\t * @param randomFunction a function\n\t */\n\tpublic TimeOrderedEpochFactory(LongSupplier randomFunction) {\n\t\tthis(builder().withRandomFunction(randomFunction));\n\t}\n\n\t/**\n\t * Constructor with a function which a function which return random numbers and\n\t * a clock.\n\t * \n\t * @param randomFunction a function\n\t * @param clock          a clock\n\t */\n\tpublic TimeOrderedEpochFactory(LongSupplier randomFunction, Clock clock) {\n\t\tthis(builder().withRandomFunction(randomFunction).withClock(clock));\n\t}\n\n\tprivate TimeOrderedEpochFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_TIME_ORDERED_EPOCH, builder);\n\n\t\tswitch (builder.getIncrementType()) {\n\t\tcase INCREMENT_TYPE_PLUS_1:\n\t\t\tthis.uuidFunction = new Plus1Function(random, instantFunction);\n\t\t\tbreak;\n\t\tcase INCREMENT_TYPE_PLUS_N:\n\t\t\tthis.uuidFunction = new PlusNFunction(random, instantFunction, builder.getIncrementMax());\n\t\t\tbreak;\n\t\tcase INCREMENT_TYPE_DEFAULT:\n\t\tdefault:\n\t\t\tthis.uuidFunction = new DefaultFunction(random, instantFunction);\n\t\t}\n\t}\n\n\t/**\n\t * Concrete builder for creating a Unix epoch time-ordered factory.\n\t *\n\t * @see AbstCombFactory.Builder\n\t */\n\tpublic static class Builder extends AbstCombFactory.Builder<TimeOrderedEpochFactory, Builder> {\n\n\t\tprivate Integer incrementType;\n\t\tprivate Long incrementMax;\n\n\t\t/**\n\t\t * Set the increment type to PLUS 1.\n\t\t * \n\t\t * @return the builder\n\t\t */\n\t\tpublic Builder withIncrementPlus1() {\n\t\t\tthis.incrementType = INCREMENT_TYPE_PLUS_1;\n\t\t\tthis.incrementMax = null;\n\t\t\treturn this;\n\t\t}\n\n\t\t/**\n\t\t * Set the increment type to PLUS N.\n\t\t * \n\t\t * @return the builder\n\t\t */\n\t\tpublic Builder withIncrementPlusN() {\n\t\t\tthis.incrementType = INCREMENT_TYPE_PLUS_N;\n\t\t\tthis.incrementMax = null;\n\t\t\treturn this;\n\t\t}\n\n\t\t/**\n\t\t * Set the increment type to PLUS N and set the max 
increment.\n\t\t * \n\t\t * @param incrementMax a number\n\t\t * @return the builder\n\t\t */\n\t\tpublic Builder withIncrementPlusN(long incrementMax) {\n\t\t\tthis.incrementType = INCREMENT_TYPE_PLUS_N;\n\t\t\tthis.incrementMax = incrementMax;\n\t\t\treturn this;\n\t\t}\n\n\t\t/**\n\t\t * Set the increment type.\n\t\t * \n\t\t * @return an number\n\t\t */\n\t\tprotected int getIncrementType() {\n\t\t\tif (this.incrementType == null) {\n\t\t\t\tthis.incrementType = INCREMENT_TYPE_DEFAULT;\n\t\t\t}\n\t\t\treturn this.incrementType;\n\t\t}\n\n\t\t/**\n\t\t * Get the max increment.\n\t\t * \n\t\t * @return a number\n\t\t */\n\t\tprotected long getIncrementMax() {\n\t\t\tif (this.incrementMax == null) {\n\t\t\t\tthis.incrementMax = INCREMENT_MAX_DEFAULT;\n\t\t\t}\n\t\t\treturn this.incrementMax;\n\t\t}\n\n\t\t@Override\n\t\tpublic TimeOrderedEpochFactory build() {\n\t\t\treturn new TimeOrderedEpochFactory(this);\n\t\t}\n\t}\n\n\t/**\n\t * Returns a builder of Unix epoch time-ordered factory.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier (UUIDv7).\n\t * \n\t * @return a UUIDv7\n\t */\n\t@Override\n\tpublic GUID128 create() {\n\t\tUUID uuid = this.uuidFunction.apply(null);\n\t\treturn toUuid(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\n\t}\n\n\tpublic GUID128 createXorUint64LSB( long xorMask ) {\n\t\tUUID uuid = this.uuidFunction.apply(null);\n\n\t\tlong msb = uuid.getMostSignificantBits();\n\t\tlong lsb = uuid.getLeastSignificantBits();\n\n\t\tlong modifiedLsb = lsb ^ xorMask;\n\n\t\treturn toUuid(msb, modifiedLsb);\n\t}\n\n\t/**\n\t * Returns a time-ordered unique identifier (UUIDv7) for a given instant.\n\t * <p>\n\t * The random component is generated with each method invocation.\n\t * \n\t * @return a UUIDv7\n\t * @param instant a given instant\n\t */\n\t@Override\n\tpublic GUID128 create(Parameters parameters) 
{\n\t\tObjects.requireNonNull(parameters.getInstant(), \"Null instant\");\n\t\tUUID uuid = this.uuidFunction.apply(parameters.getInstant());\n\t\treturn toUuid(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());\n\t}\n\n\tstatic abstract class UuidFunction implements Function<Instant, UUID> {\n\n\t\tprotected long msb = 0L; // most significant bits\n\t\tprotected long lsb = 0L; // least significant bits\n\n\t\tprotected final IRandom random;\n\t\tprotected Supplier<Instant> instantFunction;\n\t\tprotected final ReentrantLock lock = new ReentrantLock();\n\n\t\t// let go up to 1 second ahead of system clock\n\t\tprivate static final long advanceMax = 1_000L;\n\n\t\t// let's try to detect the system clock precision\n\t\tprotected static final int precision = precision();\n\n\t\tprotected static final int PRECISION_MILLISECOND = 1;\n\t\tprotected static final int PRECISION_MICROSECOND = 2;\n\n\t\tprotected static final long overflow = 0x0000000000000000L;\n\n\t\tpublic UuidFunction(IRandom random, Supplier<Instant> instantFunction) {\n\n\t\t\tthis.random = random;\n\t\t\tthis.instantFunction = instantFunction;\n\n\t\t\t// instantiate the internal state\n\t\t\treset(this.instantFunction.get());\n\t\t}\n\n\t\t@Override\n\t\tpublic UUID apply(final Instant instant) {\n\t\t\tlock.lock();\n\t\t\ttry {\n\n\t\t\t\tif (instant != null) {\n\t\t\t\t\treset(instant); // user specified\n\t\t\t\t\treturn new UUID(this.msb, this.lsb);\n\t\t\t\t}\n\n\t\t\t\tInstant now = instantFunction.get();\n\n\t\t\t\tlong lastTime = this.lastTime();\n\t\t\t\tlong time = now.toEpochMilli();\n\n\t\t\t\t// is it not too much ahead of system clock?\n\t\t\t\tif (advanceMax > Math.abs(lastTime - time)) {\n\t\t\t\t\ttime = Math.max(lastTime, time);\n\t\t\t\t}\n\n\t\t\t\tif (time == lastTime) {\n\t\t\t\t\tincrement(now);\n\t\t\t\t} else {\n\t\t\t\t\treset(now);\n\t\t\t\t}\n\n\t\t\t\treturn new UUID(this.msb, this.lsb);\n\n\t\t\t} finally 
{\n\t\t\t\tlock.unlock();\n\t\t\t}\n\t\t}\n\n\t\t/**\n\t\t * Increment the `rand_b` field.\n\t\t * \n\t\t * If the `rand_b` field rolls over, then `rand_a` should be incremented too.\n\t\t * \n\t\t * Note that as `unix_ts_ms` and `rand_a` are stored in the same `long`\n\t\t * variable, when `rand_a` rolls over, `unix_ts_ms` goes up automatically.\n\t\t * \n\t\t * To be implemented by each specific subclass.\n\t\t * \n\t\t * @param instant an instant\n\t\t */\n\t\tabstract void increment(final Instant instant);\n\n\t\t/**\n\t\t * Reset the `unix_ts_ms` field with the current milliseconds. Also set the\n\t\t * `rand_a` and `rand_b` fields with random bits.\n\t\t * \n\t\t * If there's enough clock precision, inject the current microseconds into the\n\t\t * `rand_a` field instead of random bits.\n\t\t * \n\t\t * @param instant an instant\n\t\t */\n\t\tvoid reset(final Instant instant) {\n\n\t\t\tthis.msb = instant.toEpochMilli() << 16;\n\t\t\tthis.lsb = random.nextLong();\n\n\t\t\tif (precision == PRECISION_MILLISECOND) {\n\t\t\t\t// lack of precision: put random bits in `rand_a`\n\t\t\t\tthis.msb = (msb & upper48Bits) | random.nextLong(2);\n\t\t\t} else {\n\t\t\t\t// set `rand_a` field\n\t\t\t\tmicroseconds(instant);\n\t\t\t}\n\t\t}\n\n\t\t/**\n\t\t * Injects microseconds into the `rand_a` field.\n\t\t * <p>\n\t\t * It only works when the underlying runtime provides at least microsecond\n\t\t * precision. 
Otherwise, this method won't change the value in `rand_a` field.\n\t\t * \n\t\t * @param instant an instant\n\t\t */\n\t\tvoid microseconds(final Instant instant) {\n\n\t\t\t// do nothing if not enough precision\n\t\t\tif (precision == PRECISION_MILLISECOND) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tfinal long shift = 12;\n\t\t\tfinal long scale = 1_000_000L;\n\t\t\tfinal long nanos = instant.getNano();\n\t\t\tfinal long randa = ((nanos % scale) << shift) / scale;\n\n\t\t\t// previous and next and timestamps\n\t\t\tfinal long prev = (msb & ~versionBits);\n\t\t\tfinal long next = (msb & upper48Bits) | (randa & 0x0fffL);\n\n\t\t\t// don't let the timestamp go backwards\n\t\t\tthis.msb = (next > prev) ? next : prev;\n\t\t}\n\n\t\tlong lastTime() {\n\t\t\treturn this.msb >>> 16;\n\t\t}\n\n\t\t/**\n\t\t * Returns the instant precision detected.\n\t\t * \n\t\t * @param clock a custom clock instance\n\t\t * @return the precision\n\t\t */\n\t\tstatic int precision() {\n\n\t\t\tClock clock = Clock.systemUTC();\n\n\t\t\tint best = 0;\n\t\t\tint loop = 3; // the best of 3\n\n\t\t\tfor (int i = 0; i < loop; i++) {\n\n\t\t\t\tint x = 0;\n\n\t\t\t\tint nanosecond = clock.instant().getNano();\n\n\t\t\t\tif (nanosecond % 1_000_000 != 0) {\n\t\t\t\t\tx = PRECISION_MICROSECOND;\n\t\t\t\t} else {\n\t\t\t\t\tx = PRECISION_MILLISECOND;\n\t\t\t\t}\n\n\t\t\t\tbest = Math.max(best, x);\n\t\t\t}\n\n\t\t\treturn best;\n\t\t}\n\t}\n\n\tstatic final class DefaultFunction extends UuidFunction {\n\n\t\tpublic DefaultFunction(IRandom random, Supplier<Instant> instantFunction) {\n\t\t\tsuper(random, instantFunction);\n\t\t}\n\n\t\t@Override\n\t\tvoid increment(final Instant instant) {\n\n\t\t\t// set `rand_a` field\n\t\t\tmicroseconds(instant);\n\n\t\t\t// add 2^48 to `rand_b`\n\t\t\tthis.lsb = (this.lsb & upper16Bits);\n\t\t\tthis.lsb = (this.lsb | variantBits) + (1L << 48);\n\n\t\t\tif (this.lsb == overflow) {\n\t\t\t\t// add 1 to `rand_a` if overflow occurs\n\t\t\t\tthis.msb = (this.msb | 
versionBits) + 1L;\n\t\t\t}\n\n\t\t\t// then randomize the lower 48 bits\n\t\t\tthis.lsb = (this.lsb & upper16Bits) | this.random.nextLong(6);\n\t\t}\n\t}\n\n\tstatic final class Plus1Function extends UuidFunction {\n\n\t\tpublic Plus1Function(IRandom random, Supplier<Instant> instantFunction) {\n\t\t\tsuper(random, instantFunction);\n\t\t}\n\n\t\t@Override\n\t\tvoid increment(final Instant instant) {\n\n\t\t\t// set `rand_a` field\n\t\t\tmicroseconds(instant);\n\n\t\t\t// just add 1 to `rand_b`\n\t\t\tthis.lsb = (this.lsb | variantBits) + 1L;\n\n\t\t\tif (this.lsb == overflow) {\n\t\t\t\t// add 1 to `rand_a` if overflow occurs\n\t\t\t\tthis.msb = (this.msb | versionBits) + 1L;\n\t\t\t}\n\t\t}\n\t}\n\n\tstatic final class PlusNFunction extends UuidFunction {\n\n\t\tprivate final LongSupplier plusNFunction;\n\n\t\tpublic PlusNFunction(IRandom random, Supplier<Instant> instantFunction, Long incrementMax) {\n\t\t\tsuper(random, instantFunction);\n\t\t\tthis.plusNFunction = customPlusNFunction(random, incrementMax);\n\t\t}\n\n\t\t@Override\n\t\tvoid increment(final Instant instant) {\n\n\t\t\t// set `rand_a` field\n\t\t\tmicroseconds(instant);\n\n\t\t\t// add a random n to `rand_b`, where 1 <= n <= incrementMax\n\t\t\tthis.lsb = (this.lsb | variantBits) + plusNFunction.getAsLong();\n\n\t\t\tif (this.lsb == overflow) {\n\t\t\t\t// add 1 to `rand_a` if overflow occurs\n\t\t\t\tthis.msb = (this.msb | versionBits) + 1L;\n\t\t\t}\n\t\t}\n\n\t\tprivate LongSupplier customPlusNFunction(IRandom random, Long incrementMax) {\n\t\t\tif (incrementMax == INCREMENT_MAX_DEFAULT) {\n\t\t\t\tif (random instanceof SafeRandom) {\n\t\t\t\t\treturn () -> {\n\t\t\t\t\t\t// return n, where 1 <= n <= 2^32\n\t\t\t\t\t\treturn random.nextLong(Integer.BYTES) + 1;\n\t\t\t\t\t};\n\t\t\t\t} else {\n\t\t\t\t\treturn () -> {\n\t\t\t\t\t\t// return n, where 1 <= n <= 2^32\n\t\t\t\t\t\treturn (random.nextLong() >>> 32) + 1;\n\t\t\t\t\t};\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfinal long positive = 
0x7fffffffffffffffL;\n\t\t\t\tif (random instanceof SafeRandom) {\n\t\t\t\t\t// the minimum number of bits and bytes for incrementMax\n\t\t\t\t\tfinal int bits = (int) Math.ceil(Math.log(incrementMax) / Math.log(2));\n\t\t\t\t\tfinal int size = ((bits - 1) / Byte.SIZE) + 1;\n\t\t\t\t\treturn () -> {\n\t\t\t\t\t\t// return n, where 1 <= n <= incrementMax\n\t\t\t\t\t\treturn ((random.nextLong(size) & positive) % incrementMax) + 1;\n\t\t\t\t\t};\n\t\t\t\t} else {\n\t\t\t\t\treturn () -> {\n\t\t\t\t\t\t// return n, where 1 <= n <= incrementMax\n\t\t\t\t\t\treturn ((random.nextLong() & positive) % incrementMax) + 1;\n\t\t\t\t\t};\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/factory/standard/TimeOrderedFactory.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.factory.standard;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\nimport com.pinecone.ulf.util.guid.i128.factory.AbstTimeBasedFactory;\n\n/**\n * Concrete factory for creating time-ordered unique identifiers (UUIDv6).\n * <p>\n * UUIDv6 is a new UUID version proposed by Peabody and Davis.\n * <p>\n * <b>Warning:</b> this can change in the future.\n * \n * @see AbstTimeBasedFactory\n * @see <a href=\n *      \"https://tools.ietf.org/html/draft-peabody-dispatch-new-uuid-format\">New\n *      UUID formats</a>\n * @see <a href=\"https://datatracker.ietf.org/wg/uuidrev/documents/\">Revise\n *      Universally Unique Identifier Definitions (uuidrev)</a>\n */\npublic final class TimeOrderedFactory extends 
AbstTimeBasedFactory {\n\n\t/**\n\t * Default constructor.\n\t */\n\tpublic TimeOrderedFactory() {\n\t\tthis(builder());\n\t}\n\n\tprivate TimeOrderedFactory(Builder builder) {\n\t\tsuper(UuidVersion.VERSION_TIME_ORDERED, builder);\n\t}\n\n\t/**\n\t * Returns the most significant bits of the UUID.\n\t * <p>\n\t * It implements the algorithm for generating UUIDv6.\n\t * \n\t * @param timestamp the number of 100-nanoseconds since 1970-01-01 (Unix epoch)\n\t * @return the MSB\n\t */\n\t@Override\n\tprotected long formatMostSignificantBits(final long timestamp) {\n\t\treturn ((timestamp & 0x0ffffffffffff000L) << 4) //\n\t\t\t\t| (timestamp & 0x0000000000000fffL) //\n\t\t\t\t| 0x0000000000006000L; // apply version 6\n\t}\n\n\t/**\n\t * Returns a builder of random-ordered factory.\n\t * \n\t * @return a builder\n\t */\n\tpublic static Builder builder() {\n\t\treturn new Builder();\n\t}\n\n\t/**\n\t * Concrete builder for creating a time-ordered factory.\n\t * \n\t * @see AbstTimeBasedFactory.Builder\n\t */\n\tpublic static class Builder extends AbstTimeBasedFactory.Builder<TimeOrderedFactory, Builder> {\n\t\t@Override\n\t\tpublic TimeOrderedFactory build() {\n\t\t\treturn new TimeOrderedFactory(this);\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/CombUtil.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util;\n\nimport java.time.Instant;\nimport java.util.UUID;\n\n/**\n * Utility for extracting time from COMB GUIDs.\n */\npublic final class CombUtil {\n\n\tprivate CombUtil() {\n\t}\n\n\t/**\n\t * Returns the prefix from a Prefix COMB.\n\t * <p>\n\t * The value returned is equivalent to the number of milliseconds since\n\t * 1970-01-01 (Unix epoch).\n\t * \n\t * @param comb a Prefix COMB\n\t * @return the prefix (the Unix milliseconds)\n\t */\n\tpublic static long getPrefix(UUID comb) {\n\t\treturn (comb.getMostSignificantBits() >>> 16);\n\t}\n\n\t/**\n\t * Returns the suffix from a Suffix COMB.\n\t * <p>\n\t * The value returned is equivalent to the number of milliseconds since\n\t * 1970-01-01 (Unix epoch).\n\t * \n\t * @param comb a Suffix COMB\n\t * @return 
the suffix (the Unix milliseconds)\n\t */\n\tpublic static long getSuffix(UUID comb) {\n\t\treturn (comb.getLeastSignificantBits() & 0x0000ffffffffffffL);\n\t}\n\n\t/**\n\t * Returns the instant from a Prefix COMB.\n\t * \n\t * @param comb a Prefix COMB\n\t * @return {@link Instant}\n\t */\n\tpublic static Instant getPrefixInstant(UUID comb) {\n\t\tlong milliseconds = getPrefix(comb);\n\t\treturn Instant.ofEpochMilli(milliseconds);\n\t}\n\n\t/**\n\t * Returns the instant from a Suffix COMB.\n\t * \n\t * @param comb a Suffix COMB\n\t * @return {@link Instant}\n\t */\n\tpublic static Instant getSuffixInstant(UUID comb) {\n\t\tlong milliseconds = getSuffix(comb);\n\t\treturn Instant.ofEpochMilli(milliseconds);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/MachineId.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util;\n\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.util.internal.NetworkUtil;\n\nimport java.nio.charset.StandardCharsets;\nimport java.security.MessageDigest;\nimport java.security.NoSuchAlgorithmException;\nimport java.util.Arrays;\nimport java.util.UUID;\n\nimport static com.pinecone.ulf.util.guid.i128.util.UuidUtil.setVersion;\nimport static com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil.toHexadecimal;\nimport static com.pinecone.ulf.util.guid.i128.util.internal.ByteUtil.toNumber;\n\n/**\n * Utility for generating machine ID.\n * <p>\n * It works in three steps:\n * <ol>\n * <li>Create a string containing HOSTNAME, MAC and IP;\n * <li>Create a hash of the string using SHA-256 algorithm;\n * <li>Create the 
identifier using part of the resulting hash.\n * </ol>\n */\npublic final class MachineId {\n\n\tprivate static Long id;\n\tprivate static UUID uuid;\n\tprivate static String hexa;\n\tprivate static byte[] hash;\n\tprivate static String string;\n\n\tprivate MachineId() {\n\t}\n\n\t/**\n\t * Returns a number generated from the machine hash.\n\t * <p>\n\t * It uses the first 8 bytes of the machine hash.\n\t * \n\t * Note that the return value can be negative.\n\t * \n\t * @return a number\n\t */\n\tpublic static long getMachineId() {\n\t\tif (id == null) {\n\t\t\tid = getMachineId(getMachineHash());\n\t\t}\n\t\treturn id;\n\t}\n\n\tstatic long getMachineId(byte[] hash) {\n\t\treturn toNumber(hash, 0, 8);\n\t}\n\n\t/**\n\t * Returns a UUID generated from the machine hash.\n\t * <p>\n\t * It uses the first 16 bytes of the machine hash.\n\t * <p>\n\t * The UUID version is 4.\n\t * \n\t * @return a UUID\n\t */\n\tpublic static UUID getMachineUuid() {\n\t\tif (uuid == null) {\n\t\t\tuuid = getMachineUuid(getMachineHash());\n\t\t}\n\t\treturn uuid;\n\t}\n\n\tstatic UUID getMachineUuid(byte[] hash) {\n\t\tfinal long mostSigBits = toNumber(hash, 0, 8);\n\t\tfinal long leastSigBits = toNumber(hash, 8, 16);\n\t\treturn setVersion(new UUID128(mostSigBits, leastSigBits), 4);\n\t}\n\n\t/**\n\t * Returns the machine hash in hexadecimal format.\n\t * <p>\n\t * The returning string has 64 chars.\n\t * \n\t * @return a string\n\t */\n\tpublic static String getMachineHexa() {\n\t\tif (hexa == null) {\n\t\t\thexa = getMachineHexa(getMachineHash());\n\t\t}\n\t\treturn hexa;\n\t}\n\n\tstatic String getMachineHexa(byte[] hash) {\n\t\treturn toHexadecimal(hash);\n\t}\n\n\t/**\n\t * Returns the machine hash in a byte array.\n\t * <p>\n\t * The returning array has 32 bytes (256 bits).\n\t * \n\t * @return a byte array\n\t */\n\tpublic static byte[] getMachineHash() {\n\t\tif (hash == null) {\n\t\t\thash = getMachineHash(getMachineString());\n\t\t}\n\t\treturn Arrays.copyOf(hash, 
hash.length);\n\t}\n\n\tstatic byte[] getMachineHash(String string) {\n\t\ttry {\n\t\t\treturn MessageDigest.getInstance(\"SHA-256\").digest(string.getBytes(StandardCharsets.UTF_8));\n\t\t} catch (NoSuchAlgorithmException e) {\n\t\t\tthrow new InternalError(\"Message digest algorithm not supported.\", e);\n\t\t}\n\t}\n\n\t/**\n\t * Returns a string containing host name, MAC and IP.\n\t * <p>\n\t * Output format: \"hostname 11-11-11-11-11-11 222.222.222.222\".\n\t * \n\t * @return a string\n\t */\n\tpublic static String getMachineString() {\n\t\tif (string == null) {\n\t\t\tstring = NetworkUtil.getMachineString();\n\t\t}\n\t\treturn string;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidBuilder.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util;\n\nimport java.nio.BufferOverflowException;\nimport java.nio.BufferUnderflowException;\nimport java.nio.ByteBuffer;\nimport java.util.UUID;\n\n/**\n * A UUID builder.\n * <p>\n * Usage:\n * \n * <pre>{@code\n * SecureRandom random = new SecureRandom();\n * UUID uuid = new UuidBuilder(4) // sets version 4 (random-based)\n * \t\t.put(random.nextLong()) // put the most significant 64 bits\n * \t\t.put(random.nextLong()) // put the least significant 64 bits\n * \t\t.build(); // return the built UUID\n * }</pre>\n */\npublic class UuidBuilder {\n\n\tprivate Integer version;\n\n\t// newly-created byte buffers are always BIG_ENDIAN\n\tprivate ByteBuffer buffer = ByteBuffer.allocate(16);\n\n\t/**\n\t * Instantiates a new builder without a version number.\n\t * 
\n\t */\n\tpublic UuidBuilder() {\n\t\tthis.version = null;\n\t}\n\n\t/**\n\t * Instantiates a new builder with a version number.\n\t * \n\t * @param version a value between 0 and 15\n\t */\n\tpublic UuidBuilder(int version) {\n\t\tif (version < 0x00L || version > 0xfL) {\n\t\t\tthrow new IllegalArgumentException(\"Invalid version number\");\n\t\t}\n\t\tthis.version = version;\n\t}\n\n\t/**\n\t * Puts 8 bytes containing the given long.\n\t *\n\t * @param value a long value\n\t *\n\t * @return This buffer\n\t *\n\t * @throws BufferOverflowException If there are fewer than 8 bytes remaining\n\t */\n\tpublic synchronized UuidBuilder put(long value) {\n\t\tbuffer.putLong(value);\n\t\treturn this;\n\t}\n\n\t/**\n\t * Puts 4 bytes containing the given int.\n\t *\n\t * @param value an int value\n\t *\n\t * @return This buffer\n\t *\n\t * @throws BufferOverflowException If there are fewer than 4 bytes remaining\n\t */\n\tpublic synchronized UuidBuilder put(int value) {\n\t\tbuffer.putInt(value);\n\t\treturn this;\n\t}\n\n\t/**\n\t * Puts 2 bytes containing the given short.\n\t *\n\t * @param value a short value\n\t *\n\t * @return This buffer\n\t *\n\t * @throws BufferOverflowException If there are fewer than 2 bytes remaining\n\t */\n\tpublic synchronized UuidBuilder put(short value) {\n\t\tbuffer.putShort(value);\n\t\treturn this;\n\t}\n\n\t/**\n\t * Puts the given byte.\n\t *\n\t * @param value a byte value\n\t *\n\t * @return This buffer\n\t *\n\t * @throws BufferOverflowException If there are fewer than 1 bytes remaining\n\t */\n\tpublic synchronized UuidBuilder put(byte value) {\n\t\tbuffer.put(value);\n\t\treturn this;\n\t}\n\n\t/**\n\t * Puts the given byte array.\n\t *\n\t * @param value a byte array\n\t *\n\t * @return This buffer\n\t *\n\t * @throws BufferOverflowException If there are fewer bytes remaining than the\n\t *                                 array length\n\t */\n\tpublic synchronized UuidBuilder put(byte[] array) {\n\t\tbuffer.put(array);\n\t\treturn 
this;\n\t}\n\n\t/**\n\t * Builds a UUID after all 16 bytes are filled.\n\t * <p>\n\t * This method ends the use of a builder.\n\t * <p>\n\t * Successive calls will always return the same UUID value.\n\t * <p>\n\t * Note: this method overrides bits 48 through 51 (version field) and bits 52\n\t * through 63 (variant field), 6 bits total, to comply the UUID specification.\n\t * \n\t * @throws BufferUnderflowException If there are bytes remaining to be filled\n\t */\n\tpublic synchronized UUID build() {\n\n\t\tvalidate();\n\t\tbuffer.rewind();\n\n\t\tif (this.version != null) {\n\t\t\t// set the 4 most significant bits of the 7th byte (version field)\n\t\t\tfinal long msb = (buffer.getLong() & 0xffff_ffff_ffff_0fffL) | (version & 0xfL) << 12;\n\t\t\t// set the 2 most significant bits of the 9th byte to 1 and 0 (variant field)\n\t\t\tfinal long lsb = (buffer.getLong() & 0x3fff_ffff_ffff_ffffL) | 0x8000_0000_0000_0000L;\n\t\t\treturn new UUID(msb, lsb);\n\t\t}\n\n\t\tfinal long msb = buffer.getLong();\n\t\tfinal long lsb = buffer.getLong();\n\t\treturn new UUID(msb, lsb);\n\t}\n\n\tprivate synchronized void validate() {\n\t\tif (buffer.hasRemaining()) {\n\t\t\tthrow new BufferUnderflowException();\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidComparator.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util;\n\nimport java.util.Comparator;\nimport java.util.UUID;\nimport java.util.function.ToIntBiFunction;\n\n/**\n * Comparator for UUIDs.\n * <p>\n * The default static method compares two time-based UUIDs by comparing the time\n * stamps first and then comparing the least significant bits as unsigned 64-bit\n * integers. 
If both UUIDs are not time-based then it compares them as unsigned\n * 128-bit integers.\n * <p>\n * The opaque static method compares two UUIDs as unsigned 128-bit integers.\n * It's the same as lexicographic sorting of UUID canonical strings.\n */\npublic final class UuidComparator implements Comparator<UUID> {\n\n\tprivate final ToIntBiFunction<UUID, UUID> comparator;\n\n\tprivate static final UuidComparator INSTANCE_DEFAULT = new UuidComparator(UuidComparator::defaultCompare);\n\tprivate static final UuidComparator INSTANCE_OPAQUE = new UuidComparator(UuidComparator::opaqueCompare);\n\n\tprivate UuidComparator(ToIntBiFunction<UUID, UUID> comparator) {\n\t\tthis.comparator = comparator;\n\t}\n\n\t/**\n\t * Creates a default implementation of {@link UuidComparator}.\n\t * \n\t * @see UuidComparator#defaultCompare(UUID, UUID)\n\t */\n\tpublic UuidComparator() {\n\t\tthis(UuidComparator::defaultCompare);\n\t}\n\n\t/**\n\t * Returns a default implementation of {@link UuidComparator}.\n\t * \n\t * @return a {@link UuidComparator}\n\t * @see UuidComparator#defaultCompare(UUID, UUID)\n\t */\n\tpublic static UuidComparator getDefaultInstance() {\n\t\treturn INSTANCE_DEFAULT;\n\t}\n\n\t/**\n\t * Returns an opaque implementation of {@link UuidComparator}.\n\t * \n\t * @return a opaque {@link UuidComparator}\n\t * @see UuidComparator#opaqueCompare(UUID, UUID)\n\t */\n\tpublic static UuidComparator getOpaqueInstance() {\n\t\treturn INSTANCE_OPAQUE;\n\t}\n\n\t/**\n\t * Compares two UUIDs.\n\t * <p>\n\t * The default static method compares two time-based UUIDs by comparing the time\n\t * stamps first and then comparing the least significant bits as unsigned 64-bit\n\t * integers. If both UUIDs are not time-based then it compares them as unsigned\n\t * 128-bit integers.\n\t * <p>\n\t * The first of two UUIDs is greater than the second if the time stamp is\n\t * greater for the first UUID. 
If the time stamps are equal, the first of two\n\t * UUIDs is greater than the second if the most significant byte in which they\n\t * differ is greater for the first UUID.\n\t * <p>\n\t * It can be useful for these reasons:\n\t * <ol>\n\t * <li>{@link UUID#compareTo(UUID)} doesn't work well for time-based UUIDs;\n\t * <li>{@link UUID#compareTo(UUID)} can lead to unexpected behavior due to\n\t * signed {@code long} comparison;\n\t * <li>{@link UUID#compareTo(UUID)} throws {@link NullPointerException} if a\n\t * {@code null} UUID is given.\n\t * </ol>\n\t * \n\t * @param uuid1 a {@code UUID}\n\t * @param uuid2 another {@code UUID}\n\t * @return -1, 0 or 1 as {@code u1} is less than, equal to, or greater than\n\t *         {@code u2}\n\t */\n\tpublic static int defaultCompare(UUID uuid1, UUID uuid2) {\n\n\t\tUUID u1 = uuid1 != null ? uuid1 : new UUID(0L, 0L);\n\t\tUUID u2 = uuid2 != null ? uuid2 : new UUID(0L, 0L);\n\n\t\t// time-based comparison is done by timestamp first\n\t\tif (isTimeBased(u1) && isTimeBased(u2)) {\n\t\t\tUUID rearranged1 = new UUID(u1.timestamp(), u1.getLeastSignificantBits());\n\t\t\tUUID rearranged2 = new UUID(u2.timestamp(), u2.getLeastSignificantBits());\n\t\t\treturn opaqueCompare(rearranged1, rearranged2);\n\t\t}\n\n\t\t// unsigned 128 bit integers\n\t\treturn opaqueCompare(u1, u2);\n\t}\n\n\t/**\n\t * Compares two UUIDs.\n\t * <p>\n\t * The opaque static method compares two UUIDs as unsigned 128-bit integers.\n\t * It's the same as lexicographic sorting of UUID canonical strings.\n\t * <p>\n\t * The first of two UUIDs is greater than the second if the most significant\n\t * byte in which they differ is greater for the first UUID.\n\t * <p>\n\t * The opaque method is faster than the default method as it does not check the\n\t * UUID version.\n\t * <p>\n\t * It's referred to as \"opaque\" just because it works like a \"blind byte-to-byte\n\t * comparison\".\n\t * <p>\n\t * It can be useful for these reasons:\n\t * <ol>\n\t * <li>{@link 
UUID#compareTo(UUID)} can lead to unexpected behavior due to\n\t * signed {@code long} comparison;\n\t * <li>{@link UUID#compareTo(UUID)} throws {@link NullPointerException} if a\n\t * {@code null} UUID is given.\n\t * </ol>\n\t * \n\t * @param uuid1 a {@code UUID}\n\t * @param uuid2 another {@code UUID}\n\t * @return -1, 0 or 1 as {@code u1} is less than, equal to, or greater than\n\t *         {@code u2}\n\t */\n\tpublic static int opaqueCompare(UUID uuid1, UUID uuid2) {\n\n\t\tUUID u1 = uuid1 != null ? uuid1 : new UUID(0L, 0L);\n\t\tUUID u2 = uuid2 != null ? uuid2 : new UUID(0L, 0L);\n\n\t\t// used to compare as UNSIGNED longs\n\t\tfinal long min = 0x8000000000000000L;\n\n\t\tfinal long a = u1.getMostSignificantBits() + min;\n\t\tfinal long b = u2.getMostSignificantBits() + min;\n\n\t\tif (a > b)\n\t\t\treturn 1;\n\t\telse if (a < b)\n\t\t\treturn -1;\n\n\t\tfinal long c = u1.getLeastSignificantBits() + min;\n\t\tfinal long d = u2.getLeastSignificantBits() + min;\n\n\t\tif (c > d)\n\t\t\treturn 1;\n\t\telse if (c < d)\n\t\t\treturn -1;\n\n\t\treturn 0;\n\t}\n\n\t/**\n\t * Compares two UUIDs.\n\t * \n\t * @param uuid1 a {@code UUID}\n\t * @param uuid2 another {@code UUID}\n\t * @return -1, 0 or 1 as {@code u1} is less than, equal to, or greater than\n\t *         {@code u2}\n\t * @see UuidComparator#defaultCompare(UUID, UUID)\n\t */\n\t@Override\n\tpublic int compare(UUID uuid1, UUID uuid2) {\n\t\treturn this.comparator.applyAsInt(uuid1, uuid2);\n\t}\n\n\tprivate static boolean isTimeBased(UUID uuid) {\n\t\treturn uuid.version() == 1 && uuid.variant() == 2;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidTime.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util;\n\nimport java.time.Instant;\n\n/**\n * Utility for UUID time stamps.\n * <p>\n * The UUID time stamp is a 60-bit number.\n * <p>\n * The UUID time stamp resolution is 100ns, i.e., the UUID clock ticks every\n * 100-nanosecond interval.\n * <p>\n * In JDK 8, {@link Instant#now()} has millisecond precision, in spite of\n * {@link Instant} has nanoseconds resolution. 
In JDK 9+,{@link Instant#now()}\n * has microsecond precision.\n * \n * @see <a href=\"https://stackoverflow.com/questions/1712205\">Current time in\n *      microseconds in java</a>\n * @see <a href=\"https://bugs.openjdk.java.net/browse/JDK-8068730\">Increase the\n *      precision of the implementation of java.time.Clock.systemUTC()</a>\n */\npublic final class UuidTime {\n\n\t/**\n\t * The Unix epoch.\n\t */\n\tpublic static final Instant EPOCH_UNIX = Instant.parse(\"1970-01-01T00:00:00.000Z\"); // 0s\n\t/**\n\t * The Gregorian epoch.\n\t */\n\tpublic static final Instant EPOCH_GREG = Instant.parse(\"1582-10-15T00:00:00.000Z\"); // -12219292800s\n\n\t/**\n\t * The Unix epoch in seconds.\n\t */\n\tpublic static final long EPOCH_UNIX_SECONDS = EPOCH_UNIX.getEpochSecond();\n\t/**\n\t * The Gregorian epoch in seconds.\n\t */\n\tpublic static final long EPOCH_GREG_SECONDS = EPOCH_GREG.getEpochSecond();\n\n\t/**\n\t * Number nanos per clock tick.\n\t */\n\tpublic static final long NANOS_PER_TICK = 100; // 1 tick = 100ns\n\t/**\n\t * Number of clock ticks per millisecond.\n\t */\n\tpublic static final long TICKS_PER_MILLI = 10_000; // 1ms = 10,000 ticks\n\t/**\n\t * Number of clock ticks per second.\n\t */\n\tpublic static final long TICKS_PER_SECOND = 10_000_000; // 1s = 10,000,000 ticks\n\n\tprivate UuidTime() {\n\t}\n\n\t/**\n\t * Returns the number of 100ns since 1970-01-01 (Unix epoch).\n\t * <p>\n\t * It uses {@link Instant#now()} to get the the current time.\n\t * \n\t * @return a number of 100ns since 1970-01-01 (Unix epoch).\n\t */\n\tpublic static long getUnixTimestamp() {\n\t\treturn toUnixTimestamp(Instant.now());\n\t}\n\n\t/**\n\t * Returns the number of 100ns since 1582-10-15 (Gregorian epoch).\n\t * <p>\n\t * It uses {@link Instant#now()} to get the the current time.\n\t * \n\t * @return a number of 100ns since 1582-10-15 (Gregorian epoch).\n\t */\n\tpublic static long getGregTimestamp() {\n\t\treturn toGregTimestamp(Instant.now());\n\t}\n\n\t/**\n\t * 
Converts a number of 100ns since 1582-10-15 (Gregorian epoch) into a number\n\t * of 100ns since 1970-01-01 (Unix epoch).\n\t * \n\t * @param gregTimestamp a number of 100ns since 1582-10-15 (Gregorian epoch)\n\t * @return a number of 100ns since 1970-01-01 (Unix epoch)\n\t */\n\tpublic static long toUnixTimestamp(final long gregTimestamp) {\n\t\treturn gregTimestamp + (EPOCH_GREG_SECONDS * TICKS_PER_SECOND);\n\t}\n\n\t/**\n\t * Converts a number of 100ns since 1970-01-01 (Unix epoch) into a number of\n\t * 100ns since 1582-10-15 (Gregorian epoch).\n\t * \n\t * @param unixTimestamp a number of 100ns since 1970-01-01 (Unix epoch)\n\t * @return a number of 100ns since 1582-10-15 (Gregorian epoch).\n\t */\n\tpublic static long toGregTimestamp(final long unixTimestamp) {\n\t\treturn unixTimestamp - (EPOCH_GREG_SECONDS * TICKS_PER_SECOND);\n\t}\n\n\t/**\n\t * Converts an {@link Instant} into a number of 100ns since 1970-01-01 (Unix\n\t * epoch).\n\t * \n\t * @param instant an instant\n\t * @return a number of 100ns since 1970-01-01 (Unix epoch).\n\t */\n\tpublic static long toUnixTimestamp(final Instant instant) {\n\t\tfinal long seconds = instant.getEpochSecond() * TICKS_PER_SECOND;\n\t\tfinal long nanos = instant.getNano() / NANOS_PER_TICK;\n\t\treturn seconds + nanos;\n\t}\n\n\t/**\n\t * Converts an {@link Instant} into a number of 100ns since 1582-10-15\n\t * (Gregorian epoch).\n\t * \n\t * @param instant an instant\n\t * @return a number of 100ns since 1582-10-15 (Gregorian epoch).\n\t */\n\tpublic static long toGregTimestamp(final Instant instant) {\n\t\tfinal long seconds = (instant.getEpochSecond() - EPOCH_GREG_SECONDS) * TICKS_PER_SECOND;\n\t\tfinal long nanos = instant.getNano() / NANOS_PER_TICK;\n\t\treturn seconds + nanos;\n\t}\n\n\t/**\n\t * Converts a number of 100ns since 1970-01-01 (Unix epoch) into an\n\t * {@link Instant}.\n\t * \n\t * @param unixTimestamp a number of 100ns since 1970-01-01 (Unix epoch)\n\t * @return an instant\n\t */\n\tpublic static 
Instant fromUnixTimestamp(final long unixTimestamp) {\n\t\tfinal long seconds = unixTimestamp / TICKS_PER_SECOND;\n\t\tfinal long nanos = (unixTimestamp % TICKS_PER_SECOND) * NANOS_PER_TICK;\n\t\treturn Instant.ofEpochSecond(seconds, nanos);\n\t}\n\n\t/**\n\t * Converts a number of 100ns since 1582-10-15 (Gregorian epoch) into an\n\t * {@link Instant}.\n\t * \n\t * @param gregTimestamp a number of 100ns since 1582-10-15 (Gregorian epoch)\n\t * @return an instant\n\t */\n\tpublic static Instant fromGregTimestamp(final long gregTimestamp) {\n\t\tfinal long seconds = (gregTimestamp / TICKS_PER_SECOND) + EPOCH_GREG_SECONDS;\n\t\tfinal long nanos = (gregTimestamp % TICKS_PER_SECOND) * NANOS_PER_TICK;\n\t\treturn Instant.ofEpochSecond(seconds, nanos);\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidUtil.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util;\n\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.UUID128;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVariant;\nimport com.pinecone.ulf.util.guid.i128.enums.UuidVersion;\n\nimport java.time.Instant;\nimport java.util.Objects;\nimport java.util.UUID;\n\n/**\n * Utility for checking UUID version, extracting UUID details, etc.\n */\npublic final class UuidUtil {\n\n\tprivate static final String MESSAGE_NOT_A_TIME_BASED_UUID = \"Not a time-based, time-ordered or DCE Security UUID: %s.\";\n\tprivate static final String MESSAGE_NOT_A_TIME_ORDERED_EPOCH_UUID = \"Not a time-ordered with Unix Epoch UUID: %s.\";\n\tprivate static final String MESSAGE_NOT_A_DCE_SECURITY_UUID = \"Not a DCE Security UUID: 
%s.\";\n\n\tprivate UuidUtil() {\n\t}\n\n\t/**\n\t * Get a copy of a UUID.\n\t * <p>\n\t * It is just a convenience method for cloning UUIDs.\n\t * \n\t * @param uuid a UUID\n\t * @return a copy of a UUID\n\t */\n\tpublic static GUID128 copy(GUID128 uuid) {\n\t\treturn new UUID128(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits()) {\n\t\t};\n\t}\n\n\t/**\n\t * Checks whether the UUID is equal to the Nil UUID.\n\t * <p>\n\t * The Nil UUID is special UUID that has all 128 bits set to zero.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is an RFC 9562 variant\n\t * @exception NullPointerException if null\n\t */\n\tpublic static boolean isNil(GUID128 uuid) {\n\t\tObjects.requireNonNull(uuid, \"Null UUID is not equal to Nil UUID\");\n\t\treturn uuid.getMostSignificantBits() == 0L && uuid.getLeastSignificantBits() == 0L;\n\t}\n\n\t/**\n\t * Checks whether the UUID is equal to the Max UUID.\n\t * <p>\n\t * The Max UUID is special UUID that has all 128 bits set to one.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is an RFC 9562 variant\n\t * @exception NullPointerException if null\n\t */\n\tpublic static boolean isMax(GUID128 uuid) {\n\t\tObjects.requireNonNull(uuid, \"Null UUID is not equal to Max UUID\");\n\t\treturn uuid.getMostSignificantBits() == -1L && uuid.getLeastSignificantBits() == -1L;\n\t}\n\n\t/**\n\t * Get the UUID version.\n\t * \n\t * @param uuid a UUID\n\t * @return a {@link UuidVersion}\n\t * @see UuidVersion\n\t */\n\tpublic static UuidVersion getVersion(GUID128 uuid) {\n\t\treturn UuidVersion.getVersion(uuid.version());\n\t}\n\n\t/**\n\t * Get the UUID version.\n\t * \n\t * @param uuid a UUID\n\t * @return a {@link UuidVariant}\n\t * @see UuidVariant\n\t */\n\tpublic static UuidVariant getVariant(GUID128 uuid) {\n\t\treturn UuidVariant.getVariant(uuid.variant());\n\t}\n\n\t/**\n\t * Applies UUID version bits into the UUID\n\t * \n\t * @param uuid    a UUID\n\t * @param version a version\n\t * @return a 
UUID\n\t */\n\tpublic static UUID setVersion(GUID128 uuid, int version) {\n\t\tlong msb = uuid.getMostSignificantBits();\n\t\tlong lsb = uuid.getLeastSignificantBits();\n\t\tmsb = (msb & 0xffffffffffff0fffL) | ((version & 0x0000000f) << 12); // apply version\n\t\tlsb = (lsb & 0x3fffffffffffffffL) | 0x8000000000000000L; // apply variant\n\t\treturn new UUID(msb, lsb);\n\t}\n\n\t/**\n\t * Checks whether the UUID variant is the one defined by the RFC 9562.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is an RFC 9562 variant\n\t */\n\tpublic static boolean isStandard(GUID128 uuid) {\n\t\treturn isVariant(uuid, UuidVariant.VARIANT_STANDARD);\n\t}\n\n\t/**\n\t * Checks whether the UUID variant is reserved NCS.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is an reserved NCS variant\n\t */\n\tpublic static boolean isReservedNcs(GUID128 uuid) {\n\t\treturn isVariant(uuid, UuidVariant.VARIANT_RESERVED_NCS);\n\t}\n\n\t/**\n\t * Checks whether the UUID variant is reserved Microsoft.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is an reserved Microsoft variant\n\t */\n\tpublic static boolean isReservedMicrosoft(GUID128 uuid) {\n\t\treturn isVariant(uuid, UuidVariant.VARIANT_RESERVED_MICROSOFT);\n\t}\n\n\t/**\n\t * Checks whether the UUID variant is reserved future.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is an reserved future variant\n\t */\n\tpublic static boolean isReservedFuture(GUID128 uuid) {\n\t\treturn isVariant(uuid, UuidVariant.VARIANT_RESERVED_FUTURE);\n\t}\n\n\t/**\n\t * Checks whether the UUID version 4.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is a random UUID\n\t */\n\tpublic static boolean isRandomBased(GUID128 uuid) {\n\t\treturn isVersion(uuid, UuidVersion.VERSION_RANDOM_BASED);\n\t}\n\n\t/**\n\t * Checks whether the UUID version 3.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is a name-based UUID\n\t */\n\tpublic static boolean 
isNameBasedMd5(GUID128 uuid) {\n\t\treturn isVersion(uuid, UuidVersion.VERSION_NAME_BASED_MD5);\n\t}\n\n\t/**\n\t * Checks whether the UUID version 5.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is a name-based UUID\n\t */\n\tpublic static boolean isNameBasedSha1(GUID128 uuid) {\n\t\treturn isVersion(uuid, UuidVersion.VERSION_NAME_BASED_SHA1);\n\t}\n\n\t/**\n\t * Checks whether the UUID version 1.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is a time-based UUID\n\t */\n\tpublic static boolean isTimeBased(GUID128 uuid) {\n\t\treturn isVersion(uuid, UuidVersion.VERSION_TIME_BASED);\n\t}\n\n\t/**\n\t * Checks whether the UUID version 6.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is a time-ordered UUID\n\t */\n\tpublic static boolean isTimeOrdered(GUID128 uuid) {\n\t\treturn isVersion(uuid, UuidVersion.VERSION_TIME_ORDERED);\n\t}\n\n\t/**\n\t * Checks whether the UUID version 7.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is a time-ordered with Unix Epoch UUID\n\t */\n\tpublic static boolean isTimeOrderedEpoch(GUID128 uuid) {\n\t\treturn isVersion(uuid, UuidVersion.VERSION_TIME_ORDERED_EPOCH);\n\t}\n\n\t/**\n\t * Checks whether the UUID version 2.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is a DCE Security UUID\n\t */\n\tpublic static boolean isDceSecurity(GUID128 uuid) {\n\t\treturn isVersion(uuid, UuidVersion.VERSION_DCE_SECURITY);\n\t}\n\n\t/**\n\t * Checks whether the UUID version 8.\n\t * \n\t * @param uuid a UUID\n\t * @return boolean true if it is a custom UUID\n\t */\n\tpublic static boolean isCustom(GUID128 uuid) {\n\t\treturn isVersion(uuid, UuidVersion.VERSION_CUSTOM);\n\t}\n\n\t/**\n\t * Returns the instant from a time-based, time-ordered or DCE Security UUID.\n\t *\n\t * @param uuid a UUID\n\t * @return {@link Instant}\n\t * @throws IllegalArgumentException if the input is not a time-based,\n\t *                                  time-ordered or DCE 
Security UUID.\n\t */\n\tpublic static Instant getInstant(GUID128 uuid) {\n\t\tif (isTimeOrderedEpoch(uuid)) {\n\t\t\tfinal long unixTimestamp = getUnixTimestamp(uuid);\n\t\t\treturn UuidTime.fromUnixTimestamp(unixTimestamp);\n\t\t} else {\n\t\t\tfinal long gregTimestamp = getGregTimestamp(uuid);\n\t\t\treturn UuidTime.fromGregTimestamp(gregTimestamp);\n\t\t}\n\t}\n\n\t/**\n\t * Returns the time stamp from a time-based, time-ordered or DCE Security UUID.\n\t * <p>\n\t * The value returned by this method is the number of 100-nanos since 1582-10-15\n\t * (Gregorian epoch).\n\t *\n\t * @param uuid a UUID\n\t * @return long the timestamp\n\t * @throws IllegalArgumentException if the input is not a time-based,\n\t *                                  time-ordered or DCE Security UUID.\n\t */\n\tpublic static long getTimestamp(GUID128 uuid) {\n\t\tif (isTimeOrderedEpoch(uuid)) {\n\t\t\treturn UuidTime.toGregTimestamp(getUnixTimestamp(uuid));\n\t\t} else {\n\t\t\treturn getGregTimestamp(uuid);\n\t\t}\n\t}\n\n\tprivate static long getUnixTimestamp(GUID128 uuid) {\n\t\tif (UuidUtil.isTimeOrderedEpoch(uuid)) {\n\t\t\treturn getTimeOrderedEpochTimestamp(uuid.getMostSignificantBits());\n\t\t} else {\n\t\t\tthrow new IllegalArgumentException(String.format(MESSAGE_NOT_A_TIME_ORDERED_EPOCH_UUID, uuid.toString()));\n\t\t}\n\t}\n\n\tprivate static long getGregTimestamp(GUID128 uuid) {\n\t\tif (UuidUtil.isTimeBased(uuid)) {\n\t\t\treturn getTimeBasedTimestamp(uuid.getMostSignificantBits());\n\t\t} else if (UuidUtil.isTimeOrdered(uuid)) {\n\t\t\treturn getTimeOrderedTimestamp(uuid.getMostSignificantBits());\n\t\t} else if (UuidUtil.isDceSecurity(uuid)) {\n\t\t\treturn getTimeBasedTimestamp(uuid.getMostSignificantBits() & 0x00000000ffffffffL);\n\t\t} else {\n\t\t\tthrow new IllegalArgumentException(String.format(MESSAGE_NOT_A_TIME_BASED_UUID, uuid.toString()));\n\t\t}\n\t}\n\n\t/**\n\t * Get the node identifier from a time-based, time-ordered or DCE Security UUID.\n\t *\n\t * @param 
uuid a UUID\n\t * @return long the node identifier\n\t * @throws IllegalArgumentException if the input is not a time-based,\n\t *                                  time-ordered or DCE Security UUID.\n\t */\n\tpublic static long getNodeIdentifier(GUID128 uuid) {\n\n\t\tif (!(UuidUtil.isTimeBased(uuid) || UuidUtil.isTimeOrdered(uuid) || UuidUtil.isDceSecurity(uuid))) {\n\t\t\tthrow new IllegalArgumentException(String.format(MESSAGE_NOT_A_TIME_BASED_UUID, uuid.toString()));\n\t\t}\n\n\t\treturn uuid.getLeastSignificantBits() & 0x0000ffffffffffffL;\n\t}\n\n\t/**\n\t * Get the clock sequence from a time-based, time-ordered or DCE Security UUID.\n\t *\n\t * @param uuid a UUID\n\t * @return int the clock sequence\n\t * @throws IllegalArgumentException if the input is not a time-based,\n\t *                                  time-ordered or DCE Security UUID.\n\t */\n\tpublic static int getClockSequence(GUID128 uuid) {\n\n\t\tif (!(UuidUtil.isTimeBased(uuid) || UuidUtil.isTimeOrdered(uuid)) || UuidUtil.isDceSecurity(uuid)) {\n\t\t\tthrow new IllegalArgumentException(String.format(MESSAGE_NOT_A_TIME_BASED_UUID, uuid.toString()));\n\t\t}\n\n\t\tif (UuidUtil.isDceSecurity(uuid)) {\n\t\t\treturn (int) (uuid.getLeastSignificantBits() >>> 56) & 0x0000003f;\n\t\t}\n\n\t\treturn (int) (uuid.getLeastSignificantBits() >>> 48) & 0x00003fff;\n\t}\n\n\t/**\n\t * Get the local domain number from a DCE Security UUID.\n\t *\n\t * @param uuid a UUID\n\t * @return the local domain\n\t * @throws IllegalArgumentException if the input is not a DCE Security UUID.\n\t */\n\tpublic static byte getLocalDomain(GUID128 uuid) {\n\n\t\tif (!UuidUtil.isDceSecurity(uuid)) {\n\t\t\tthrow new IllegalArgumentException(String.format(MESSAGE_NOT_A_DCE_SECURITY_UUID, uuid.toString()));\n\t\t}\n\n\t\treturn (byte) ((uuid.getLeastSignificantBits() & 0x00ff000000000000L) >>> 48);\n\t}\n\n\t/**\n\t * Get the local identifier number from a DCE Security UUID.\n\t *\n\t * @param uuid a UUID\n\t * @return the local 
identifier\n\t * @throws IllegalArgumentException if the input is not a DCE Security UUID.\n\t */\n\tpublic static int getLocalIdentifier(GUID128 uuid) {\n\n\t\tif (!UuidUtil.isDceSecurity(uuid)) {\n\t\t\tthrow new IllegalArgumentException(String.format(MESSAGE_NOT_A_DCE_SECURITY_UUID, uuid.toString()));\n\t\t}\n\n\t\treturn (int) (uuid.getMostSignificantBits() >>> 32);\n\t}\n\n\t/**\n\t * Check the UUID variant.\n\t * \n\t * @param uuid    a UUID\n\t * @param variant a variant\n\t * @return true if the the the variant is correct\n\t * @exception NullPointerException if null\n\t */\n\tprivate static boolean isVariant(GUID128 uuid, UuidVariant variant) {\n\t\tObjects.requireNonNull(uuid, \"Null UUID\");\n\t\treturn (uuid.variant() == variant.getValue());\n\t}\n\n\t/**\n\t * Check the UUID version.\n\t * \n\t * @param uuid    a UUID\n\t * @param variant a version\n\t * @return true if the the the version is correct\n\t * @exception NullPointerException if null\n\t */\n\tprivate static boolean isVersion(GUID128 uuid, UuidVersion version) {\n\t\tObjects.requireNonNull(uuid, \"Null UUID\");\n\t\treturn isStandard(uuid) && (uuid.version() == version.getValue());\n\t}\n\n\tprivate static long getTimeBasedTimestamp(long msb) {\n\n\t\tlong hii = (msb & 0xffffffff00000000L) >>> 32;\n\t\tlong mid = (msb & 0x00000000ffff0000L) << 16;\n\t\tlong low = (msb & 0x0000000000000fffL) << 48;\n\n\t\treturn (hii | mid | low);\n\t}\n\n\tprivate static long getTimeOrderedTimestamp(long msb) {\n\n\t\tlong himid = (msb & 0xffffffffffff0000L) >>> 4;\n\t\tlong low = (msb & 0x0000000000000fffL);\n\n\t\treturn (himid | low);\n\t}\n\n\tprivate static long getTimeOrderedEpochTimestamp(long msb) {\n\t\t// 100ns ticks since 1970\n\t\tfinal long ticksPerMilli = 10_000; // 1ms = 10,000 ticks\n\t\treturn ((msb & 0xffffffffffff0000L) >>> 16) * ticksPerMilli;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/UuidValidator.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.ulf.util.guid.i128.codec.base.Base16Codec;\nimport com.pinecone.ulf.util.guid.i128.exception.InvalidUuidException;\nimport com.pinecone.ulf.util.guid.i128.util.immutable.ByteArray;\n\nimport java.util.UUID;\n\n/**\n * Utility for UUID validation.\n * <p>\n * Using it is much faster than using on regular expression.\n * <p>\n * Examples of valid string formats:\n * <ul>\n * <li><code>12345678-abcd-abcd-abcd-123456789abcd</code> (36 hexadecimal chars,\n * lower case and with hyphen)\n * <li><code>12345678-ABCD-ABCD-ABCD-123456789ABCD</code> (36 hexadecimal chars,\n * UPPER CASE and with hyphen)\n * <li><code>12345678abcdabcdabcd123456789abcd</code> (32 hexadecimal chars,\n * lower case and 
WITHOUT hyphen)\n * <li><code>12345678ABCDABCDABCD123456789ABCD</code> (32 hexadecimal chars,\n * UPPER CASE and WITHOUT hyphen)\n * </ul>\n */\npublic final class UuidValidator {\n\n\tprivate static final ByteArray MAP = Base16Codec.INSTANCE.getBase().getMap();\n\n\tprivate UuidValidator() {\n\t}\n\n\t/**\n\t * Checks if the UUID is valid.\n\t * \n\t * @param uuid a UUID\n\t * @return true if valid, false if invalid\n\t */\n\tpublic static boolean isValid(final GUID uuid) {\n\t\treturn uuid != null;\n\t}\n\n\t/**\n\t * Checks if the UUID is valid.\n\t * \n\t * @param uuid    a UUID\n\t * @param version a version number\n\t * @return true if valid, false if invalid\n\t */\n\tpublic static boolean isValid(final GUID uuid, int version) {\n\t\treturn uuid != null && isVersion(uuid, version);\n\t}\n\n\t/**\n\t * Checks if the UUID byte array is valid.\n\t * \n\t * @param uuid a UUID byte array\n\t * @return true if valid, false if invalid\n\t */\n\tpublic static boolean isValid(final byte[] uuid) {\n\t\treturn uuid != null && uuid.length == 16;\n\t}\n\n\t/**\n\t * Checks if the UUID byte array is valid.\n\t * \n\t * @param uuid    a UUID byte array\n\t * @param version a version number\n\t * @return true if valid, false if invalid\n\t */\n\tpublic static boolean isValid(final byte[] uuid, int version) {\n\t\treturn uuid != null && uuid.length == 16 && isVersion(uuid, version);\n\t}\n\n\t/**\n\t * Checks if the UUID string is valid.\n\t * \n\t * @param uuid a UUID string\n\t * @return true if valid, false if invalid\n\t */\n\tpublic static boolean isValid(final String uuid) {\n\t\treturn uuid != null && uuid.length() != 0 && isParseable(uuid.toCharArray());\n\t}\n\n\t/**\n\t * Checks if the UUID string is valid.\n\t * \n\t * @param uuid    a UUID string\n\t * @param version a version number\n\t * @return true if valid, false if invalid\n\t */\n\tpublic static boolean isValid(final String uuid, int version) {\n\t\treturn uuid != null && uuid.length() != 0 && 
isParseable(uuid.toCharArray(), version);\n\t}\n\n\t/**\n\t * Checks if the UUID char array is valid.\n\t * \n\t * @param uuid a UUID char array\n\t * @return true if valid, false if invalid\n\t */\n\tpublic static boolean isValid(final char[] uuid) {\n\t\treturn uuid != null && uuid.length != 0 && isParseable(uuid);\n\t}\n\n\t/**\n\t * Checks if the UUID char array is valid.\n\t * \n\t * @param uuid    a UUID char array\n\t * @param version a version number\n\t * @return true if valid, false if invalid\n\t */\n\tpublic static boolean isValid(final char[] uuid, int version) {\n\t\treturn uuid != null && uuid.length != 0 && isParseable(uuid, version);\n\t}\n\n\t/**\n\t * Checks if the UUID is valid.\n\t * \n\t * @param uuid a UUID\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static void validate(final GUID uuid) {\n\t\tif (uuid == null) {\n\t\t\tthrow InvalidUuidException.newInstance(null);\n\t\t}\n\t}\n\n\t/**\n\t * Checks if the UUID is valid.\n\t * \n\t * @param uuid    a UUID\n\t * @param version a version number\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static void validate(final GUID uuid, int version) {\n\t\tif (uuid == null || !isVersion(uuid, version)) {\n\t\t\tthrow InvalidUuidException.newInstance(uuid);\n\t\t}\n\t}\n\n\t/**\n\t * Checks if the UUID byte array is valid.\n\t * \n\t * @param uuid a UUID byte array\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static void validate(final byte[] uuid) {\n\t\tif (uuid == null || uuid.length != 16) {\n\t\t\tthrow InvalidUuidException.newInstance(uuid);\n\t\t}\n\t}\n\n\t/**\n\t * Checks if the UUID byte array is valid.\n\t * \n\t * @param uuid    a UUID byte array\n\t * @param version a version number\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static void validate(final byte[] uuid, int version) {\n\t\tif (uuid == null || uuid.length != 16 || !isVersion(uuid, version)) 
{\n\t\t\tthrow InvalidUuidException.newInstance(uuid);\n\t\t}\n\t}\n\n\t/**\n\t * Checks if the UUID string is a valid.\n\t * \n\t * @param uuid a UUID string\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static void validate(final String uuid) {\n\t\tif (uuid == null || !isParseable(uuid.toCharArray())) {\n\t\t\tthrow InvalidUuidException.newInstance(uuid);\n\t\t}\n\t}\n\n\t/**\n\t * Checks if the UUID string is a valid.\n\t * \n\t * @param uuid    a UUID string\n\t * @param version a version number\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static void validate(final String uuid, int version) {\n\t\tif (uuid == null || !isParseable(uuid.toCharArray(), version)) {\n\t\t\tthrow InvalidUuidException.newInstance(uuid);\n\t\t}\n\t}\n\n\t/**\n\t * Checks if the UUID char array is valid.\n\t * \n\t * @param uuid a UUID char array\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static void validate(final char[] uuid) {\n\t\tif (uuid == null || !isParseable(uuid)) {\n\t\t\tthrow InvalidUuidException.newInstance(uuid);\n\t\t}\n\t}\n\n\t/**\n\t * Checks if the UUID char array is valid.\n\t * \n\t * @param uuid    a UUID char array\n\t * @param version a version number\n\t * @throws InvalidUuidException if the argument is invalid\n\t */\n\tpublic static void validate(final char[] uuid, int version) {\n\t\tif (uuid == null || !isParseable(uuid, version)) {\n\t\t\tthrow InvalidUuidException.newInstance(uuid);\n\t\t}\n\t}\n\n\tprivate static final int[] DASH_POSITIONS = {8, 13, 18, 23};\n\tprivate static final int WITH_DASH_UUID_LENGTH = 36;\n\tprivate static final int WITHOUT_DASH_UUID_LENGTH = 32;\n\tprivate static final int MAX_DASH_COUNT = 4;\n\t/**\n\t * Checks if the UUID char array can be parsed.\n\t * \n\t * @param chars a char array\n\t * @return true if valid, false if invalid\n\t */\n\tprotected static boolean isParseable(final char[] chars) {\n\t\tint dashCount = 
0;\n\t\tfor (int i = 0; i < chars.length; i++) {\n\t\t\tif (chars[i] > MAP.length() || MAP.get(chars[i]) == -1) {\n\t\t\t\tif (chars[i] == '-') {\n\t\t\t\t\tdashCount++;\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\treturn false; // invalid character!\n\t\t\t}\n\t\t}\n\n\t\tif (chars.length == WITH_DASH_UUID_LENGTH && dashCount == MAX_DASH_COUNT) {\n\t\t\t// check if the hyphens positions are correct\n\t\t\treturn chars[DASH_POSITIONS[0]] == '-' && chars[DASH_POSITIONS[1]] == '-' && chars[DASH_POSITIONS[2]] == '-' && chars[DASH_POSITIONS[3]] == '-';\n\t\t}\n\n\t\treturn chars.length == WITHOUT_DASH_UUID_LENGTH && dashCount == 0;\n\t}\n\n\t/**\n\t * Checks if the UUID char array can be parsed.\n\t * \n\t * @param chars   a char array\n\t * @param version a version number\n\t * @return true if valid, false if invalid\n\t */\n\tprotected static boolean isParseable(final char[] chars, int version) {\n\t\treturn isVersion(chars, version) && isParseable(chars);\n\t}\n\n\t/**\n\t * Checks the version number of a UUID.\n\t * \n\t * @param uuid    a UUID\n\t * @param version a version number\n\t * @return true if the UUID version is equal to the expected version number\n\t */\n\tprotected static boolean isVersion(GUID uuid, int version) {\n//\t\tboolean versionOk = ((version & ~0xf) == 0) && (uuid.version() == version);\n//\t\tboolean variantOk = uuid.variant() == 2; // RFC 9562\n//\t\treturn versionOk && variantOk;\n\t\treturn true;\n\t}\n\n\t/**\n\t * Checks the version number of a UUID byte array.\n\t * \n\t * @param bytes   a byte array\n\t * @param version a version number\n\t * @return true if the UUID version is equal to the expected version number\n\t */\n\tprotected static boolean isVersion(byte[] bytes, int version) {\n\t\tboolean versionOk = ((version & ~0xf) == 0) && (((bytes[6] & 0xff) >>> 4) == version);\n\t\tboolean variantOk = ((bytes[8] & 0xff) >>> 6) == 2; // RFC 9562\n\t\treturn versionOk && variantOk;\n\t}\n\n\t/**\n\t * Checks the version number of a UUID 
char array.\n\t * \n\t * @param chars   a string\n\t * @param version a version number\n\t * @return true if the UUID version is equal to the expected version number\n\t */\n\tprotected static boolean isVersion(char[] chars, int version) {\n\n\t\t// valid if between 0x0 and 0xf\n\t\tif ((version & ~0xf) != 0) {\n\t\t\treturn false;\n\t\t}\n\n\t\tint ver = 0; // version index\n\t\tint var = 0; // variant index\n\n\t\tswitch (chars.length) {\n\t\tcase 32: // without hyphen\n\t\t\tver = 12;\n\t\t\tvar = 16;\n\t\t\tbreak;\n\t\tcase 36: // with hyphen\n\t\t\tver = 14;\n\t\t\tvar = 19;\n\t\t\tbreak;\n\t\tdefault:\n\t\t\treturn false;\n\t\t}\n\n\t\tfinal char[] lower = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' };\n\t\tfinal char[] upper = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };\n\t\tboolean versionOk = ((version & ~0xf) == 0) && (chars[ver] == lower[version] || chars[ver] == upper[version]);\n\t\tboolean variantOk = chars[var] == '8' || chars[var] == '9' //\n\t\t\t\t|| chars[var] == 'a' || chars[var] == 'b' || chars[var] == 'A' || chars[var] == 'B';\n\n\t\treturn versionOk && variantOk;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/immutable/ByteArray.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util.immutable;\n\nimport java.util.Arrays;\n\n/**\n * Immutable array of bytes.\n */\npublic final class ByteArray {\n\n\tprivate final byte[] array;\n\n\tprivate ByteArray(byte[] a) {\n\t\tarray = Arrays.copyOf(a, a.length);\n\t}\n\n\t/**\n\t * Creates an instance of this class.\n\t * \n\t * @param a an array of bytes\n\t * @return a new instance\n\t */\n\tpublic static ByteArray from(byte[] a) {\n\t\treturn new ByteArray(a);\n\t}\n\n\t/**\n\t * Return the byte at a position.\n\t * \n\t * @param index the position\n\t * @return a byte\n\t */\n\tpublic byte get(int index) {\n\t\treturn array[index];\n\t}\n\n\t/**\n\t * Returns the array length\n\t * \n\t * @return the length\n\t */\n\tpublic int length() {\n\t\treturn this.array.length;\n\t}\n\n\t/**\n\t * 
Returns copy of the array.\n\t * \n\t * @return an array of bytes\n\t */\n\tpublic byte[] array() {\n\t\treturn Arrays.copyOf(array, array.length);\n\t}\n\n\t@Override\n\tpublic int hashCode() {\n\t\tfinal int prime = 31;\n\t\tint result = 1;\n\t\tresult = prime * result + Arrays.hashCode(array);\n\t\treturn result;\n\t}\n\n\t@Override\n\tpublic boolean equals(Object obj) {\n\t\tif (this == obj)\n\t\t\treturn true;\n\t\tif (obj == null)\n\t\t\treturn false;\n\t\tif (getClass() != obj.getClass())\n\t\t\treturn false;\n\t\tByteArray other = (ByteArray) obj;\n\t\treturn Arrays.equals(array, other.array);\n\t}\n\n\t@Override\n\tpublic String toString() {\n\t\treturn \"ByteArray [array=\" + Arrays.toString(array) + \"]\";\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/immutable/CharArray.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util.immutable;\n\nimport java.util.Arrays;\n\n/**\n * Immutable array of chars.\n */\npublic final class CharArray {\n\n\tprivate final char[] array;\n\n\tprivate CharArray(char[] a) {\n\t\tarray = Arrays.copyOf(a, a.length);\n\t}\n\n\t/**\n\t * Creates an instance of this class.\n\t * \n\t * @param a an array of chars.\n\t * @return a new instance\n\t */\n\tpublic static CharArray from(char[] a) {\n\t\treturn new CharArray(a);\n\t}\n\n\t/**\n\t * Return the char at a position.\n\t * \n\t * @param index the position\n\t * @return a char\n\t */\n\tpublic char get(int index) {\n\t\treturn array[index];\n\t}\n\n\t/**\n\t * Returns the array length.\n\t * \n\t * @return the length\n\t */\n\tpublic int length() {\n\t\treturn this.array.length;\n\t}\n\n\t/**\n\t * 
Returns copy of the array.\n\t * \n\t * @return an array of chars\n\t */\n\tpublic char[] array() {\n\t\treturn array.clone();\n\t}\n\n\t@Override\n\tpublic int hashCode() {\n\t\tfinal int prime = 31;\n\t\tint result = 1;\n\t\tresult = prime * result + Arrays.hashCode(array);\n\t\treturn result;\n\t}\n\n\t@Override\n\tpublic boolean equals(Object obj) {\n\t\tif (this == obj)\n\t\t\treturn true;\n\t\tif (obj == null)\n\t\t\treturn false;\n\t\tif (getClass() != obj.getClass())\n\t\t\treturn false;\n\t\tCharArray other = (CharArray) obj;\n\t\treturn Arrays.equals(array, other.array);\n\t}\n\n\t@Override\n\tpublic String toString() {\n\t\treturn \"CharArray [array=\" + Arrays.toString(array) + \"]\";\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/ByteUtil.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util.internal;\n\n/**\n * Utility class that contains many static methods for byte handling.\n */\npublic final class ByteUtil {\n\n\tprivate ByteUtil() {\n\t}\n\n\t/**\n\t * Get a number from a given array of bytes.\n\t * \n\t * @param bytes a byte array\n\t * @return a long\n\t */\n\tpublic static long toNumber(final byte[] bytes) {\n\t\treturn toNumber(bytes, 0, bytes.length);\n\t}\n\n\t/**\n\t * Get a number from a given array of bytes.\n\t * \n\t * @param bytes a byte array\n\t * @param start first byte of the array\n\t * @param end   last byte of the array (exclusive)\n\t * @return a long\n\t */\n\tpublic static long toNumber(final byte[] bytes, final int start, final int end) {\n\t\tlong result = 0;\n\t\tfor (int i = start; i < end; i++) {\n\t\t\tresult = 
(result << 8) | (bytes[i] & 0xffL);\n\t\t}\n\t\treturn result;\n\t}\n\n\t/**\n\t * Get a hexadecimal string from given array of bytes.\n\t *\n\t * @param bytes byte array\n\t * @return a string\n\t */\n\tpublic static String toHexadecimal(final byte[] bytes) {\n\n\t\tfinal char[] chars = new char[bytes.length * 2];\n\t\tfor (int i = 0, j = 0; i < bytes.length; i++, j += 2) {\n\t\t\tfinal int v = bytes[i] & 0xff;\n\t\t\tchars[j] = toHexChar(v >>> 4);\n\t\t\tchars[j + 1] = toHexChar(v & 0x0f);\n\t\t}\n\t\treturn new String(chars);\n\t}\n\n\t/**\n\t * Get a hexadecimal from a number value.\n\t * \n\t * @param number a number\n\t * @return a char\n\t */\n\tprivate static char toHexChar(final int number) {\n\t\tif (number >= 0x00 && number <= 0x09) {\n\t\t\t// ASCII codes from 0 to 9\n\t\t\treturn (char) (0x30 + number);\n\t\t} else if (number >= 0x0a && number <= 0x0f) {\n\t\t\t// ASCII codes from 'a' to 'f'\n\t\t\treturn (char) (0x57 + number);\n\t\t}\n\t\treturn 0;\n\t}\n\n\t/**\n\t * Converts an array of bytes into an array of integers. Each integer is formed by combining 4 bytes\n\t * from the input array. This method assumes that the input byte array is at least 16 bytes long.\n\t * The conversion is done by treating each set of 4 bytes as a single integer, with the first byte being the most significant.\n\t *\n\t * @param bytes An array of bytes to be converted into integers. 
This array should be at least 16 bytes long.\n\t * @return An array of 4 integers, where each integer is formed by combining 4 bytes from the input array.\n\t */\n\tpublic static int[] toInts(byte[] bytes) {\n\t\tint[] ints = new int[4];\n\t\tints[0] |= (bytes[0x0] & 0xff) << 24;\n\t\tints[0] |= (bytes[0x1] & 0xff) << 16;\n\t\tints[0] |= (bytes[0x2] & 0xff) << 8;\n\t\tints[0] |= (bytes[0x3] & 0xff);\n\t\tints[1] |= (bytes[0x4] & 0xff) << 24;\n\t\tints[1] |= (bytes[0x5] & 0xff) << 16;\n\t\tints[1] |= (bytes[0x6] & 0xff) << 8;\n\t\tints[1] |= (bytes[0x7] & 0xff);\n\t\tints[2] |= (bytes[0x8] & 0xff) << 24;\n\t\tints[2] |= (bytes[0x9] & 0xff) << 16;\n\t\tints[2] |= (bytes[0xa] & 0xff) << 8;\n\t\tints[2] |= (bytes[0xb] & 0xff);\n\t\tints[3] |= (bytes[0xc] & 0xff) << 24;\n\t\tints[3] |= (bytes[0xd] & 0xff) << 16;\n\t\tints[3] |= (bytes[0xe] & 0xff) << 8;\n\t\tints[3] |= (bytes[0xf] & 0xff);\n\t\treturn ints;\n\t}\n\n\t/**\n\t * Converts an array of integers into an array of bytes. Each integer is decomposed into 4 bytes,\n\t * with the most significant byte being placed first. This method produces a byte array of length 16,\n\t * assuming the input array contains exactly 4 integers. The conversion is performed by shifting\n\t * and masking operations to extract each byte from the integers.\n\t *\n\t * @param ints An array of integers to be converted into bytes. 
This array should contain exactly 4 integers.\n\t * @return A byte array of length 16, where each group of 4 bytes represents one of the integers from the input array.\n\t */\n\tpublic static byte[] fromInts(int[] ints) {\n\t\tbyte[] bytes = new byte[16]; \n\t\tbytes[0x0] = (byte) (ints[0] >>> 24);\n\t\tbytes[0x1] = (byte) (ints[0] >>> 16);\n\t\tbytes[0x2] = (byte) (ints[0] >>> 8);\n\t\tbytes[0x3] = (byte) (ints[0]);\n\t\tbytes[0x4] = (byte) (ints[1] >>> 24);\n\t\tbytes[0x5] = (byte) (ints[1] >>> 16);\n\t\tbytes[0x6] = (byte) (ints[1] >>> 8);\n\t\tbytes[0x7] = (byte) (ints[1]);\n\t\tbytes[0x8] = (byte) (ints[2] >>> 24);\n\t\tbytes[0x9] = (byte) (ints[2] >>> 16);\n\t\tbytes[0xa] = (byte) (ints[2] >>> 8);\n\t\tbytes[0xb] = (byte) (ints[2]);\n\t\tbytes[0xc] = (byte) (ints[3] >>> 24);\n\t\tbytes[0xd] = (byte) (ints[3] >>> 16);\n\t\tbytes[0xe] = (byte) (ints[3] >>> 8);\n\t\tbytes[0xf] = (byte) (ints[3]);\n\t\treturn bytes;\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/JavaVersionUtil.java",
    "content": "/*\n * MIT License\n *\n * Copyright (c) 2018-2025 Fabio Lima\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util.internal;\n\npublic class JavaVersionUtil {\n\n    /**\n     * Returns the java major version number.\n     *\n     * @see <a href= \"https://www.java.com/releases/\">JDK Releases</a>\n     * @return major version number\n     */\n    public static int getJavaVersion() {\n        try {\n\n            String property = System.getProperty(\"java.version\");\n\n            if (property != null) {\n                String[] version = property.split(\"\\\\.\");\n                if (version[0].equals(\"1\")) {\n                    return Integer.parseInt(version[1]);\n                } else {\n                    return Integer.parseInt(version[0]);\n                }\n            } else {\n                return 8;\n            }\n\n        } catch 
(NumberFormatException | IndexOutOfBoundsException e) {\n            return 8;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/NetworkUtil.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util.internal;\n\nimport java.net.InetAddress;\nimport java.net.NetworkInterface;\nimport java.net.SocketException;\nimport java.net.UnknownHostException;\nimport java.util.Enumeration;\n\n/**\n * Utility class that returns host name, MAC and IP.\n */\npublic final class NetworkUtil {\n\n\tprivate static String hostname;\n\tprivate static String mac;\n\tprivate static String ip;\n\n\tprivate NetworkUtil() {\n\t}\n\n\t/**\n\t * Returns the host name if found.\n\t * <p>\n\t * Sequence of HOSTNAME search:\n\t * <ol>\n\t * <li>Try to find the HOSTNAME variable on LINUX environment;\n\t * <li>Try to find the COMPUTERNAME variable on WINDOWS environment;\n\t * <li>Try to find the host name by calling\n\t * {@code InetAddress.getLocalHost().getHostName()} (the 
expensive way);\n\t * <li>If no host name is found, return {@code null}.\n\t * </ol>\n\t * \n\t * @return a string containing the host name\n\t */\n\tpublic static synchronized String hostname() {\n\n\t\tif (hostname != null) {\n\t\t\treturn hostname;\n\t\t}\n\n\t\t// try to find HOSTNAME on LINUX\n\t\thostname = System.getenv(\"HOSTNAME\");\n\t\tif (hostname != null && !hostname.isEmpty()) {\n\t\t\treturn hostname;\n\t\t}\n\n\t\t// try to find COMPUTERNAME on WINDOWS\n\t\thostname = System.getenv(\"COMPUTERNAME\");\n\t\tif (hostname != null && !hostname.isEmpty()) {\n\t\t\treturn hostname;\n\t\t}\n\n\t\ttry {\n\t\t\t// try to find HOSTNAME for the local host\n\t\t\thostname = InetAddress.getLocalHost().getHostName();\n\t\t\tif (hostname != null && !hostname.isEmpty()) {\n\t\t\t\treturn hostname;\n\t\t\t}\n\t\t} catch (UnknownHostException e) {\n\t\t\treturn null;\n\t\t}\n\n\t\t// not found\n\t\treturn null;\n\t}\n\n\t/**\n\t * Returns the MAC address if found.\n\t * <p>\n\t * Output format: \"00-00-00-00-00-00\" (in upper case)\n\t * \n\t * @param nic a network interface\n\t * @return a string containing the MAC address\n\t */\n\tpublic static synchronized String mac(NetworkInterface nic) {\n\n\t\tif (mac != null) {\n\t\t\treturn mac;\n\t\t}\n\n\t\ttry {\n\t\t\tif (nic != null && nic.getHardwareAddress() != null) {\n\t\t\t\tbyte[] ha = nic.getHardwareAddress();\n\t\t\t\tString[] hex = new String[ha.length];\n\t\t\t\tfor (int i = 0; i < ha.length; i++) {\n\t\t\t\t\thex[i] = String.format(\"%02X\", ha[i]);\n\t\t\t\t}\n\t\t\t\tmac = String.join(\"-\", hex);\n\t\t\t\treturn mac;\n\t\t\t}\n\t\t} catch (SocketException e) {\n\t\t\treturn null;\n\t\t}\n\n\t\t// not found\n\t\treturn null;\n\t}\n\n\t/**\n\t * Returns the IP address if found.\n\t * <p>\n\t * Output format: \"0.0.0.0\" (if IPv4)\n\t * \n\t * @param nic a network interface\n\t * @return a string containing the IP address\n\t */\n\tpublic static synchronized String ip(NetworkInterface nic) {\n\n\t\tif (ip != 
null) {\n\t\t\treturn ip;\n\t\t}\n\n\t\tif (nic != null) {\n\t\t\tEnumeration<InetAddress> ips = nic.getInetAddresses();\n\t\t\tif (ips.hasMoreElements()) {\n\t\t\t\tip = ips.nextElement().getHostAddress();\n\t\t\t\treturn ip;\n\t\t\t}\n\t\t}\n\n\t\t// not found\n\t\treturn null;\n\t}\n\n\t/**\n\t * Returns a string containing host name, MAC and IP.\n\t * <p>\n\t * Output format: \"hostname 11-11-11-11-11-11 222.222.222.222\"\n\t * \n\t * @return a string containing the host name, MAC and IP\n\t */\n\tpublic static synchronized String getMachineString() {\n\n\t\tNetworkInterface nic = nic();\n\n\t\tString hostname = NetworkUtil.hostname();\n\t\tString mac = NetworkUtil.mac(nic);\n\t\tString ip = NetworkUtil.ip(nic);\n\n\t\treturn String.join(\" \", hostname, mac, ip);\n\t}\n\n\t/**\n\t * Returns a network interface.\n\t * <p>\n\t * It tries to return the network interface associated to the host name.\n\t * <p>\n\t * If that network interface is not found, it tries to return the first network\n\t * interface that satisfies these conditions:\n\t * <ul>\n\t * <li>it is up and running;\n\t * <li>it is not loopback;\n\t * <li>it is not virtual;\n\t * <li>it has a hardware address.\n\t * </ul>\n\t * <p>\n\t * If no acceptable network interface is found, it returns null.\n\t * \n\t * @return a network interface.\n\t */\n\tpublic static synchronized NetworkInterface nic() {\n\n\t\ttry {\n\n\t\t\tInetAddress ip = null;\n\t\t\tNetworkInterface nic = null;\n\t\t\tEnumeration<NetworkInterface> enu = null;\n\n\t\t\t// try to find the network interface for the host name\n\t\t\tip = InetAddress.getByName(hostname());\n\t\t\tnic = NetworkInterface.getByInetAddress(ip);\n\t\t\tif (acceptable(nic)) {\n\t\t\t\treturn nic;\n\t\t\t}\n\n\t\t\t// try to find the first network interface\n\t\t\tenu = NetworkInterface.getNetworkInterfaces();\n\t\t\twhile (enu.hasMoreElements()) {\n\t\t\t\tnic = enu.nextElement();\n\t\t\t\tif (acceptable(nic)) {\n\t\t\t\t\treturn 
nic;\n\t\t\t\t}\n\t\t\t}\n\n\t\t} catch (UnknownHostException | SocketException e) {\n\t\t\treturn null;\n\t\t}\n\n\t\t// NIC not found\n\t\treturn null;\n\t}\n\n\t/**\n\t * Checks if the network interface is acceptable.\n\t * \n\t * @param nic a network interface\n\t * @return true if acceptable\n\t */\n\tprivate static synchronized boolean acceptable(NetworkInterface nic) {\n\t\ttry {\n\t\t\tif (nic != null && nic.isUp() && !nic.isLoopback() && !nic.isVirtual()) {\n\t\t\t\tbyte[] mac = nic.getHardwareAddress();\n\t\t\t\tif (mac != null && mac.length == 6) {\n\t\t\t\t\treturn true;\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (SocketException e) {\n\t\t\treturn false;\n\t\t}\n\n\t\treturn false;\n\t}\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/RandomUtil.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util.internal;\n\nimport java.security.NoSuchAlgorithmException;\nimport java.security.SecureRandom;\nimport java.util.Random;\nimport java.util.concurrent.locks.ReentrantLock;\n\n/**\n * Utility class that provides random generator services.\n * <p>\n * The current implementation uses a pool {@link SecureRandom}.\n * <p>\n * The pool size depends on the number of processors available, up to a maximum\n * of 32. The minimum is 4.\n * <p>\n * The pool items are deleted very often to avoid holding them for too long.\n * They are also deleted to avoid holding more instances than threads running.\n * <p>\n * The PRNG algorithm can be specified by system property or environment\n * variable. 
See {@link RandomUtil#newSecureRandom()}.\n */\npublic final class RandomUtil {\n\n\tprivate RandomUtil() {\n\t}\n\n\t/**\n\t * Returns a random 64-bit number.\n\t * \n\t * @return a number\n\t */\n\tpublic static long nextLong() {\n\t\treturn SecureRandomPool.nextLong();\n\t}\n\n\t/**\n\t * Returns an array of random bytes.\n\t * \n\t * @param length the array length\n\t * @return a byte array\n\t */\n\tpublic static byte[] nextBytes(final int length) {\n\t\treturn SecureRandomPool.nextBytes(length);\n\t}\n\n\t/**\n\t * Returns a new instance of {@link SecureRandom}.\n\t * <p>\n\t * It tries to create an instance with the algorithm name specified in the\n\t * system property `uuidcreator.securerandom` or in the environment variable\n\t * `UUIDCREATOR_SECURERANDOM`. If the algorithm name is not supported by the\n\t * runtime, it returns an instance with the default algorithm.\n\t * <p>\n\t * It can be useful to make use of SHA1PRNG or DRBG as a non-blocking source of\n\t * random bytes. The SHA1PRNG algorithm is default on operating systems that\n\t * don't have '/dev/random', e.g., on Windows. 
The DRBG algorithm is available\n\t * in JDK 9+.\n\t * <p>\n\t * To control the algorithm used by this method, use the system property\n\t * `uuidcreator.securerandom` or the environment variable\n\t * `UUIDCREATOR_SECURERANDOM` as in examples below.\n\t * <p>\n\t * System property:\n\t * \n\t * <pre>{@code\n\t * # Use the the algorithm SHA1PRNG for SecureRandom\n\t * -Duuidcreator.securerandom=\"SHA1PRNG\"\n\t * \n\t * # Use the the algorithm DRBG for SecureRandom (JDK9+)\n\t * -Duuidcreator.securerandom=\"DRBG\"\n\t * }</pre>\n\t * \n\t * <p>\n\t * Environment variable:\n\t * \n\t * <pre>{@code\n\t * # Use the the algorithm SHA1PRNG for SecureRandom\n\t * export UUIDCREATOR_SECURERANDOM=\"SHA1PRNG\"\n\t * \n\t * # Use the the algorithm DRBG for SecureRandom (JDK9+)\n\t * export UUIDCREATOR_SECURERANDOM=\"DRBG\"\n\t * }</pre>\n\t * \n\t * @return a new {@link SecureRandom}.\n\t */\n\tpublic static SecureRandom newSecureRandom() {\n\t\tString algorithm = SettingsUtil.getSecureRandom();\n\t\tif (algorithm != null) {\n\t\t\ttry {\n\t\t\t\treturn SecureRandom.getInstance(algorithm);\n\t\t\t} catch (NoSuchAlgorithmException e) {\n\t\t\t\treturn new SecureRandom();\n\t\t\t}\n\t\t}\n\t\treturn new SecureRandom();\n\t}\n\n\tprivate static class SecureRandomPool {\n\n\t\tprivate static final Random random = new Random();\n\t\tprivate static final int POOL_SIZE = processors();\n\t\tprivate static final Random[] POOL = new Random[POOL_SIZE];\n\t\tprivate static final ReentrantLock lock = new ReentrantLock();\n\n\t\tprivate SecureRandomPool() {\n\t\t}\n\n\t\tpublic static long nextLong() {\n\t\t\treturn ByteUtil.toNumber(nextBytes(Long.BYTES));\n\t\t}\n\n\t\tpublic static byte[] nextBytes(final int length) {\n\n\t\t\tfinal byte[] bytes = new byte[length];\n\t\t\tcurrent().nextBytes(bytes);\n\n\t\t\t// every now and then\n\t\t\tif (bytes.length > 0 && bytes[0x00] == 0) {\n\t\t\t\t// delete a random item from the 
pool\n\t\t\t\tdelete(random.nextInt(POOL_SIZE));\n\t\t\t}\n\n\t\t\treturn bytes;\n\t\t}\n\n\t\tprivate static Random current() {\n\n\t\t\t// calculate the pool index given the current thread ID\n\t\t\tfinal int index = (int) Thread.currentThread().getId() % POOL_SIZE;\n\n\t\t\tlock.lock();\n\t\t\ttry {\n\t\t\t\t// lazy loading instance\n\t\t\t\tif (POOL[index] == null) {\n\t\t\t\t\tPOOL[index] = RandomUtil.newSecureRandom();\n\t\t\t\t}\n\t\t\t\treturn POOL[index];\n\t\t\t} finally {\n\t\t\t\tlock.unlock();\n\t\t\t}\n\t\t}\n\n\t\tprivate static void delete(int index) {\n\t\t\tlock.lock();\n\t\t\ttry {\n\t\t\t\tPOOL[index] = null;\n\t\t\t} finally {\n\t\t\t\tlock.unlock();\n\t\t\t}\n\t\t}\n\n\t\tprivate static int processors() {\n\n\t\t\tfinal int min = 4;\n\t\t\tfinal int max = 32;\n\n\t\t\t// get the number of processors from the runtime\n\t\t\tfinal int processors = Runtime.getRuntime().availableProcessors();\n\n\t\t\tif (processors < min) {\n\t\t\t\treturn min;\n\t\t\t} else if (processors > max) {\n\t\t\t\treturn max;\n\t\t\t}\n\n\t\t\treturn processors;\n\t\t}\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i128/util/internal/SettingsUtil.java",
    "content": "/*\n * MIT License\n * \n * Copyright (c) 2018-2025 Fabio Lima\n * \n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n * \n * The above copyright notice and this permission notice shall be included in all\n * copies or substantial portions of the Software.\n * \n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n * SOFTWARE.\n */\n\npackage com.pinecone.ulf.util.guid.i128.util.internal;\n\n/**\n * Utility class that reads system properties and environment variables.\n * <p>\n * List of system properties:\n * <ul>\n * <li>uuidcreator.node\n * <li>uuidcreator.securerandom\n * </ul>\n * <p>\n * List of environment variables:\n * <ul>\n * <li>UUIDCREATOR_NODE\n * <li>UUIDCREATOR_SECURERANDOM\n * </ul>\n * <p>\n * System properties has prevalence over environment variables.\n */\npublic final class SettingsUtil {\n\n\t/**\n\t * The property name prefix.\n\t */\n\tprotected static final String PROPERTY_PREFIX = \"uuidcreator\";\n\n\t/**\n\t * The property name for the node number.\n\t */\n\tpublic static final String PROPERTY_NODE = \"node\";\n\n\t/**\n\t * The property name for the secure random algorithm.\n\t */\n\tpublic static 
final String PROPERTY_SECURERANDOM = \"securerandom\";\n\n\t/**\n\t * Default constructor.\n\t */\n\tprotected SettingsUtil() {\n\t}\n\n\t/**\n\t * Get the node identifier.\n\t * \n\t * @return a number\n\t */\n\tpublic static Long getNodeIdentifier() {\n\t\tString value = getProperty(PROPERTY_NODE);\n\t\tif (value == null) {\n\t\t\treturn null;\n\t\t}\n\t\ttry {\n\t\t\treturn Long.decode(value);\n\t\t} catch (NumberFormatException e) {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\t/**\n\t * Set the node identifier\n\t * \n\t * @param node a number\n\t */\n\tpublic static void setNodeIdentifier(Long node) {\n\t\tString value = Long.toString(node);\n\t\tsetProperty(PROPERTY_NODE, value);\n\t}\n\n\t/**\n\t * Get the secure random algorithm.\n\t * \n\t * @return a string\n\t */\n\tpublic static String getSecureRandom() {\n\t\treturn getProperty(PROPERTY_SECURERANDOM);\n\t}\n\n\t/**\n\t * Set the secure random algorithm\n\t * \n\t * @param algorithm a string\n\t */\n\tpublic static void setSecureRandom(String algorithm) {\n\t\tsetProperty(PROPERTY_SECURERANDOM, algorithm);\n\t}\n\n\t/**\n\t * Get a property.\n\t * \n\t * @param name the name\n\t * @return a string\n\t */\n\tpublic static String getProperty(String name) {\n\n\t\tString fullName = getPropertyName(name);\n\t\tString value = System.getProperty(fullName);\n\t\tif (!isEmpty(value)) {\n\t\t\treturn value;\n\t\t}\n\n\t\tfullName = getEnvinronmentName(name);\n\t\tvalue = System.getenv(fullName);\n\t\tif (!isEmpty(value)) {\n\t\t\treturn value;\n\t\t}\n\n\t\treturn null;\n\t}\n\n\t/**\n\t * Set a property.\n\t * \n\t * @param key   the key\n\t * @param value the value\n\t */\n\tpublic static void setProperty(String key, String value) {\n\t\tSystem.setProperty(getPropertyName(key), value);\n\t}\n\n\t/**\n\t * Clear a property.\n\t * \n\t * @param key the key\n\t */\n\tpublic static void clearProperty(String key) {\n\t\tSystem.clearProperty(getPropertyName(key));\n\t}\n\n\t/**\n\t * Get a property name.\n\t * \n\t * @param 
key a key\n\t * @return a string\n\t */\n\tprotected static String getPropertyName(String key) {\n\t\treturn String.join(\".\", PROPERTY_PREFIX, key);\n\t}\n\n\t/**\n\t * Get an environment variable name.\n\t * \n\t * @param key a key\n\t * @return a string\n\t */\n\tprotected static String getEnvinronmentName(String key) {\n\t\treturn String.join(\"_\", PROPERTY_PREFIX, key).toUpperCase().replace(\".\", \"_\");\n\t}\n\n\tprivate static boolean isEmpty(String value) {\n\t\treturn value == null || value.isEmpty();\n\t}\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/BitsAllocator.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64;\n\nimport org.apache.commons.lang.builder.ToStringBuilder;\nimport org.apache.commons.lang.builder.ToStringStyle;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.Assert;\n\n\n/**\n * Allocate 64 bits for the UID(long)<br>\n * sign (fixed 1bit) -> deltaSecond -> workerId -> sequence(within the same second)\n * \n * @author yutianbao\n */\npublic class BitsAllocator implements Pinenut {\n    /**\n     * Total 64 bits\n     */\n    public static final int TOTAL_BITS = 1 << 6;\n\n    /**\n     * Bits for [sign-> second-> workId-> sequence]\n     */\n    private int signBits = 1;\n    private final int timestampBits;\n    private final int workerIdBits;\n    private final int sequenceBits;\n\n    /**\n     * Max value for workId & sequence\n     */\n    private final long maxDeltaSeconds;\n    private final long maxWorkerId;\n    private final long maxSequence;\n\n    /**\n     * Shift for timestamp & workerId\n     */\n    private final int timestampShift;\n    private final int workerIdShift;\n\n    /**\n     * Constructor with timestampBits, workerIdBits, sequenceBits<br>\n     * The highest bit used for sign, so <code>63</code> bits for timestampBits, workerIdBits, sequenceBits\n     */\n    public BitsAllocator( int timestampBits, int 
workerIdBits, int sequenceBits ) {\n        // make sure allocated 64 bits\n        int allocateTotalBits = signBits + timestampBits + workerIdBits + sequenceBits;\n        Assert.isTrue(allocateTotalBits == TOTAL_BITS, \"allocate not enough 64 bits\");\n\n        // initialize bits\n        this.timestampBits = timestampBits;\n        this.workerIdBits = workerIdBits;\n        this.sequenceBits = sequenceBits;\n\n        // initialize max value\n        this.maxDeltaSeconds = ~(-1L << timestampBits);\n        this.maxWorkerId = ~(-1L << workerIdBits);\n        this.maxSequence = ~(-1L << sequenceBits);\n\n        // initialize shift\n        this.timestampShift = workerIdBits + sequenceBits;\n        this.workerIdShift = sequenceBits;\n    }\n\n    /**\n     * Allocate bits for UID according to delta seconds & workerId & sequence<br>\n     * <b>Note that: </b>The highest bit will always be 0 for sign\n     * \n     * @param deltaSeconds\n     * @param workerId\n     * @param sequence\n     * @return\n     */\n    public long allocate( long deltaSeconds, long workerId, long sequence ) {\n        return (deltaSeconds << timestampShift) | (workerId << workerIdShift) | sequence;\n    }\n    \n    /**\n     * Getters\n     */\n    public int getSignBits() {\n        return signBits;\n    }\n\n    public int getTimestampBits() {\n        return timestampBits;\n    }\n\n    public int getWorkerIdBits() {\n        return workerIdBits;\n    }\n\n    public int getSequenceBits() {\n        return sequenceBits;\n    }\n\n    public long getMaxDeltaSeconds() {\n        return maxDeltaSeconds;\n    }\n\n    public long getMaxWorkerId() {\n        return maxWorkerId;\n    }\n\n    public long getMaxSequence() {\n        return maxSequence;\n    }\n\n    public int getTimestampShift() {\n        return timestampShift;\n    }\n\n    public int getWorkerIdShift() {\n        return workerIdShift;\n    }\n    \n    @Override\n    public String toString() {\n        return 
ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE);\n    }\n    \n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GUID64.java",
    "content": "package com.pinecone.ulf.util.guid.i64;\n\nimport com.pinecone.framework.util.Bytes;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.framework.util.id.IllegalIdentificationException;\n\npublic class GUID64 implements GUID {\n    public static final long SignBits      =  1;\n    public static final long TimestampBits = 29;\n    public static final long WorkerIdBits  = 21;\n    public static final long SequenceBits  = 13;\n    public static final int  Sizeof        = 8;\n\n    protected long guid;\n\n    public GUID64() {\n\n    }\n\n    public GUID64( String hexID64 ) {\n        this.parse( hexID64 );\n    }\n\n    public GUID64( long guid ) {\n        this.guid = guid;\n    }\n\n    public long getSequence() {\n        long totalBits     = BitsAllocator.TOTAL_BITS;\n        return (this.guid << (totalBits - GUID64.SequenceBits)) >>> (totalBits - GUID64.SequenceBits);\n    }\n\n    public long getWorkerId() {\n        long totalBits     = BitsAllocator.TOTAL_BITS;\n        return (this.guid << (GUID64.TimestampBits + GUID64.SignBits)) >>> (totalBits - GUID64.WorkerIdBits);\n    }\n\n    public long getDeltaSeconds() {\n        return this.guid >>> (GUID64.WorkerIdBits + GUID64.SequenceBits);\n    }\n\n\n    protected void parseByStringParts( String[] parts ) throws IllegalIdentificationException {\n        try{\n            // 将十六进制字符串转换为十进制整数\n            long deltaSeconds = Long.parseLong(parts[0], 16);\n            long workerId     = Long.parseLong(parts[1], 16);\n            long sequence     = Long.parseLong(parts[2], 16);\n\n            long deltaSecondsPart = deltaSeconds << (GUID64.WorkerIdBits + GUID64.SequenceBits);\n            long workerIdPart = workerId << GUID64.SequenceBits;\n            this.guid = deltaSecondsPart | workerIdPart | sequence;\n        }\n        catch ( RuntimeException e ) {\n            throw new IllegalIdentificationException( e );\n        
}\n    }\n\n    @Override\n    public Identification parse( String hexID64 ) throws IllegalIdentificationException {\n        // 分离UUID的各个部分\n        String[] parts = hexID64.split(\"-\");\n\n        this.parseByStringParts( parts );\n        return this;\n    }\n\n    @Override\n    public String toString(){\n        String deltaSecondsHex = String.format( \"%07x\", this.getDeltaSeconds() );\n        String workerIdHex     = String.format( \"%06x\", this.getWorkerId()     );\n        String sequenceHex     = String.format( \"%04x\", this.getSequence()     );\n        return deltaSecondsHex + \"-\" + workerIdHex + \"-\" + sequenceHex;\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n\n    @Override\n    public boolean equals( Object obj ) {\n        if( !super.equals(obj) )  {\n            if( obj instanceof GUID64 ) {\n                return this.guid == ((GUID64) obj).guid;\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public int hashCode() {\n        return Long.hashCode( this.guid );\n    }\n\n    @Override\n    public long hashCode64() {\n        return this.guid;\n    }\n\n    @Override\n    public int intVal() {\n        return (int) this.guid;\n    }\n\n    @Override\n    public long longVal() {\n        return this.guid;\n    }\n\n    @Override\n    public byte[] toBytesBE() {\n        return Bytes.int64ToBytesBE( this.guid );\n    }\n\n    @Override\n    public byte[] toBytesLE() {\n        return Bytes.int64ToBytesLE( this.guid );\n    }\n\n    @Override\n    public int sizeof() {\n        return Sizeof;\n    }\n\n    @Override\n    public int compareTo( Identification that ) {\n        GUID64 val;\n        if ( that instanceof GUID64 ) {\n            val = (GUID64) that;\n        }\n        else {\n            throw new IllegalArgumentException( \"Not GUID64\" );\n        }\n\n        return Long.compare( this.guid, val.guid );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GUID72.java",
    "content": "package com.pinecone.ulf.util.guid.i64;\n\nimport com.pinecone.framework.util.id.IllegalIdentificationException;\n\npublic class GUID72 extends GUID64 {\n    public static final int  Sizeof        = 9; // 8 bytes for GUID64 + 1 byte for nanoSeed\n\n    private byte nanoSeed;\n\n    public GUID72() {\n\n    }\n\n    public GUID72( String hexID72 ) {\n        this.parse( hexID72 );\n    }\n\n    public GUID72( long guid64, byte nanoSeed ) {\n        super( guid64 );\n        this.nanoSeed = nanoSeed;\n    }\n\n    public int getNanoSeed() {\n        return this.nanoSeed;\n    }\n\n    public void setNanoSeed( byte nanoSeed ) {\n        this.nanoSeed = nanoSeed;\n    }\n\n    @Override\n    public GUID72 parse( String hexID72 ) throws IllegalIdentificationException {\n        //Debug.trace( \"解析字符串\"+hexID72 );\n        try{\n            String[] parts = hexID72.split(\"-\");\n            this.parseByStringParts( parts );\n            this.nanoSeed  = (byte) Integer.parseInt( parts[3], 16 );\n        }\n        catch ( NumberFormatException | IndexOutOfBoundsException e ) {\n            throw new IllegalIdentificationException( e );\n        }\n\n        return this;\n    }\n\n    @Override\n    public byte[] toBytesLE() {\n        byte[] b = new byte[9];\n\n        b[0] = (byte)  this.guid;\n        b[1] = (byte) (this.guid >> 8);\n        b[2] = (byte) (this.guid >> 16);\n        b[3] = (byte) (this.guid >> 24);\n        b[4] = (byte) (this.guid >> 32);\n        b[5] = (byte) (this.guid >> 40);\n        b[6] = (byte) (this.guid >> 48);\n        b[7] = (byte) (this.guid >> 56);\n        b[8] = this.nanoSeed;\n\n        return b;\n    }\n\n    @Override\n    public byte[] toBytesBE() {\n        byte[] b = new byte[9];\n\n        b[0] = (byte) (this.guid >> 56);\n        b[1] = (byte) (this.guid >> 48);\n        b[2] = (byte) (this.guid >> 40);\n        b[3] = (byte) (this.guid >> 32);\n        b[4] = (byte) (this.guid >> 24);\n        b[5] = (byte) 
(this.guid >> 16);\n        b[6] = (byte) (this.guid >> 8);\n        b[7] = (byte)  this.guid;\n        b[8] = this.nanoSeed;\n\n        return b;\n    }\n\n    @Override\n    public String toString() {\n        String nanoSeedHex = String.format( \"%02x\", this.nanoSeed      );\n        return super.toString() + \"-\" + nanoSeedHex;\n    }\n\n    @Override\n    public int sizeof() {\n        return Sizeof;\n    }\n\n    @Override\n    public String toJSONString() {\n        return \"\\\"\" + this.toString() + \"\\\"\";\n    }\n\n    @Override\n    public boolean equals( Object obj ) {\n        boolean b = false;\n        if( obj instanceof GUID72 ) {\n            b = this.nanoSeed == ((GUID72) obj).nanoSeed;\n        }\n\n        return super.equals(obj) && b;\n    }\n\n    @Override\n    public int hashCode() {\n        return Long.hashCode( this.guid ) ^ Byte.hashCode( this.nanoSeed );\n    }\n\n    @Override\n    public long hashCode64() {\n        return super.hashCode64() ^ Byte.hashCode( this.nanoSeed );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GuidAllocator64.java",
    "content": "package com.pinecone.ulf.util.guid.i64;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.id.GuidGenerateException;\nimport com.pinecone.ulf.util.guid.i64.worker.WorkerIdAssigner;\n\npublic interface GuidAllocator64 extends GuidAllocator {\n\n    long nextGUIDi64() throws GuidGenerateException;\n\n    String explain( long guid64 );\n\n    GUID nextGUID64();\n\n    void setWorkerIdAssigner( WorkerIdAssigner workerIdAssigner );\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GuidAllocator72.java",
    "content": "package com.pinecone.ulf.util.guid.i64;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.id.GuidGenerateException;\nimport com.pinecone.ulf.util.guid.i64.worker.WorkerIdAssigner;\n\npublic interface GuidAllocator72 extends GuidAllocator64 {\n\n    GUID nextGUID72();\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/GuidAllocator72V2.java",
    "content": "package com.pinecone.ulf.util.guid.i64;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidGenerateException;\nimport com.pinecone.ulf.util.guid.i64.utils.DateUtils;\nimport com.pinecone.ulf.util.guid.i64.worker.GenericDisposableWorkerIdAssigner;\nimport com.pinecone.ulf.util.guid.i64.worker.WorkerIdAssigner;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.beans.factory.InitializingBean;\n\nimport java.time.LocalDateTime;\nimport java.time.temporal.ChronoUnit;\nimport java.util.Date;\nimport java.util.concurrent.TimeUnit;\n\npublic class GuidAllocator72V2 implements GuidAllocator72, InitializingBean {\n    private static final Logger LOGGER = LoggerFactory.getLogger(GuidAllocator72V2.class);\n\n    /** Bits allocate */\n    protected int timeBits = 28;\n    protected int workerBits = 22;\n    protected int seqBits = 13;\n\n    /** Customer epoch, unit as second. 
For example 2016-05-20 (ms: 1463673600000)*/\n    // TODO\n    protected String epochStr = \"2024-10-01\";\n    protected long epochSeconds = TimeUnit.MILLISECONDS.toSeconds( DateUtils.parseByDayPattern( this.epochStr ).getTime() );\n\n    /** Stable fields after spring bean initializing */\n    protected BitsAllocator bitsAllocator;\n    protected long workerId;\n\n    /** Volatile fields caused by nextId() */\n    protected long sequence = 0L;\n    protected long lastSecond = -1L;\n\n    /** Spring property */\n    protected WorkerIdAssigner workerIdAssigner;\n\n    public GuidAllocator72V2() {\n        this( new GenericDisposableWorkerIdAssigner() );\n    }\n\n    public GuidAllocator72V2(WorkerIdAssigner idAssigner ) {\n        this.workerIdAssigner = idAssigner;\n        this.afterPropertiesSet();\n    }\n\n    @Override\n    public void afterPropertiesSet() {\n        // initialize bits allocator\n        this.bitsAllocator = new BitsAllocator(this.timeBits, this.workerBits, this.seqBits);\n\n        // initialize worker id\n        this.workerId = this.workerIdAssigner.assignWorkerId();\n        if ( this.workerId > this.bitsAllocator.getMaxWorkerId() ) {\n            throw new IllegalStateException( \"Worker id \" + this.workerId + \" exceeds the max \" + this.bitsAllocator.getMaxWorkerId() );\n        }\n\n        LOGGER.info( \"Initialized bits(1, {}, {}, {}) for workerID:{}\", this.timeBits, this.workerBits, this.seqBits, this.workerId );\n    }\n\n    @Override\n    public long nextGUIDi64() throws GuidGenerateException {\n        try {\n            return this.nextId();\n        }\n        catch ( Exception e ) {\n            LOGGER.error(\"Generate unique id exception. 
\", e);\n            throw new GuidGenerateException(e);\n        }\n    }\n\n    @Override\n    public String explain( long guid64 ) {\n        long totalBits = BitsAllocator.TOTAL_BITS;\n        long signBits = this.bitsAllocator.getSignBits();\n        long timestampBits = this.bitsAllocator.getTimestampBits();\n        long workerIdBits = this.bitsAllocator.getWorkerIdBits();\n        long sequenceBits = this.bitsAllocator.getSequenceBits();\n\n        // parse UID\n        long sequence = (guid64 << (totalBits - sequenceBits)) >>> (totalBits - sequenceBits);\n        long workerId = (guid64 << (timestampBits + signBits)) >>> (totalBits - workerIdBits);\n        long deltaSeconds = guid64 >>> (workerIdBits + sequenceBits);\n\n        Date thatTime = new Date(TimeUnit.SECONDS.toMillis(epochSeconds + deltaSeconds));\n        String thatTimeStr = DateUtils.formatByDateTimePattern(thatTime);\n\n        // format as string\n        return String.format(\"{\\\"UID\\\":\\\"%d\\\",\\\"timestamp\\\":\\\"%s\\\",\\\"workerId\\\":\\\"%d\\\",\\\"sequence\\\":\\\"%d\\\"}\",\n                guid64, thatTimeStr, workerId, sequence);\n    }\n\n//    @Override\n//    public GUID64 parseGUID64(long uid) {\n//        long totalBits = BitsAllocator.TOTAL_BITS;\n//        long signBits = bitsAllocator.getSignBits();\n//        long timestampBits = bitsAllocator.getTimestampBits();\n//        long workerIdBits = bitsAllocator.getWorkerIdBits();\n//        long sequenceBits = bitsAllocator.getSequenceBits();\n//\n//        // parse UID\n//        long sequence = (uid << (totalBits - sequenceBits)) >>> (totalBits - sequenceBits);\n//        long workerId = (uid << (timestampBits + signBits)) >>> (totalBits - workerIdBits);\n//        long deltaSeconds = uid >>> (workerIdBits + sequenceBits);\n//\n//        Date thatTime = new Date(TimeUnit.SECONDS.toMillis(epochSeconds + deltaSeconds));\n//        String thatTimeStr = DateUtils.formatByDateTimePattern(thatTime);\n//\n//        // 
format as string\n//        return new GUID64(sequence, workerId, deltaSeconds);\n//    }\n//\n\n\n    @Override\n    public GUID nextGUID() {\n        return this.nextGUID72();\n    }\n\n    @Override\n    public GUID parse( String hexId ) {\n        return new GUID72( hexId );\n    }\n\n    @Override\n    public GUID nextGUID72() {\n        //先获取GUID64\n        long guid64 = this.nextGUIDi64();\n        //Debug.trace( guid64 );\n\n        //获取纳秒种子\n        LocalDateTime now = LocalDateTime.now();\n        long nanoseconds = now.toLocalTime().truncatedTo( ChronoUnit.NANOS ).getNano();\n        int truncatedNanos = (int) (nanoseconds % 256L); // 截取为8位\n        //String nanoSeed = String.format(\"%02x\", truncatedNanos);\n\n        return new GUID72( guid64, (byte) truncatedNanos );\n    }\n\n    @Override\n    public GUID nextGUID64() {\n        return new GUID64( this.nextGUIDi64() );\n    }\n\n    /**\n     * Get UID\n     *\n     * @return UID\n     * @throws GuidGenerateException in the case: Clock moved backwards; Exceeds the max timestamp\n     */\n    protected synchronized long nextId() {\n        long currentSecond = getCurrentSecond();\n\n        // Clock moved backwards, refuse to generate uid\n        if (currentSecond < this.lastSecond) {\n            long refusedSeconds = this.lastSecond - currentSecond;\n            throw new GuidGenerateException(\"Clock moved backwards. 
Refusing for %d seconds\", refusedSeconds);\n        }\n\n        // At the same second, increase sequence\n        if (currentSecond == this.lastSecond) {\n            this.sequence = ( this.sequence + 1 ) & this.bitsAllocator.getMaxSequence();\n            // Exceed the max sequence, we wait the next second to generate uid\n            if ( this.sequence == 0 ) {\n                currentSecond = this.getNextSecond( this.lastSecond );\n            }\n\n            // At the different second, sequence restart from zero\n        }\n        else {\n            this.sequence = 0L;\n        }\n\n        this.lastSecond = currentSecond;\n\n        // Allocate bits for UID\n        return this.bitsAllocator.allocate(currentSecond - epochSeconds, this.workerId, this.sequence);\n    }\n\n    /**\n     * Get next second\n     */\n    private long getNextSecond( long lastTimestamp ) {\n        long timestamp = getCurrentSecond();\n        while (timestamp <= lastTimestamp) {\n            timestamp = getCurrentSecond();\n        }\n\n        return timestamp;\n    }\n\n    /**\n     * Get current second\n     */\n    private long getCurrentSecond() {\n        long currentSecond = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());\n        if (currentSecond - epochSeconds > this.bitsAllocator.getMaxDeltaSeconds()) {\n            throw new GuidGenerateException(\"Timestamp bits is exhausted. Refusing UID generate. 
Now: \" + currentSecond);\n        }\n\n        return currentSecond;\n    }\n\n    @Override\n    public void setWorkerIdAssigner( WorkerIdAssigner workerIdAssigner ) {\n        this.workerIdAssigner = workerIdAssigner;\n    }\n\n    public void setTimeBits   ( int timeBits    ) {\n        if (timeBits > 0) {\n            this.timeBits = timeBits;\n        }\n    }\n\n    public void setWorkerBits ( int workerBits  ) {\n        if (workerBits > 0) {\n            this.workerBits = workerBits;\n        }\n    }\n\n    public void setSeqBits    ( int seqBits     ) {\n        if (seqBits > 0) {\n            this.seqBits = seqBits;\n        }\n    }\n\n    public void setEpochStr   ( String epochStr ) {\n        if ( StringUtils.isNotBlank(epochStr) ) {\n            this.epochStr = epochStr;\n            this.epochSeconds = TimeUnit.MILLISECONDS.toSeconds(DateUtils.parseByDayPattern(epochStr).getTime());\n        }\n    }\n\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/DateUtils.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.utils;\n\nimport org.apache.commons.lang.time.DateFormatUtils;\n\nimport java.text.ParseException;\nimport java.util.Calendar;\nimport java.util.Date;\n\n/**\n * DateUtils provides date formatting, parsing\n *\n * @author yutianbao\n */\npublic abstract class DateUtils extends org.apache.commons.lang.time.DateUtils {\n    /**\n     * Patterns\n     */\n    public static final String DAY_PATTERN = \"yyyy-MM-dd\";\n    public static final String DATETIME_PATTERN = \"yyyy-MM-dd HH:mm:ss\";\n    public static final String DATETIME_MS_PATTERN = \"yyyy-MM-dd HH:mm:ss.SSS\";\n\n    public static final Date DEFAULT_DATE = DateUtils.parseByDayPattern(\"1970-01-01\");\n\n    /**\n     * Parse date by 'yyyy-MM-dd' pattern\n     *\n     * @param str\n     * @return\n     */\n    public static Date parseByDayPattern(String str) {\n        return parseDate(str, DAY_PATTERN);\n    }\n\n    /**\n     * Parse date by 'yyyy-MM-dd HH:mm:ss' pattern\n     *\n     * @param str\n     * @return\n     */\n    public static Date parseByDateTimePattern(String str) {\n        return parseDate(str, DATETIME_PATTERN);\n    }\n\n    /**\n     * Parse date without Checked exception\n     *\n     * @param str\n     * @param pattern\n     * @return\n     * @throws RuntimeException when ParseException occurred\n     
*/\n    public static Date parseDate(String str, String pattern) {\n        try {\n            return parseDate(str, new String[]{pattern});\n        } catch (ParseException e) {\n            throw new RuntimeException(e);\n        }\n    }\n\n    /**\n     * Format date into string\n     *\n     * @param date\n     * @param pattern\n     * @return\n     */\n    public static String formatDate(Date date, String pattern) {\n        return DateFormatUtils.format(date, pattern);\n    }\n\n    /**\n     * Format date by 'yyyy-MM-dd' pattern\n     *\n     * @param date\n     * @return\n     */\n    public static String formatByDayPattern(Date date) {\n        if (date != null) {\n            return DateFormatUtils.format(date, DAY_PATTERN);\n        } else {\n            return null;\n        }\n    }\n\n    /**\n     * Format date by 'yyyy-MM-dd HH:mm:ss' pattern\n     *\n     * @param date\n     * @return\n     */\n    public static String formatByDateTimePattern(Date date) {\n        return DateFormatUtils.format(date, DATETIME_PATTERN);\n    }\n\n    /**\n     * Get current day using format date by 'yyyy-MM-dd HH:mm:ss' pattern\n     *\n     * @return\n     * @author yebo\n     */\n    public static String getCurrentDayByDayPattern() {\n        Calendar cal = Calendar.getInstance();\n        return formatByDayPattern(cal.getTime());\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/DockerUtils.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.utils;\n\nimport org.apache.commons.lang.StringUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n/**\n * DockerUtils\n * \n * @author yutianbao\n */\npublic abstract class DockerUtils {\n    private static final Logger LOGGER = LoggerFactory.getLogger(DockerUtils.class);\n\n    /** Environment param keys */\n    private static final String ENV_KEY_HOST = \"JPAAS_HOST\";\n    private static final String ENV_KEY_PORT = \"JPAAS_HTTP_PORT\";\n    private static final String ENV_KEY_PORT_ORIGINAL = \"JPAAS_HOST_PORT_8080\";\n\n    /** Docker host & port */\n    private static String DOCKER_HOST = \"\";\n    private static String DOCKER_PORT = \"\";\n\n    /** Whether is docker */\n    private static boolean IS_DOCKER;\n\n    static {\n        retrieveFromEnv();\n    }\n\n    /**\n     * Retrieve docker host\n     * \n     * @return empty string if not a docker\n     */\n    public static String getDockerHost() {\n        return DOCKER_HOST;\n    }\n\n    /**\n     * Retrieve docker port\n     * \n     * @return empty string if not a docker\n     */\n    public static String getDockerPort() {\n        return DOCKER_PORT;\n    }\n\n    /**\n     * Whether a docker\n     * \n     * @return\n     */\n    public static boolean isDocker() {\n        return IS_DOCKER;\n    }\n\n 
   /**\n     * Retrieve host & port from environment\n     */\n    private static void retrieveFromEnv() {\n        // retrieve host & port from environment\n        DOCKER_HOST = System.getenv(ENV_KEY_HOST);\n        DOCKER_PORT = System.getenv(ENV_KEY_PORT);\n\n        // not found from 'JPAAS_HTTP_PORT', then try to find from 'JPAAS_HOST_PORT_8080'\n        if (StringUtils.isBlank(DOCKER_PORT)) {\n            DOCKER_PORT = System.getenv(ENV_KEY_PORT_ORIGINAL);\n        }\n\n        boolean hasEnvHost = StringUtils.isNotBlank(DOCKER_HOST);\n        boolean hasEnvPort = StringUtils.isNotBlank(DOCKER_PORT);\n\n        // docker can find both host & port from environment\n        if (hasEnvHost && hasEnvPort) {\n            IS_DOCKER = true;\n\n            // found nothing means not a docker, maybe an actual machine\n        } else if (!hasEnvHost && !hasEnvPort) {\n            IS_DOCKER = false;\n\n        } else {\n            LOGGER.error(\"Missing host or port from env for Docker. host:{}, port:{}\", DOCKER_HOST, DOCKER_PORT);\n            throw new RuntimeException(\n                    \"Missing host or port from env for Docker. host:\" + DOCKER_HOST + \", port:\" + DOCKER_PORT);\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/EnumUtils.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.utils;\n\n\nimport com.pinecone.framework.util.Assert;\n\n/**\n * EnumUtils provides the operations for {@link ValuedEnum} such as Parse, value of...\n * \n * @author yutianbao\n */\npublic abstract class EnumUtils {\n\n    /**\n     * Parse the bounded value into ValuedEnum\n     * \n     * @param clz\n     * @param value\n     * @return\n     */\n    public static <T extends ValuedEnum<V>, V> T parse(Class<T> clz, V value) {\n        Assert.notNull(clz, \"clz can not be null\");\n        if (value == null) {\n            return null;\n        }\n\n        for (T t : clz.getEnumConstants()) {\n            if (value.equals(t.value())) {\n                return t;\n            }\n        }\n        return null;\n    }\n\n    /**\n     * Null-safe valueOf function\n     * \n     * @param <T>\n     * @param enumType\n     * @param name\n     * @return\n     */\n    public static <T extends Enum<T>> T valueOf(Class<T> enumType, String name) {\n        if (name == null) {\n            return null;\n        }\n\n        return Enum.valueOf(enumType, name);\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/NamingThreadFactory.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.utils;\n\nimport org.apache.commons.lang.ClassUtils;\nimport org.apache.commons.lang.StringUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.lang.Thread.UncaughtExceptionHandler;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ThreadFactory;\nimport java.util.concurrent.atomic.AtomicLong;\n\n/**\n * Named thread in ThreadFactory. 
If there is no specified name for thread, it\n * will auto detect using the invoker classname instead.\n * \n * @author yutianbao\n */\npublic class NamingThreadFactory implements ThreadFactory {\n    private static final Logger LOGGER = LoggerFactory.getLogger(NamingThreadFactory.class);\n\n    /**\n     * Thread name pre\n     */\n    private String name;\n    /**\n     * Is daemon thread\n     */\n    private boolean daemon;\n    /**\n     * UncaughtExceptionHandler\n     */\n    private UncaughtExceptionHandler uncaughtExceptionHandler;\n    /**\n     * Sequences for multi thread name prefix\n     */\n    private final ConcurrentHashMap<String, AtomicLong> sequences;\n\n    /**\n     * Constructors\n     */\n    public NamingThreadFactory() {\n        this(null, false, null);\n    }\n\n    public NamingThreadFactory(String name) {\n        this(name, false, null);\n    }\n\n    public NamingThreadFactory(String name, boolean daemon) {\n        this(name, daemon, null);\n    }\n\n    public NamingThreadFactory(String name, boolean daemon, UncaughtExceptionHandler handler) {\n        this.name = name;\n        this.daemon = daemon;\n        this.uncaughtExceptionHandler = handler;\n        this.sequences = new ConcurrentHashMap<String, AtomicLong>();\n    }\n\n    @Override\n    public Thread newThread(Runnable r) {\n        Thread thread = new Thread(r);\n        thread.setDaemon(this.daemon);\n\n        // If there is no specified name for thread, it will auto detect using the invoker classname instead.\n        // Notice that auto detect may cause some performance overhead\n        String prefix = this.name;\n        if (StringUtils.isBlank(prefix)) {\n            prefix = getInvoker(2);\n        }\n        thread.setName(prefix + \"-\" + getSequence(prefix));\n\n        // no specified uncaughtExceptionHandler, just do logging.\n        if (this.uncaughtExceptionHandler != null) {\n            
thread.setUncaughtExceptionHandler(this.uncaughtExceptionHandler);\n        } else {\n            thread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {\n                public void uncaughtException(Thread t, Throwable e) {\n                    LOGGER.error(\"unhandled exception in thread: \" + t.getId() + \":\" + t.getName(), e);\n                }\n            });\n        }\n\n        return thread;\n    }\n\n    /**\n     * Get the method invoker's class name\n     * \n     * @param depth\n     * @return\n     */\n    private String getInvoker(int depth) {\n        Exception e = new Exception();\n        StackTraceElement[] stes = e.getStackTrace();\n        if (stes.length > depth) {\n            return ClassUtils.getShortClassName(stes[depth].getClassName());\n        }\n        return getClass().getSimpleName();\n    }\n\n    /**\n     * Get sequence for different naming prefix\n     * \n     * @param invoker\n     * @return\n     */\n    private long getSequence(String invoker) {\n        AtomicLong r = this.sequences.get(invoker);\n        if (r == null) {\n            r = new AtomicLong(0);\n            AtomicLong previous = this.sequences.putIfAbsent(invoker, r);\n            if (previous != null) {\n                r = previous;\n            }\n        }\n\n        return r.incrementAndGet();\n    }\n\n    /**\n     * Getters & Setters\n     */\n    public String getName() {\n        return name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n    public boolean isDaemon() {\n        return daemon;\n    }\n\n    public void setDaemon(boolean daemon) {\n        this.daemon = daemon;\n    }\n\n    public UncaughtExceptionHandler getUncaughtExceptionHandler() {\n        return uncaughtExceptionHandler;\n    }\n\n    public void setUncaughtExceptionHandler(UncaughtExceptionHandler handler) {\n        this.uncaughtExceptionHandler = handler;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/NetUtils.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.utils;\n\nimport java.net.InetAddress;\nimport java.net.NetworkInterface;\nimport java.net.SocketException;\nimport java.util.Enumeration;\n\n/**\n * NetUtils\n * \n * @author yutianbao\n */\npublic abstract class NetUtils {\n\n    /**\n     * Pre-loaded local address\n     */\n    public static InetAddress localAddress;\n\n    static {\n        try {\n            localAddress = getLocalInetAddress();\n        } catch (SocketException e) {\n            throw new RuntimeException(\"fail to get local ip.\");\n        }\n    }\n\n    /**\n     * Retrieve the first validated local ip address(the Public and LAN ip addresses are validated).\n     *\n     * @return the local address\n     * @throws SocketException the socket exception\n     */\n    public static InetAddress getLocalInetAddress() throws SocketException {\n        // enumerates all network interfaces\n        Enumeration<NetworkInterface> enu = NetworkInterface.getNetworkInterfaces();\n\n        while (enu.hasMoreElements()) {\n            NetworkInterface ni = enu.nextElement();\n            if (ni.isLoopback()) {\n                continue;\n            }\n\n            Enumeration<InetAddress> addressEnumeration = ni.getInetAddresses();\n            while (addressEnumeration.hasMoreElements()) {\n                InetAddress 
address = addressEnumeration.nextElement();\n\n                // ignores all invalidated addresses\n                if (address.isLinkLocalAddress() || address.isLoopbackAddress() || address.isAnyLocalAddress()) {\n                    continue;\n                }\n\n                return address;\n            }\n        }\n\n        throw new RuntimeException(\"No validated local address!\");\n    }\n\n    /**\n     * Retrieve local address\n     * \n     * @return the string local address\n     */\n    public static String getLocalAddress() {\n        return localAddress.getHostAddress();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/PaddedAtomicLong.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.utils;\n\nimport java.util.concurrent.atomic.AtomicLong;\n\n/**\n * Represents a padded {@link AtomicLong} to prevent the FalseSharing problem<p>\n * \n * The CPU cache line commonly be 64 bytes, here is a sample of cache line after padding:<br>\n * 64 bytes = 8 bytes (object reference) + 6 * 8 bytes (padded long) + 8 bytes (a long value)\n * \n * @author yutianbao\n */\npublic class PaddedAtomicLong extends AtomicLong {\n    private static final long serialVersionUID = -3415778863941386253L;\n\n    /** Padded 6 long (48 bytes) */\n    public volatile long p1, p2, p3, p4, p5, p6 = 7L;\n\n    /**\n     * Constructors from {@link AtomicLong}\n     */\n    public PaddedAtomicLong() {\n        super();\n    }\n\n    public PaddedAtomicLong(long initialValue) {\n        super(initialValue);\n    }\n\n    /**\n     * To prevent GC optimizations for cleaning unused padded references\n     */\n    public long sumPaddingToPreventOptimization() {\n        return p1 + p2 + p3 + p4 + p5 + p6;\n    }\n\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/utils/ValuedEnum.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.utils;\n\n/**\n * {@code ValuedEnum} defines an enumeration which is bounded to a value, you\n * may implements this interface when you defines such kind of enumeration, that\n * you can use {@link EnumUtils} to simplify parse and valueOf operation.\n *  \n * @author yutianbao\n */\npublic interface ValuedEnum<T> {\n    T value();\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/worker/GenericDisposableWorkerIdAssigner.java",
    "content": "package com.pinecone.ulf.util.guid.i64.worker;\n\n\nimport com.pinecone.ulf.util.guid.i64.utils.DockerUtils;\nimport com.pinecone.ulf.util.guid.i64.utils.NetUtils;\nimport com.pinecone.ulf.util.guid.i64.worker.entity.WorkerNodeEntity;\nimport org.apache.commons.lang.math.RandomUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ThreadLocalRandom;\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic class GenericDisposableWorkerIdAssigner implements WorkerIdAssigner {\n    private static final Logger LOGGER = LoggerFactory.getLogger(GenericDisposableWorkerIdAssigner.class);\n\n    // 使用 ConcurrentHashMap 保存已分配的 worker ID，键是 worker ID，值是 WorkerNodeEntity\n    private static final ConcurrentHashMap<Long, WorkerNodeEntity > WORKER_NODES = new ConcurrentHashMap<>();\n\n    // 使用 AtomicInteger 作为 worker ID 的分配器\n    private static final AtomicInteger NEXT_ID = new AtomicInteger(0);\n\n    /**\n     * 基于内存中的列表分配 worker ID。\n     *\n     * @return 分配的 worker ID\n     */\n    @Override\n    public long assignWorkerId() {\n        // 构建 worker 节点实体\n        // todo 要将实体类确定下来，再根据其他逻辑生成wordId，目前为测试版本\n        WorkerNodeEntity workerNodeEntity = this.buildWorkerNode();\n\n        // 从 NEXT_ID 获取下一个可用的 worker ID\n        int id = NEXT_ID.getAndIncrement();\n\n        // 将 worker ID 和对应的 WorkerNodeEntity 存入内存中的列表\n        WORKER_NODES.put((long) id, workerNodeEntity);\n\n        // 使用 ThreadLocalRandom 生成介于 0（包括）和 1000（不包括）之间的随机整数\n        return ThreadLocalRandom.current().nextInt( 0, 1000 );\n    }\n\n    /**\n     * 根据 IP 和端口构建 worker 节点实体\n     */\n    private WorkerNodeEntity buildWorkerNode() {\n        WorkerNodeEntity workerNodeEntity = new WorkerNodeEntity();\n        if ( DockerUtils.isDocker() ) {\n            workerNodeEntity.setType(WorkerNodeType.CONTAINER.value());\n            workerNodeEntity.setHostName(DockerUtils.getDockerHost());\n            
workerNodeEntity.setPort(DockerUtils.getDockerPort());\n\n        }\n        else {\n            workerNodeEntity.setType(WorkerNodeType.ACTUAL.value());\n            workerNodeEntity.setHostName(NetUtils.getLocalAddress());\n            workerNodeEntity.setPort(System.currentTimeMillis() + \"-\" + RandomUtils.nextInt(100000));\n        }\n\n        return workerNodeEntity;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/worker/WorkerIdAssigner.java",
    "content": "package com.pinecone.ulf.util.guid.i64.worker;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface WorkerIdAssigner extends Pinenut {\n    long assignWorkerId();\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/worker/WorkerNodeType.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.worker;\n\n\nimport com.pinecone.ulf.util.guid.i64.utils.ValuedEnum;\n\n/**\n * WorkerNodeType\n * <li>CONTAINER: Such as Docker\n * <li>ACTUAL: Actual machine\n * \n * @author yutianbao\n */\npublic enum WorkerNodeType implements ValuedEnum<Integer> {\n\n    CONTAINER(1), ACTUAL(2);\n\n    /**\n     * Lock type\n     */\n    private final Integer type;\n\n    /**\n     * Constructor with field of type\n     */\n    private WorkerNodeType(Integer type) {\n        this.type = type;\n    }\n\n    @Override\n    public Integer value() {\n        return type;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/guid/i64/worker/entity/WorkerNodeEntity.java",
    "content": "/*\n * Copyright (c) 2017 Baidu, Inc. All Rights Reserve.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n *     http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage com.pinecone.ulf.util.guid.i64.worker.entity;\n\n\nimport com.pinecone.ulf.util.guid.i64.worker.WorkerNodeType;\nimport org.apache.commons.lang.builder.ReflectionToStringBuilder;\nimport org.apache.commons.lang.builder.ToStringStyle;\n\nimport java.util.Date;\n\n/**\n * Entity for M_WORKER_NODE\n *\n * @author yutianbao\n */\npublic class WorkerNodeEntity {\n\n    /**\n     * Entity unique id (table unique)\n     */\n    private long id;\n\n    /**\n     * Type of CONTAINER: HostName, ACTUAL : IP.\n     */\n    private String hostName;\n\n    /**\n     * Type of CONTAINER: Port, ACTUAL : Timestamp + Random(0-10000)\n     */\n    private String port;\n\n    /**\n     * type of {@link WorkerNodeType}\n     */\n    private int type;\n\n    /**\n     * Worker launch date, default now\n     */\n    private Date launchDate = new Date();\n\n    /**\n     * Created time\n     */\n    private Date created;\n\n    /**\n     * Last modified\n     */\n    private Date modified;\n\n    /**\n     * Getters & Setters\n     */\n    public long getId() {\n        return id;\n    }\n\n    public void setId(long id) {\n        this.id = id;\n    }\n\n    public String getHostName() {\n        return hostName;\n    }\n\n    public void setHostName(String hostName) {\n        this.hostName = hostName;\n    }\n\n    public String 
getPort() {\n        return port;\n    }\n\n    public void setPort(String port) {\n        this.port = port;\n    }\n\n    public int getType() {\n        return type;\n    }\n\n    public void setType(int type) {\n        this.type = type;\n    }\n\n    public Date getLaunchDate() {\n        return launchDate;\n    }\n\n    public void setLaunchDateDate(Date launchDate) {\n        this.launchDate = launchDate;\n    }\n\n    public Date getCreated() {\n        return created;\n    }\n\n    public void setCreated(Date created) {\n        this.created = created;\n    }\n\n    public Date getModified() {\n        return modified;\n    }\n\n    public void setModified(Date modified) {\n        this.modified = modified;\n    }\n\n    @Override\n    public String toString() {\n        return ReflectionToStringBuilder.toString(this, ToStringStyle.SHORT_PREFIX_STYLE);\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/ArchMultiScopeFactory.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.system.executum.TaskManager;\nimport com.pinecone.framework.util.lang.ArchDynamicFactory;\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.name.Name;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\npublic abstract class ArchMultiScopeFactory extends ArchDynamicFactory implements MultiScopeFactory {\n    protected TaskManager             mTaskManager        ;\n    protected MultiTraitClassLoader   mTraitClassLoader   ;\n    protected Map<Class<?>, Object>   mInstanceSingletons ;\n\n    protected ArchMultiScopeFactory( TaskManager taskManager, ClassLoader classLoader, MultiTraitClassLoader traitClassLoader, ClassScope classScope ) {\n        super( classLoader, classScope );\n        this.mTaskManager         = taskManager       ;\n        this.mTraitClassLoader    = traitClassLoader  ;\n        this.mInstanceSingletons  = new ConcurrentHashMap<>();\n    }\n\n    @Override\n    public <T> void putInstanceSingleton( Class<T> clazz, T obj ) {\n        this.mInstanceSingletons.put( clazz, obj );\n    }\n\n    @Override\n    public <T> void removeInstanceSingleton(Class<T> clazz) {\n        this.mInstanceSingletons.remove( clazz );\n    }\n\n    @Override\n    public int instanceSingletonSize() {\n        return this.mInstanceSingletons.size();\n    }\n\n    @Override\n    public MultiTraitClassLoader getTraitClassLoader() {\n        return this.mTraitClassLoader;\n    }\n\n    @Override\n    protected Object beforeInstantiate( Class<?> that, Class<?>[] stereotypes, Object[] args ) {\n        return this.mInstanceSingletons.get( that );\n    }\n\n    @Override\n    public Object newInstance ( Class<? 
> that, Class<?>[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {\n        return super.newInstance( that, stereotypes, args );\n    }\n\n    @Override\n    public Object spawn (Name name, Object... args ) throws InvocationTargetException {\n        return this.spawn( name, null, args );\n    }\n\n    @Override\n    public Object spawn ( Name name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException {\n        List<Class<? > > classes = this.mTraitClassLoader.loads( name );\n\n        Exception lastExp = null;\n        if( !classes.isEmpty() ){\n            for ( Class<? > c : classes ) {\n                try {\n                    return this.newInstance( c, stereotypes, args );\n                }\n                catch ( Exception e ) {\n                    lastExp = e;\n                }\n            }\n        }\n\n        throw new InvocationTargetException(\n                lastExp, String.format( \"%s::spawn, what-> Spawning in all scopes, has compromised.\", this.className() )\n        );\n    }\n\n    @Override\n    public List popping ( Name name, Object... args ) {\n        return this.popping( name, null, args );\n    }\n\n    @Override\n    public List popping ( Name name, Class<?>[] stereotypes, Object... args ) {\n        List<Class<? > > classes = this.mTraitClassLoader.loads( name ); // Try load by explicit name, saving times.\n\n        List<Object > list = new ArrayList<>();\n\n        if( !classes.isEmpty() ){\n            for ( Class<? 
> c : classes ) {\n                try {\n                    Object o = this.newInstance( c, stereotypes, args );\n                    if( o != null ) {\n                        list.add( o ) ;\n                    }\n                }\n                catch ( Exception e ) {\n                    this.handleIgnoreException( e );\n                }\n            }\n        }\n\n        return list;\n    }\n\n    protected void handleIgnoreException( Exception e ) throws ProvokeHandleException {\n        // Just ignore them.\n        e.printStackTrace();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/ArchMultiScopeLoader.java",
    "content": "package com.pinecone.ulf.util.lang;\nimport com.pinecone.framework.unit.LinkedTreeMap;\nimport com.pinecone.framework.unit.LinkedTreeSet;\nimport com.pinecone.framework.util.lang.ArchClassScopeLoader;\nimport com.pinecone.framework.util.lang.ClassScanner;\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.ScopedPackage;\nimport com.pinecone.framework.util.name.MultiScopeName;\nimport com.pinecone.framework.util.name.Name;\n\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.NotFoundException;\nimport javassist.bytecode.annotation.Annotation;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\npublic abstract class ArchMultiScopeLoader extends ArchClassScopeLoader implements MultiTraitClassLoader {\n    protected ClassScanner                         mClassScanner       ;\n    protected ClassPool                            mClassPool          ;\n    protected Map<String, CtClass >                mLoadedClassesPool  ;\n    protected Set<String >                         mVisitedClasses     ;\n    protected HierarchyClassInspector              mClassInspector     ;\n\n    protected ArchMultiScopeLoader( ClassScope classScope, ClassLoader classLoader, ClassPool classPool, ClassScanner classScanner, HierarchyClassInspector classInspector ) {\n        super( classScope, classLoader );\n\n        this.mClassPool            = classPool;\n        this.mLoadedClassesPool    = new LinkedTreeMap<>();\n        this.mVisitedClasses       = new LinkedTreeSet<>();\n        this.mClassScanner         = classScanner;\n        this.mClassInspector       = classInspector;\n    }\n\n    @Override\n    public Class<? > load( Name simpleName ) throws ClassNotFoundException {\n        try{\n            Class<? 
> c = this.loadByName( simpleName );\n            if( c != null ) {\n                return c;\n            }\n        }\n        catch ( ClassNotFoundException e ) {\n            this.handleIgnoreException( e );\n        }\n\n        return this.loadInClassTrait( simpleName );\n    }\n\n    @Override\n    public List<Class<? > > loads( Name simpleName ) {\n        List<Class<? > > classes = this.loadsByName( simpleName );\n        this.loadsInClassTrait0( simpleName, false, classes );\n        return classes;\n    }\n\n    @Override\n    public Class<? > loadByName( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? >) this.loads0( simpleName, true );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public List<Class<? > > loadsByName( Name simpleName ) {\n        try{\n            return (List<Class<? > >) this.loads0( simpleName, false );\n        }\n        catch ( ClassNotFoundException e ) {\n            return null; // This should never be happened.\n        }\n    }\n\n    @Override\n    public Class<? >  loadInClassTrait( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? >)this.loadsInClassTrait0( simpleName, true, null );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public List<Class<? > > loadsInClassTrait( Name simpleName ) {\n        return (List<Class<? > >)this.loadsInClassTrait0( simpleName, false, new ArrayList<>() );\n    }\n\n    protected abstract boolean isAnnotationQualified( Annotation that, String szName );\n\n    protected Object loadsInClassTrait0( Name simpleName, boolean bOnlyFirst, List<Class<? 
> > batch ) {\n        this.updateScope();\n\n        for( Map.Entry<String, CtClass > kv : this.mLoadedClassesPool.entrySet() ) {\n            CtClass pc = kv.getValue();\n            Annotation[] annotations = this.mClassInspector.queryVisibleAnnotations( pc );\n            if( annotations != null ) {\n                for( Annotation annotation : annotations ) {\n                    if( this.isAnnotationQualified( annotation, simpleName.getName() ) ) {\n                        try{\n                            Class<?> c = this.mClassLoader.loadClass( kv.getKey() );\n                            if( bOnlyFirst ) {\n                                return c;\n                            }\n                            else {\n                                batch.add( (Class<? >)c );\n                            }\n                        }\n                        catch ( ClassNotFoundException e ) {\n                            this.handleIgnoreException( e );\n                        }\n                    }\n                }\n            }\n        }\n\n        return batch;\n    }\n\n    @Override\n    protected List<String > expandNamespace( Name name ) {\n        if( name instanceof MultiScopeName) {\n            return ((MultiScopeName) name).getFullNames();\n        }\n\n        return List.of( name.getFullName() ) ;\n    }\n\n    @Override\n    protected void registerDefaultFilters() {\n\n    }\n\n    @Override\n    protected abstract Class<? 
> loadSingleByFullClassName( String szFullClassName );\n\n    @Override\n    public MultiTraitClassLoader updateScope() {\n        try{\n            List<String > candidates = new ArrayList<>();\n            for ( ScopedPackage scope : this.mClassScope.getAllScopes() ) {\n                String szPkgName = scope.packageName();\n                if( this.mVisitedClasses.contains( szPkgName ) ) {\n                    continue;\n                }\n                else {\n                    this.mVisitedClasses.add( szPkgName );\n                }\n\n                try {\n                    this.mClassScanner.scan( szPkgName, true, candidates );\n                }\n                catch ( IOException e ) {\n                    this.handleIgnoreException( e );\n                }\n            }\n\n            for( String ns : candidates ) {\n                this.mLoadedClassesPool.put( ns, this.mClassPool.get( ns ) );\n            }\n        }\n        catch ( NotFoundException e ) {\n            this.handleIgnoreException( e );\n        }\n\n        return this;\n    }\n\n\n    @Override\n    public void clearCache() {\n        this.mLoadedClassesPool.clear();\n        this.mVisitedClasses.clear();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/GenericPreloadClassInspector.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.CtMethod;\nimport javassist.NotFoundException;\nimport javassist.bytecode.AnnotationsAttribute;\nimport javassist.bytecode.BadBytecode;\nimport javassist.bytecode.ClassFile;\nimport javassist.bytecode.MethodInfo;\nimport javassist.bytecode.SignatureAttribute;\nimport javassist.bytecode.annotation.Annotation;\n\npublic class GenericPreloadClassInspector implements HierarchyClassInspector {\n    protected ClassPool     mClassPool;\n\n    public GenericPreloadClassInspector( ClassPool classPool ) {\n        this.mClassPool = classPool;\n    }\n\n    @Override\n    public CtClass preloadClass( String szClassName ) throws NotFoundException {\n        return this.mClassPool.get( szClassName );\n    }\n\n    @Override\n    public boolean isImplementedDirectly( CtClass clazz, Class<?> interf ) throws NotFoundException {\n        CtClass[] interfaces = clazz.getInterfaces();\n        for ( CtClass iface : interfaces ) {\n            if ( iface.getName().equals( interf.getName() ) ) {\n                return true;\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public boolean isImplemented( CtClass clazz, Class<?> interf ) throws NotFoundException {\n        String szInterfaceName = interf.getName();\n        while ( clazz != null && !clazz.getName().equals( Object.class.getName() ) ) {\n            CtClass[] interfaces = clazz.getInterfaces();\n            for ( CtClass iface : interfaces ) {\n                if ( this.isInterfaceExtended( iface, szInterfaceName ) ) {\n                    return true;\n                }\n            }\n            clazz = clazz.getSuperclass();\n        }\n        return false;\n    }\n\n    @Override\n    public boolean isExtendedDirectly( CtClass clazz, Class<?> parent ) throws NotFoundException {\n        CtClass superClass = clazz.getSuperclass();\n        if ( superClass != null 
&& superClass.getName().equals( parent.getName() ) ) {\n            return true;\n        }\n\n        if( clazz.isInterface() ) {\n            return this.isImplementedDirectly( clazz, parent );\n        }\n        return false;\n    }\n\n    @Override\n    public boolean isExtended( CtClass clazz, Class<?> parent ) throws NotFoundException {\n        if( clazz.isInterface() ) {\n            return this.isInterfaceExtended( clazz, parent.getName() );\n        }\n\n        while ( clazz != null && !clazz.getName().equals( Object.class.getName() ) ) {\n            CtClass superClass = clazz.getSuperclass();\n            if (superClass != null && superClass.getName().equals(parent.getName())) {\n                return true;\n            }\n            clazz = clazz.getSuperclass();\n        }\n\n        return false;\n    }\n\n    private boolean isInterfaceExtended( CtClass clazz, String interfaceName ) throws NotFoundException {\n        if ( clazz == null ) {\n            return false;\n        }\n\n        CtClass[] interfaces = clazz.getInterfaces();\n        for ( CtClass interfaceClass : interfaces ) {\n            if ( interfaceClass.getName().equals( interfaceName ) ) {\n                return true;\n            }\n            if ( this.isInterfaceExtended( interfaceClass, interfaceName ) ) {\n                return true;\n            }\n        }\n\n        CtClass superClass = clazz.getSuperclass();\n        if ( superClass != null ) {\n            return this.isInterfaceExtended( superClass, interfaceName );\n        }\n        return false;\n    }\n\n    @Override\n    public Annotation[] queryVisibleAnnotations( CtClass clazz ) {\n        ClassFile classFile = clazz.getClassFile();\n        AnnotationsAttribute visible = (AnnotationsAttribute) classFile.getAttribute( AnnotationsAttribute.visibleTag );\n        if ( visible != null ) {\n            return visible.getAnnotations();\n        }\n        return null;\n    }\n\n    @Override\n    public 
boolean hasOwnAnnotation( CtClass clazz, Class<?> annotationClass ) {\n        Annotation[] annotations = this.queryVisibleAnnotations( clazz );\n        if( annotations == null ) {\n            return false;\n        }\n\n        for ( Annotation annotation : annotations ) {\n            if ( annotation.getTypeName().equals( annotationClass.getName() ) ) {\n                return true;\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public boolean hasOwnAnnotations( CtClass clazz, Class<?>[] annotationClasses ) {\n        Annotation[] annotations = this.queryVisibleAnnotations( clazz );\n        if( annotations == null ) {\n            return false;\n        }\n\n        return this.hasOwnAnnotations( annotations, annotationClasses );\n    }\n\n    @Override\n    public boolean hasOwnMethod( CtClass clazz, String methodName ) {\n        try {\n            clazz.getDeclaredMethod( methodName );\n            return true;\n        }\n        catch ( NotFoundException e ) {\n            return false;\n        }\n    }\n\n    @Override\n    public boolean hasOwnMethods( CtClass clazz, String[] methodNames ) {\n        for ( String methodName : methodNames ) {\n            if ( !this.hasOwnMethod(clazz, methodName) ) {\n                return false;\n            }\n        }\n        return true;\n    }\n\n    protected boolean hasOwnAnnotations( Annotation[] annotations, Class<?>[] annotationClasses ) {\n        for ( Class<?> annotationClass : annotationClasses ) {\n            boolean found = false;\n            for ( Annotation annotation : annotations ) {\n                if ( annotation.getTypeName().equals( annotationClass.getName() ) ) {\n                    found = true;\n                    break;\n                }\n            }\n            if (!found) {\n                return false;\n            }\n        }\n        return true;\n    }\n\n    public boolean methodHasAnnotations( CtMethod method, Class<?>[] annotationClasses 
) {\n        MethodInfo methodInfo = method.getMethodInfo();\n        AnnotationsAttribute attr = (AnnotationsAttribute) methodInfo.getAttribute( AnnotationsAttribute.visibleTag );\n        if ( attr == null ) {\n            return false;\n        }\n\n        return this.hasOwnAnnotations( attr.getAnnotations(), annotationClasses );\n    }\n\n    public boolean methodHasAnnotations( CtMethod method, String[] annotationNames ) {\n        MethodInfo methodInfo = method.getMethodInfo();\n        AnnotationsAttribute attr = (AnnotationsAttribute) methodInfo.getAttribute( AnnotationsAttribute.visibleTag );\n        if ( attr == null ) {\n            return false;\n        }\n\n        for ( String annotationName : annotationNames ) {\n            boolean found = false;\n            for ( Annotation annotation : attr.getAnnotations() ) {\n                if ( annotation.getTypeName().equals( annotationName ) ) {\n                    found = true;\n                    break;\n                }\n            }\n            if (!found) {\n                return false;\n            }\n        }\n        return true;\n    }\n\n    public boolean methodHasAnnotation( CtMethod method, Class<?> annotationClass ) {\n        return this.methodHasAnnotation( method, annotationClass.getName() );\n    }\n\n    public boolean methodHasAnnotation( CtMethod method, String annotationName ) {\n        MethodInfo methodInfo = method.getMethodInfo();\n        AnnotationsAttribute attr = (AnnotationsAttribute) methodInfo.getAttribute( AnnotationsAttribute.visibleTag );\n        if ( attr != null ) {\n            for ( Annotation annotation : attr.getAnnotations() ) {\n                if ( annotation.getTypeName().equals( annotationName ) ) {\n                    return true;\n                }\n            }\n        }\n        return false;\n    }\n\n    @Override\n    public boolean hasOwnField( CtClass clazz, String fieldName ) {\n        try {\n            clazz.getDeclaredField( 
fieldName );\n            return true;\n        }\n        catch ( NotFoundException e ) {\n            return false;\n        }\n    }\n\n    @Override\n    public boolean hasOwnFields( CtClass clazz, String[] fieldNames ) {\n        for ( String fieldName : fieldNames ) {\n            if ( !this.hasOwnField( clazz, fieldName ) ) {\n                return false;\n            }\n        }\n        return true;\n    }\n\n\n\n    public static String[] parseGenericParameterTypes( CtMethod method ) throws NotFoundException, BadBytecode {\n        SignatureAttribute.MethodSignature methodSignature = GenericPreloadClassInspector.getMethodSignature( method );\n        if ( methodSignature == null ) {\n            CtClass[] ps = method.getParameterTypes();\n            String[] result = new String[ ps.length ];\n            for ( int i = 0; i < ps.length; ++i ) {\n                result[ i ] = ps[ i ].getName();\n            }\n            return result;\n        }\n\n        SignatureAttribute.Type[] paramTypes = methodSignature.getParameterTypes();\n        String[] result = new String[ paramTypes.length ];\n\n        for ( int i = 0; i < paramTypes.length; ++i ) {\n            result[ i ] = GenericPreloadClassInspector.typeToString( paramTypes[ i ] );\n        }\n\n        return result;\n    }\n\n    public static String parseGenericReturnType( CtMethod method ) throws NotFoundException, BadBytecode {\n        SignatureAttribute.MethodSignature methodSignature = GenericPreloadClassInspector.getMethodSignature( method );\n        if ( methodSignature == null ) {\n            return method.getReturnType().getName();\n        }\n\n        return GenericPreloadClassInspector.typeToString( methodSignature.getReturnType() );\n    }\n\n    public static String[] evalGenericParameterTypes( CtMethod method ) {\n        try {\n            return GenericPreloadClassInspector.parseGenericParameterTypes( method );\n        }\n        catch ( NotFoundException | BadBytecode e ) {\n 
           return null;\n        }\n    }\n\n    public static String getGenericReturnType( CtMethod method ) {\n        try {\n            SignatureAttribute.MethodSignature methodSignature = GenericPreloadClassInspector.getMethodSignature( method );\n            if ( methodSignature == null ) {\n                return null;\n            }\n\n            return GenericPreloadClassInspector.typeToString( methodSignature.getReturnType() );\n        }\n        catch ( BadBytecode e ) {\n            return null;\n        }\n    }\n\n    public static String evalGenericReturnType( CtMethod method ) {\n        try {\n            return GenericPreloadClassInspector.parseGenericReturnType( method );\n        }\n        catch ( NotFoundException | BadBytecode e ) {\n            return null;\n        }\n    }\n\n    protected static SignatureAttribute.MethodSignature getMethodSignature( CtMethod method ) throws BadBytecode {\n        SignatureAttribute signature = (SignatureAttribute) method.getMethodInfo().getAttribute( SignatureAttribute.tag );\n        if ( signature == null ) {\n            return null;\n        }\n        return SignatureAttribute.toMethodSignature( signature.getSignature() );\n    }\n\n    public static String typeToString( SignatureAttribute.Type type ) {\n        if ( type instanceof SignatureAttribute.ClassType ) {\n            SignatureAttribute.ClassType classType = (SignatureAttribute.ClassType) type;\n            if ( classType.getTypeArguments() != null && classType.getTypeArguments().length > 0 ) {\n                StringBuilder sb = new StringBuilder(classType.getName());\n                sb.append( \"<\" );\n                for ( int i = 0; i < classType.getTypeArguments().length; ++i ) {\n                    if ( i > 0 ) {\n                        sb.append( \", \" );\n                    }\n                    sb.append(classType.getTypeArguments()[i].toString());\n                }\n                sb.append( \">\" );\n                
return sb.toString();\n            }\n            return classType.getName();\n        }\n        return type.toString();\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/HierarchyClassInspector.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport javassist.CtClass;\nimport javassist.NotFoundException;\n\npublic interface HierarchyClassInspector extends PreloadClassInspector {\n\n    default boolean isImplementedDirectly( String szClassName, Class<?> interf ) throws NotFoundException {\n        return this.isImplementedDirectly( this.preloadClass( szClassName ), interf );\n    }\n\n    boolean isImplementedDirectly(CtClass clazz, Class<?> interf ) throws NotFoundException ;\n\n    default boolean isImplemented( String szClassName, Class<?> interf ) throws NotFoundException {\n        return this.isImplemented( this.preloadClass( szClassName ), interf );\n    }\n\n    boolean isImplemented( CtClass clazz, Class<?> interf ) throws NotFoundException ;\n\n    default boolean isExtendedDirectly( String szClassName, Class<?> interf ) throws NotFoundException {\n        return this.isExtendedDirectly( this.preloadClass( szClassName ), interf );\n    }\n\n    boolean isExtendedDirectly( CtClass clazz, Class<?> parent ) throws NotFoundException ;\n\n    default boolean isExtended( String szClassName, Class<?> interf ) throws NotFoundException {\n        return this.isExtended( this.preloadClass( szClassName ), interf );\n    }\n\n    boolean isExtended( CtClass clazz, Class<?> parent ) throws NotFoundException ;\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/MultiScopeFactory.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.lang.DynamicFactory;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.framework.util.name.ScopeName;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.List;\n\npublic interface MultiScopeFactory extends DynamicFactory {\n\n    MultiTraitClassLoader getTraitClassLoader();\n\n    default Object spawn( String name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException {\n        return this.spawn( new ScopeName(name), stereotypes, args );\n    }\n\n    Object spawn( Name name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException;\n\n    default Object spawn( String name, Object... args ) throws InvocationTargetException {\n        return this.spawn( new ScopeName(name), args );\n    }\n\n    Object spawn( Name name, Object... args ) throws InvocationTargetException;\n\n\n    default List popping( String name, Class<?>[] stereotypes, Object... args ) {\n        return this.popping( new ScopeName(name), stereotypes, args );\n    }\n\n    List popping( Name name, Class<?>[] stereotypes, Object... args );\n\n    default List popping( String name, Object... args ) {\n        return this.popping( new ScopeName(name), args );\n    }\n\n    List popping( Name name, Object... args );\n\n\n\n\n    <T> void putInstanceSingleton     ( Class<T> clazz, T obj );\n\n    <T> void removeInstanceSingleton  ( Class<T> clazz );\n\n    int instanceSingletonSize();\n\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/MultiTraitClassLoader.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport com.pinecone.framework.util.lang.MultiClassScopeLoader;\nimport com.pinecone.framework.util.name.Name;\n\nimport java.util.List;\n\npublic interface MultiTraitClassLoader extends TraitClassLoader, MultiClassScopeLoader {\n    List<Class<? > > loads( Name name ) ;\n\n    List<Class<? > > loadsByName( Name simpleName );\n\n    List<Class<? > > loadsInClassTrait( Name simpleName ) ;\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/PooledClassCandidateScanner.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport com.pinecone.framework.util.lang.*;\nimport javassist.ClassPool;\n\nimport java.io.IOException;\n\npublic class PooledClassCandidateScanner extends ClassCandidateScanner {\n    protected ClassPool        mClassPool;\n\n    public PooledClassCandidateScanner     ( ClassScope searchScope, ClassLoader classLoader, NSProtocolIteratorsFactoryAdapter iteratorsFactory, ClassPool classPool ) {\n        super( searchScope, classLoader, iteratorsFactory );\n        this.mClassPool = classPool;\n    }\n\n    public PooledClassCandidateScanner     ( ClassScope searchScope, ClassLoader classLoader, ClassPool classPool ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), classPool );\n    }\n\n    public PooledClassCandidateScanner     ( ClassScope searchScope, ClassLoader classLoader ) {\n        this( searchScope, classLoader, new ClassScopeNSProtocolIteratorsFactory( classLoader, searchScope ), ClassPool.getDefault() );\n    }\n\n    public void setClassPool ( ClassPool classPool ) {\n        this.mClassPool = classPool;\n    }\n\n    @Override\n    protected boolean filter( String szClassName ) {\n        try{\n            for ( TypeFilter filter : this.mIncludeFilters ) {\n                if ( filter.match( szClassName, this.mClassPool ) ) {\n                    return false;\n                }\n            }\n\n            for ( TypeFilter filter : this.mExcludeFilters ) {\n                if ( filter.match( szClassName, this.mClassPool ) ) {\n                    return true;\n                }\n            }\n        }\n        catch ( IOException e ) {\n            return true;\n        }\n\n        return false;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/PreloadClassInspector.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport javassist.CtClass;\nimport javassist.NotFoundException;\nimport javassist.bytecode.annotation.Annotation;\n\npublic interface PreloadClassInspector extends Pinenut {\n    CtClass preloadClass( String szClassName ) throws NotFoundException ;\n\n    default boolean hasOwnAnnotation( String szClassName, Class<?> interf ) throws NotFoundException {\n        return this.hasOwnAnnotation( this.preloadClass( szClassName ), interf );\n    }\n\n    Annotation[] queryVisibleAnnotations( CtClass clazz );\n\n    boolean hasOwnAnnotation( CtClass clazz, Class<?> annotationClass ) ;\n\n    default boolean hasOwnAnnotations( String szClassName, Class<?>[] annotationClasses ) throws NotFoundException {\n        return this.hasOwnAnnotations( this.preloadClass( szClassName ), annotationClasses );\n    }\n\n    boolean hasOwnAnnotations( CtClass clazz, Class<?>[] annotationClasses );\n\n    default boolean hasOwnMethod( String szClassName, String methodName ) throws NotFoundException {\n        return this.hasOwnMethod(this.preloadClass(szClassName), methodName);\n    }\n\n    boolean hasOwnMethod( CtClass clazz, String methodName ) ;\n\n    default boolean hasOwnField( String szClassName, String fieldName ) throws NotFoundException {\n        return this.hasOwnField( this.preloadClass(szClassName), fieldName );\n    }\n\n    boolean hasOwnField( CtClass clazz, String fieldName ) ;\n\n    default boolean hasOwnMethods( String szClassName, String[] methodNames ) throws NotFoundException {\n        return this.hasOwnMethods( this.preloadClass(szClassName), methodNames );\n    }\n\n    boolean hasOwnMethods( CtClass clazz, String[] methodNames ) ;\n\n    default boolean hasOwnFields( String szClassName, String[] fieldNames ) throws NotFoundException {\n        return this.hasOwnFields( this.preloadClass(szClassName), fieldNames );\n    }\n\n    boolean hasOwnFields( 
CtClass clazz, String[] fieldNames ) ;\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/SimpleAnnotationExcludeFilter.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport com.pinecone.framework.util.lang.TypeFilter;\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.NotFoundException;\n\nimport java.io.IOException;\n\npublic class SimpleAnnotationExcludeFilter implements TypeFilter {\n    protected HierarchyClassInspector mClassInspector;\n\n    protected Class<? >               mAnnotationType;\n\n    public SimpleAnnotationExcludeFilter( HierarchyClassInspector inspector, Class<? > annotationType ) {\n        this.mClassInspector = inspector;\n        this.mAnnotationType = annotationType;\n    }\n\n    @Override\n    public boolean match( String szClassName, Object pool ) throws IOException {\n        try{\n            CtClass clz = ( (ClassPool) pool ).get( szClassName );\n            return !this.mClassInspector.hasOwnAnnotation( clz, this.mAnnotationType ) ;\n        }\n        catch ( NotFoundException e ) {\n            return true;\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/lang/TraitClassLoader.java",
    "content": "package com.pinecone.ulf.util.lang;\n\nimport com.pinecone.framework.util.lang.ClassScopeLoader;\nimport com.pinecone.framework.util.name.Name;\n\npublic interface TraitClassLoader extends ClassScopeLoader {\n    @Override\n    Class<? > load( Name simpleName ) throws ClassNotFoundException ;\n\n    // Directly by it`s name.\n    Class<? > loadByName( Name simpleName ) throws ClassNotFoundException ;\n\n    // Scanning class`s annotations, methods or others.\n    Class<? > loadInClassTrait( Name simpleName ) throws ClassNotFoundException ;\n\n    TraitClassLoader updateScope();\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/BeanProtobufDecoder.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface BeanProtobufDecoder extends Pinenut {\n\n    Map decodeMap( Class<?> clazz, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String > exceptedKeys, Options options );\n\n    default Map decodeMap( Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String > exceptedKeys, Options options ){\n        return this.decodeMap( LinkedHashMap.class, descriptor, dynamicMessage, exceptedKeys, options );\n    }\n\n    <T > T decodeBean( Class<T> clazz, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String > exceptedKeys, Options options );\n\n    <T > T decode( Class<T> clazz, String genericLabel, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String > exceptedKeys, Options options );\n\n    default <T > T decode( Class<T> clazz, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String > exceptedKeys, Options options ) {\n        return this.decode( clazz, null, descriptor, dynamicMessage, exceptedKeys, options );\n    }\n\n    static boolean isNullMessage( DynamicMessage dynamicMessage, Descriptors.Descriptor descriptor ) {\n        return dynamicMessage.getAllFields().isEmpty() && !descriptor.getFields().isEmpty();\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/BeanProtobufEncoder.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.util.Collection;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.protobuf.DescriptorProtos;\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface BeanProtobufEncoder extends Pinenut {\n    BeanProtobufEncoder DefaultEncoder = new GenericBeanProtobufEncoder();\n\n    Descriptors.Descriptor transform( Object dynamicObject, Set<String > exceptedKeys, Options options );\n\n    Descriptors.Descriptor transform( Map dynamicObject, Set<String > exceptedKeys, Options options );\n\n    DescriptorProtos.FieldDescriptorProto.Builder transform( Collection dynamicObject, Class<? > elementType, String key, int fieldNumber, Options options );\n\n    //Descriptors.Descriptor transform( Object[] dynamicObject, Set<String > exceptedKeys, Options options );\n\n    Descriptors.Descriptor transformBean( Class<?> clazz, Object dynamicObject, Set<String > exceptedKeys, Options options );\n\n    Descriptors.Descriptor transform( Class<?> clazz, Object dynamicObject, Set<String > exceptedKeys, Options options );\n\n    default Descriptors.Descriptor transform( Class<?> clazz, Object dynamicObject, Set<String > exceptedKeys ) {\n        return this.transform( clazz, null, dynamicObject, exceptedKeys );\n    }\n\n    default Descriptors.Descriptor transform( Class<?> clazz, String componentGenericLabel, Object dynamicObject, Set<String > exceptedKeys ) {\n        Descriptors.Descriptor primitiveDesc = this.transformPrimitive( clazz );\n        if( primitiveDesc != null ) {\n            return primitiveDesc;\n        }\n        Descriptors.Descriptor repeatedDesc = this.transformRepeated( clazz, componentGenericLabel );\n        if( repeatedDesc != null ) {\n            return repeatedDesc;\n        }\n\n        return this.transform( clazz, dynamicObject, exceptedKeys, Options.DefaultOptions );\n  
  }\n\n    default Descriptors.Descriptor transformRepeated( Class<?> clazz, String componentGenericLabel ) {\n        if( RepeatedWrapper.isSupportedRepeated( clazz ) ) {\n            if ( clazz.isArray() ) {\n                return RepeatedWrapper.transform( clazz, clazz.getComponentType(), this );\n            }\n            else {\n                Class<?> dependenceComponentType = ProtobufUtils.loadSingleGenericType( this.getClass(), componentGenericLabel );\n                if ( dependenceComponentType == null ) {\n                    throw new IllegalArgumentException( \"None valued argument (\" + componentGenericLabel + \") can`t be transformed.\" );\n                }\n                return RepeatedWrapper.transform( clazz, dependenceComponentType, this );\n            }\n        }\n\n        return null;\n    }\n\n    default Descriptors.Descriptor transformPrimitive( Class<?> clazz ) {\n        if( PrimitiveWrapper.isSupportedPrimitive( clazz ) ) {\n            return PrimitiveWrapper.transform( clazz );\n        }\n\n        return null;\n    }\n\n    DescriptorProtos.FieldDescriptorProto.Type reinterpret( Class<?> type );\n\n\n    DynamicMessage encode( Descriptors.Descriptor descriptor, Object dynamicObject, Set<String > exceptedKeys, Options options );\n\n    DynamicMessage encodeBean( Descriptors.Descriptor descriptor, Object dynamicObject, Set<String > exceptedKeys, Options options );\n\n    DynamicMessage encode( Descriptors.Descriptor descriptor, Map dynamicObject, Set<String > exceptedKeys, Options options );\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/DescriptorNameNormalizer.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface DescriptorNameNormalizer extends Pinenut {\n    String normalize( String bad );\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/FieldProtobufDecoder.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.lang.field.FieldEntity;\n\npublic interface FieldProtobufDecoder extends BeanProtobufDecoder {\n\n    Map.Entry<String, Object>[] decodeEntries( Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String > exceptedKeys, Options options );\n\n    void decodeEntries( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String > exceptedKeys, Options options );\n\n    Object[] decodeValues( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String > exceptedKeys, Options options );\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/FieldProtobufEncoder.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.lang.field.FieldEntity;\n\npublic interface FieldProtobufEncoder extends BeanProtobufEncoder {\n    Descriptors.Descriptor transform( Map.Entry<String, Object>[] fields, String szEntityName, Set<String > exceptedKeys, Options options );\n\n    DynamicMessage encode( Descriptors.Descriptor descriptor, Map.Entry<String, Object>[] fields, Set<String > exceptedKeys, Options options );\n\n    Descriptors.Descriptor transform( FieldEntity[] fields, String szEntityName, Set<String > exceptedKeys, Options options );\n\n    DynamicMessage encode( Descriptors.Descriptor descriptor, FieldEntity[] fields, Set<String > exceptedKeys, Options options );\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/FileDescriptorFormater.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface FileDescriptorFormater extends Pinenut {\n    String format( Class<?> type );\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/GenericBeanProtobufDecoder.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.lang.reflect.Array;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashSet;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.system.stereotype.JavaBeans;\nimport com.pinecone.framework.unit.Units;\n\npublic class GenericBeanProtobufDecoder implements BeanProtobufDecoder {\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public <T> T decode( Class<T> clazz, String genericLabel, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String> exceptedKeys, Options options ) {\n        if( PrimitiveWrapper.isSupportedPrimitive( clazz ) ) {\n            return (T) dynamicMessage.getField( descriptor.findFieldByName( PrimitiveWrapper.FieldName ) );\n        }\n        else if( RepeatedWrapper.isSupportedRepeated( clazz ) ) {\n            Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName( RepeatedWrapper.FieldName );\n            Object val = dynamicMessage.getField( fieldDescriptor );\n            Object ret = this.decodeRepeated( val, fieldDescriptor, options, clazz, genericLabel );\n            return clazz.cast( ret );\n        }\n        else if( Map.class.isAssignableFrom( clazz ) ) {\n            if( clazz.isInterface() && Map.class.isAssignableFrom( clazz ) ) {\n                clazz = options.getDefaultMapType();\n            }\n            return clazz.cast( this.decodeMap( clazz, descriptor, dynamicMessage, exceptedKeys, options ) );\n        }\n\n        return clazz.cast( this.decodeBean( clazz, descriptor, dynamicMessage, exceptedKeys, options ) );\n    }\n\n    @Override\n    public Map<String, Object> decodeMap( Class<?> clazz, Descriptors.Descriptor descriptor, DynamicMessage 
dynamicMessage, Set<String> exceptedKeys, Options options ) {\n        if ( descriptor == null || dynamicMessage == null ) {\n            return null;\n        }\n\n        Map<String, Object> result;\n        if( clazz.isInterface() && Map.class.isAssignableFrom( clazz ) ) {\n            result = Units.newInstance( options.getDefaultMapType() );\n        }\n        else {\n            result = Units.newInstance( clazz );\n        }\n\n        for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) {\n            try {\n                String fieldName = fieldDescriptor.getName();\n\n                // Skip excluded keys\n                if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) {\n                    continue;\n                }\n\n                Object value = ProtobufUtils.evalValue( dynamicMessage, fieldDescriptor );\n\n                if ( value != null ) {\n                    if ( fieldDescriptor.isRepeated() ) {\n                        List<?> values = (List<?>) value;\n                        List<Object> decodedValues = new ArrayList<>();\n                        for ( Object item : values ) {\n                            decodedValues.add( this.decodeFieldValue( fieldDescriptor, item, item.getClass(), options ) );\n                        }\n                        result.put( fieldName, decodedValues );\n                    }\n                    else if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) {\n                        Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType();\n                        result.put( fieldName, this.decodeMap( clazz, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ) );\n                    }\n                    else {\n                        result.put( fieldName, this.decodeFieldValue( fieldDescriptor, value, value.getClass(), options ) );\n                    }\n                }\n            }\n            
catch ( Exception e ) {\n                e.printStackTrace();\n            }\n        }\n\n        return result;\n    }\n\n    @Override\n    public <T > T decodeBean( Class<T> targetClass, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String> exceptedKeys, Options options ) {\n        if ( descriptor == null || dynamicMessage == null ) {\n            return null;\n        }\n        else if ( BeanProtobufDecoder.isNullMessage( dynamicMessage, descriptor ) ) {\n            return null;\n        }\n\n        try {\n            if ( targetClass == null ) {\n                return null;\n            }\n\n            Object bean;\n            if( targetClass.isInterface() && Map.class.isAssignableFrom( targetClass ) ) {\n                bean = Units.newInstance( options.getDefaultMapType() );\n            }\n            else {\n                bean = targetClass.getDeclaredConstructor().newInstance();\n            }\n\n            for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) {\n                String fieldName = fieldDescriptor.getName();\n\n                if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) {\n                    continue;\n                }\n\n                Object value = ProtobufUtils.evalValue( dynamicMessage, fieldDescriptor );\n\n                if ( value != null ) {\n                    try {\n                        String setterMethod = JavaBeans.MethodMajorKeySet + JavaBeans.methodKeyNameUpperCaseNormalize( fieldName );\n\n                        Method setter = null;\n                        try{\n                            setter = targetClass.getMethod( setterMethod, this.decodeType( fieldDescriptor ) );\n                        }\n                        catch ( NoSuchMethodException | SecurityException e ) {\n                            Method[] methods = targetClass.getMethods();\n                            for( Method method : methods ) {\n                      
          if( method.getParameterCount() == 1 && method.getName().equals( setterMethod ) ) {\n                                    setter = method;\n                                    break;\n                                }\n                            }\n\n                            if( setter == null ){\n                                continue;\n                            }\n                        }\n\n\n                        if ( fieldDescriptor.isRepeated() ) {\n                            Class<?>[] pars = setter.getParameterTypes();\n                            if( pars.length > 0 ) {\n                                Class<?> nestedType = pars[ 0 ];\n                                String szGType = ProtobufUtils.evalSetterGenericLabel( setter );\n                                setter.invoke( bean, this.decodeRepeated( value, fieldDescriptor, options, nestedType, szGType ) );\n                            }\n                        }\n                        else if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) {\n                            Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType();\n                            Class<?>[] pars = setter.getParameterTypes();\n                            if( pars.length > 0 ) {\n                                Object nestedBean;\n                                Class<?> nestedType = pars[ 0 ];\n                                String szGType = ProtobufUtils.evalSetterGenericLabel( setter );\n                                if( nestedType.equals( Map.class ) ) {\n                                    nestedBean = this.decodeMap( nestedType, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options );\n                                }\n                                else {\n                                    if ( descriptor.equals( nestedDescriptor ) ) {\n                                        DynamicMessage dyVal =(DynamicMessage) value;\n                 
                       if ( BeanProtobufDecoder.isNullMessage( dyVal, nestedDescriptor ) ) {\n                                            nestedBean = null;\n                                        }\n                                        else {\n                                            nestedBean = this.decode( nestedType, szGType, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options );\n                                        }\n                                    }\n                                    else {\n                                        nestedBean = this.decode( nestedType, szGType, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options );\n                                    }\n                                }\n\n                                setter.invoke( bean, nestedBean );\n                            }\n                        }\n                        else {\n                            Class<?>[] pars = setter.getParameterTypes();\n                            if( pars.length > 0 ) {\n                                Class<?> nestedType = pars[ 0 ];\n                                String szGType = ProtobufUtils.evalSetterGenericLabel( setter );\n                                setter.invoke( bean, this.decodeFieldValue( fieldDescriptor, value, nestedType, szGType, options ) );\n                            }\n                        }\n                    }\n                    catch ( IllegalAccessException | InvocationTargetException | IllegalArgumentException ignore ) {\n                        //ignore.printStackTrace();\n                    }\n                }\n            }\n\n            return targetClass.cast( bean );\n        }\n        catch ( Exception e ) {\n            e.printStackTrace();\n            return null;\n        }\n    }\n\n    protected Object decodeFieldValue( Descriptors.FieldDescriptor fieldDescriptor, Object value, Class<?> valueType, String genericLabel, Options options ) {\n        
switch ( fieldDescriptor.getType() ) {\n            case BOOL: {\n                return value;\n            }\n            case INT32:\n            case SINT32:\n            case SFIXED32: {\n                return value;\n            }\n            case INT64:\n            case SINT64:\n            case SFIXED64: {\n                return value;\n            }\n            case FLOAT: {\n                return value;\n            }\n            case DOUBLE: {\n                return value;\n            }\n            case STRING: {\n                return value.toString();\n            }\n            case BYTES: {\n                return value instanceof com.google.protobuf.ByteString\n                        ? ((com.google.protobuf.ByteString) value).toByteArray()\n                        : value;\n            }\n            case MESSAGE: {\n                Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType();\n                return this.decode( valueType, genericLabel, nestedDescriptor, (DynamicMessage) value, null, options );\n            }\n            default: {\n                return value;\n            }\n        }\n    }\n\n    protected Object decodeFieldValue( Descriptors.FieldDescriptor fieldDescriptor, Object value, Class<?> valueType, Options options ) {\n        return this.decodeFieldValue( fieldDescriptor, value, valueType, null, options );\n    }\n\n    protected Class<?> decodeType( Descriptors.FieldDescriptor fieldDescriptor ) {\n        switch ( fieldDescriptor.getType() ) {\n            case BOOL: {\n                return Boolean.class;\n            }\n            case INT32:\n            case SINT32:\n            case SFIXED32: {\n                return Integer.class;\n            }\n            case INT64:\n            case SINT64:\n            case SFIXED64: {\n                return Long.class;\n            }\n            case FLOAT: {\n                return Float.class;\n            }\n            case DOUBLE: 
{\n                return Double.class;\n            }\n            case STRING: {\n                return String.class;\n            }\n            case BYTES: {\n                return byte[].class;\n            }\n            case MESSAGE: {\n                return null;\n            }\n            default: {\n                throw new IllegalArgumentException( \"Unsupported field type: \" + fieldDescriptor.getType() );\n            }\n        }\n    }\n\n    protected void setCollectionRepeated( Collection<?> values, Collection<Object> decodedValues, String genericTypeLabel, Descriptors.FieldDescriptor fieldDescriptor, Options options ) {\n        if ( genericTypeLabel == null ) {\n            throw new IllegalArgumentException( \"Unable to decode `genericTypeLabel` with null.\" );\n        }\n        Class<?> componentType = ProtobufUtils.loadSingleGenericType( this.getClass(), genericTypeLabel );\n        if ( componentType == null ) {\n            throw new IllegalArgumentException( \"Unable to decode `genericTypeLabel` \" + genericTypeLabel + \".\" );\n        }\n\n        for ( Object item : values ) {\n            decodedValues.add( this.decodeFieldValue( fieldDescriptor, item, componentType, options ) );\n        }\n    }\n\n    protected Object decodeRepeated( Object value, Descriptors.FieldDescriptor fieldDescriptor, Options options, Class<?> type, String genericTypeLabel ) {\n        if ( type.isArray() ) {\n            List<?> values = (List<?>) value;\n            Class<?> componentType = type.getComponentType();\n            Object[] ret = (Object[]) Array.newInstance( type.getComponentType(), values.size() );\n            int i = 0;\n            for ( Object item : values ) {\n                ret[ i ] = this.decodeFieldValue( fieldDescriptor, item, componentType, options );\n                ++i;\n            }\n            return ret;\n        }\n        else if ( Set.class.isAssignableFrom( type ) ) {\n            List<?> values = (List<?>) 
value;\n            Set<Object> decodedValues = new HashSet<>();\n\n            this.setCollectionRepeated( values, decodedValues, genericTypeLabel, fieldDescriptor, options );\n\n            return decodedValues;\n        }\n        else if ( Collection.class.isAssignableFrom( type ) ) {\n            List<?> values = (List<?>) value;\n            List<Object> decodedValues = new ArrayList<>();\n\n            this.setCollectionRepeated( values, decodedValues, genericTypeLabel, fieldDescriptor, options );\n\n            return decodedValues;\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/GenericBeanProtobufEncoder.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.lang.reflect.Array;\nimport java.lang.reflect.InvocationTargetException;\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Modifier;\nimport java.lang.reflect.Type;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.Set;\nimport java.util.TreeMap;\n\nimport com.google.protobuf.ByteString;\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DescriptorProtos;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.system.stereotype.JavaBeans;\nimport com.pinecone.framework.util.ReflectionUtils;\nimport com.pinecone.framework.util.StringUtils;\n\npublic class GenericBeanProtobufEncoder implements BeanProtobufEncoder {\n    @Override\n    public Descriptors.Descriptor transform( Object dynamicObject, Set<String> exceptedKeys, Options options ) {\n        if ( dynamicObject == null ) {\n            return null;\n        }\n        return this.transform( dynamicObject.getClass(), dynamicObject, exceptedKeys, options );\n    }\n\n\n    protected DescriptorProtos.FieldDescriptorProto.Builder transformEntry(\n            String key, Object value, Class<?> valType, String componentGLabel,int fieldNumber, List<Descriptors.FileDescriptor> dependencies,\n            Set<String> exceptedKeys, Options options, String thisKey\n    ) {\n        if ( valType == null ) {\n            valType = value.getClass();\n        }\n\n        DescriptorProtos.FieldDescriptorProto.Type fieldType = valType == null\n                ? 
DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING // Default for null values\n                : this.reinterpret( valType );\n\n\n        DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder;\n        Class<?> elemType = valType;\n        if( value != null ) {\n            elemType = value.getClass();\n        }\n\n        Class<?> dependenceComponentType = null;\n        if( Collection.class.isAssignableFrom( elemType ) ) {\n            if ( value != null ) {\n                Collection co = (Collection) value;\n                if( co.isEmpty() ) {\n                    fieldType = DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING;\n                }\n                else {\n                    fieldType = this.reinterpret( co.iterator().next().getClass() );\n                }\n            }\n            else {\n                if ( componentGLabel == null ) {\n                    throw new IllegalArgumentException( \"None valued argument can`t be transformed.\" );\n                }\n\n                dependenceComponentType = ProtobufUtils.loadSingleGenericType( this.getClass(), componentGLabel );\n                if ( dependenceComponentType != null ) {\n                    fieldType = this.reinterpret( dependenceComponentType );\n                }\n                else {\n                    throw new IllegalArgumentException( \"None valued argument (\" + componentGLabel + \") can`t be transformed.\" );\n                }\n            }\n\n            fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                    .setName( key )\n                    .setNumber( fieldNumber )\n                    .setType( fieldType )\n                    .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED );\n        }\n        else if( elemType.isArray() ) {\n            dependenceComponentType = elemType.getComponentType();\n            fieldType = this.reinterpret( dependenceComponentType );\n\n            
fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                    .setName( key )\n                    .setNumber( fieldNumber )\n                    .setType( this.reinterpret( dependenceComponentType ) )\n                    .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED );\n        }\n        else {\n            fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                    .setName( key )\n                    .setNumber( fieldNumber )\n                    .setType( fieldType );\n        }\n\n        if ( fieldType == DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) {\n            Descriptors.Descriptor nestedDescriptor;\n            if ( dependenceComponentType != null ) {\n                nestedDescriptor = this.transform0( dependenceComponentType, thisKey, value, exceptedKeys, options );\n            }\n            else {\n                nestedDescriptor = this.transform0( valType, thisKey, value, exceptedKeys, options );\n            }\n\n            if ( nestedDescriptor != null ) {\n                fieldBuilder.setTypeName( nestedDescriptor.getFullName() );\n                dependencies.add( nestedDescriptor.getFile() );\n            }\n        }\n\n        return fieldBuilder;\n    }\n\n    protected Descriptors.Descriptor transform0( Map dynamicObject, String thisKey, Set<String> exceptedKeys, Options options ) {\n        if ( dynamicObject == null ) {\n            return null;\n        }\n        try {\n            String szEntityName = thisKey;\n            DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder();\n            List<Descriptors.FileDescriptor> dependencies = new ArrayList<>();\n\n            int fieldNumber = 1;\n\n            for ( Object em : dynamicObject.entrySet() ) {\n                Map.Entry entry = (Map.Entry) em;\n\n                String key = entry.getKey().toString();\n                if ( 
exceptedKeys != null && exceptedKeys.contains( key ) ) {\n                    continue;\n                }\n\n                DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder = this.transformEntry(\n                        key, entry.getValue(), null, null, fieldNumber, dependencies, exceptedKeys, options, szEntityName + \"_\" + key\n                );\n\n                descriptorBuilder.addField( fieldBuilder );\n                ++fieldNumber;\n            }\n\n            descriptorBuilder.setName( szEntityName );\n            Descriptors.FileDescriptor fileDescriptor = this.evalMessageType( dependencies, descriptorBuilder, szEntityName, options );\n            return fileDescriptor.findMessageTypeByName( szEntityName );\n        }\n        catch ( Descriptors.DescriptorValidationException e ) {\n            e.printStackTrace();\n            return null;\n        }\n    }\n\n    protected Descriptors.FileDescriptor evalMessageType (\n            List<Descriptors.FileDescriptor> dependencies, DescriptorProtos.DescriptorProto.Builder descriptorBuilder, String szEntityName, Options options\n    ) throws Descriptors.DescriptorValidationException {\n        Descriptors.FileDescriptor[] dependencyArray = dependencies.toArray( new Descriptors.FileDescriptor[0] );\n        Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(\n                DescriptorProtos.FileDescriptorProto.newBuilder()\n                        .setName( szEntityName + options.getDescriptorFileExtend() )\n                        .addMessageType( descriptorBuilder.build() )\n                        .build(),\n                dependencyArray);\n\n        return fileDescriptor;\n    }\n\n    @Override\n    public Descriptors.Descriptor transform( Map dynamicObject, Set<String> exceptedKeys, Options options ) {\n        return this.transform0( dynamicObject, \"Map_root\", exceptedKeys, options );\n    }\n\n    @Override\n    public 
DescriptorProtos.FieldDescriptorProto.Builder transform( Collection dynamicObject, Class<? > elementType, String key, int fieldNumber, Options options ) {\n        DescriptorProtos.FieldDescriptorProto.Type fieldType = this.reinterpret( elementType );\n\n        return DescriptorProtos.FieldDescriptorProto.newBuilder()\n                .setName( key )\n                .setNumber( fieldNumber )\n                .setType( fieldType )\n                .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED );\n    }\n\n    @Override\n    public Descriptors.Descriptor transformBean( Class<?> clazz, Object dynamicObject, Set<String> exceptedKeys, Options options ) {\n        return this.transformBean0( clazz, \"\", dynamicObject, exceptedKeys, options );\n    }\n\n    protected Descriptors.Descriptor transformBean0( Class<?> clazz, String thisKey, Object dynamicObject, Set<String> exceptedKeys, Options options ) {\n        if ( clazz == null ) {\n            return null;\n        }\n\n        try {\n            String szEntityName = options.formatFileDescType( clazz );\n            DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder();\n            descriptorBuilder.setName( szEntityName );\n\n            List<Descriptors.FileDescriptor> dependencies = new ArrayList<>();\n            boolean includeSuperClass = clazz.getClassLoader() != null;\n            Method[] methods = includeSuperClass ? 
clazz.getMethods() : clazz.getDeclaredMethods();\n\n            Map<String, Method> methodOrderMap = new TreeMap<>(); // Unified methods order accessing all services.\n            for ( int i = 0; i < methods.length; ++i ) {\n                try {\n                    Method method = methods[i];\n                    if ( Modifier.isPublic( method.getModifiers() ) ) {\n                        String key = JavaBeans.getGetterMethodKeyName( method );\n                        if( !StringUtils.isEmpty( key ) ) {\n                            if ( Character.isUpperCase( key.charAt(0) ) && method.getParameterTypes().length == 0 ) {\n                                key = JavaBeans.methodKeyNameLowerCaseNormalize( key );\n\n                                if( exceptedKeys != null && exceptedKeys.contains( key ) ) {\n                                    continue;\n                                }\n\n                                methodOrderMap.put( key, method );\n                            }\n                        }\n                    }\n                }\n                catch ( Exception ignore ) {\n                    ignore.printStackTrace();\n                    // Do nothing.\n                }\n            }\n\n            int fieldNumber = 1;\n            for ( Map.Entry<String, Method> kv: methodOrderMap.entrySet() ) {\n                try {\n                    String key = kv.getKey();\n                    Method method = kv.getValue();\n\n                    Class<?> elemRetType = method.getReturnType();\n                    DescriptorProtos.FieldDescriptorProto.Type fieldType = this.reinterpret( elemRetType );\n\n                    DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder;\n                    Class<?> dependenceComponentType = null;\n                    if( Collection.class.isAssignableFrom( elemRetType ) ) {\n                        Type gt = method.getGenericReturnType();\n                        String[] genericTypeNames = 
ReflectionUtils.extractGenericClassNames( gt.getTypeName() );\n                        if( genericTypeNames != null && genericTypeNames.length > 0 ) {\n                            String genericTypeName = genericTypeNames[ 0 ];\n\n                            if( !genericTypeName.equals( \"?\" ) && !genericTypeName.equals( Object.class.getSimpleName() ) ) {\n                                try {\n                                    dependenceComponentType = this.getClass().getClassLoader().loadClass( genericTypeName );\n                                    fieldType = this.reinterpret( dependenceComponentType );\n                                }\n                                catch ( ClassNotFoundException e ) {\n                                    continue;\n                                }\n                            }\n                        }\n\n                        fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                                .setName( key )\n                                .setNumber( fieldNumber )\n                                .setType( fieldType )\n                                .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED );\n                    }\n                    else if( elemRetType.isArray() && !byte[].class.isAssignableFrom( elemRetType ) ) {\n                        Class<?> componentType = elemRetType.getComponentType();\n                        fieldType = this.reinterpret( componentType );\n                        dependenceComponentType = componentType;\n\n                        fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                                .setName( key )\n                                .setNumber( fieldNumber )\n                                .setType( this.reinterpret( componentType ) )\n                                .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED );\n                    }\n                 
   else {\n                        fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                                .setName( key )\n                                .setNumber( fieldNumber )\n                                .setType( fieldType );\n                    }\n                    fieldNumber++;\n\n\n                    if ( fieldType == DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) {\n                        Class<?> nestedClass = method.getReturnType();\n                        Object dyChild = null;\n\n                        if ( dynamicObject != null ) {\n                            try {\n                                method.setAccessible( true );\n                                dyChild = method.invoke( dynamicObject );\n                            }\n                            catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException e ) {\n                                dyChild = null;\n                            }\n                        }\n\n                        if ( !clazz.equals( nestedClass ) ) {\n                            Descriptors.Descriptor nestedDescriptor;\n                            if ( dependenceComponentType != null ) {\n                                // Array / List can`t uses dynamic object.\n                                nestedDescriptor = this.transform0( dependenceComponentType, szEntityName + \"_\" + key, null, exceptedKeys, options );\n                            }\n                            else {\n                                nestedDescriptor = this.transform0( nestedClass, szEntityName + \"_\" + key, dyChild, exceptedKeys, options );\n                            }\n                            if( nestedDescriptor == null ) {\n                                continue;\n                            }\n                            fieldBuilder.setTypeName( nestedDescriptor.getFullName() );\n                            dependencies.add( 
nestedDescriptor.getFile() );\n                        }\n                        else {\n                            fieldBuilder.setTypeName( szEntityName );\n                        }\n                    }\n\n                    descriptorBuilder.addField( fieldBuilder );\n                }\n                catch ( Exception e ) {\n                    throw new ProtobufEncodeException( e );\n                }\n            }\n\n            Descriptors.FileDescriptor fileDescriptor = this.evalMessageType( dependencies, descriptorBuilder, szEntityName, options );\n            return fileDescriptor.findMessageTypeByName( szEntityName );\n        }\n        catch ( Descriptors.DescriptorValidationException e ) {\n            e.printStackTrace();\n            return null;\n        }\n    }\n\n    @Override\n    public Descriptors.Descriptor transform( Class<?> clazz, Object dynamicObject, Set<String> exceptedKeys, Options options ) {\n        if( dynamicObject instanceof Map ) {\n            return this.transform( (Map) dynamicObject, exceptedKeys, options );\n        }\n\n        return this.transformBean( clazz, dynamicObject, exceptedKeys, options );\n    }\n\n    protected Descriptors.Descriptor transform0( Class<?> clazz, String thisKey, Object dynamicObject, Set<String> exceptedKeys, Options options ) {\n        if( dynamicObject instanceof Map ) {\n            return this.transform0( (Map) dynamicObject, thisKey, exceptedKeys, options );\n        }\n\n        return this.transformBean0( clazz, thisKey, dynamicObject, exceptedKeys, options );\n    }\n\n\n    @Override\n    public DescriptorProtos.FieldDescriptorProto.Type reinterpret( Class<?> type ) {\n        if ( type == int.class || type == Integer.class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32;\n        }\n        else if ( type == long.class || type == Long.class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT64;\n        }\n        else if 
( type == float.class || type == Float.class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_FLOAT;\n        }\n        else if ( type == double.class || type == Double.class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_DOUBLE;\n        }\n        else if ( type == String.class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_STRING;\n        }\n        else if ( type == boolean.class || type == Boolean.class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_BOOL;\n        }\n        else if ( type == byte[].class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_BYTES;\n        }\n        else if ( type == short.class || type == Short.class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32;\n        }\n        else if ( type == byte.class || type == Byte.class ) {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_INT32;\n        }\n        else {\n            return DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE;\n        }\n    }\n\n    @Override\n    public DynamicMessage encode( Descriptors.Descriptor descriptor, Object dynamicObject, Set<String> exceptedKeys, Options options ) {\n        if( PrimitiveWrapper.isSupportedPrimitive( dynamicObject.getClass() ) ) {\n            dynamicObject = PrimitiveWrapper.wrap( dynamicObject );\n        }\n        else if( RepeatedWrapper.isSupportedRepeated( dynamicObject.getClass() ) ) {\n            dynamicObject = RepeatedWrapper.wrap( dynamicObject );\n        }\n        else if( dynamicObject instanceof Map ) {\n            return this.encode( descriptor, (Map) dynamicObject, exceptedKeys, options );\n        }\n\n        return this.encodeBean( descriptor, dynamicObject, exceptedKeys, options );\n    }\n\n    @Override\n    public DynamicMessage encodeBean( Descriptors.Descriptor descriptor, Object dynamicObject, Set<String> exceptedKeys, Options 
options ) {\n        if ( descriptor == null || dynamicObject == null ) {\n            return null;\n        }\n\n        DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder( descriptor );\n\n        for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) {\n            try {\n                String fieldName = fieldDescriptor.getName();\n\n                // Skip excluded keys\n                if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) {\n                    continue;\n                }\n\n                try {\n\n                    String szGetterMethod = JavaBeans.MethodMajorKeyGet + JavaBeans.methodKeyNameUpperCaseNormalize( fieldName );\n                    Method         getter ;\n                    try {\n                        getter = dynamicObject.getClass().getMethod( szGetterMethod );\n                    }\n                    catch ( NoSuchMethodException e ) {\n                        getter = null;\n                    }\n\n                    if ( getter == null && fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.BOOL ) {\n                        szGetterMethod = JavaBeans.MethodMajorKeyIs + JavaBeans.methodKeyNameUpperCaseNormalize( fieldName );\n                        getter = dynamicObject.getClass().getMethod( szGetterMethod );\n                    }\n\n                    if ( getter != null ) {\n                        Object value = getter.invoke( dynamicObject );\n\n                        if ( value != null ) {\n                            if ( fieldDescriptor.isRepeated() ) {\n                                if ( value instanceof Collection<?> ) {\n                                    Collection<?> collection = (Collection<?>) value;\n                                    if ( !collection.isEmpty() ) {\n                                        Class<?> componentType = collection.iterator().next().getClass();\n                                        if ( 
componentType.isPrimitive() ) {\n                                            for ( Object item : (Collection<?>) value ) {\n                                                messageBuilder.addRepeatedField( fieldDescriptor, this.reinterpretFieldValue( item, fieldDescriptor.getType() ) );\n                                            }\n                                        }\n                                        else {\n                                            if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) {\n                                                Descriptors.Descriptor componentDesc = fieldDescriptor.getMessageType();\n                                                for ( Object item : collection ) {\n                                                    DynamicMessage dynamicMessage = this.encode( componentDesc, item, exceptedKeys, options );\n                                                    messageBuilder.addRepeatedField( fieldDescriptor, dynamicMessage );\n                                                }\n                                            }\n                                            else {\n                                                for ( Object item : collection ) {\n                                                    messageBuilder.addRepeatedField( fieldDescriptor, this.reinterpretFieldValue( item, fieldDescriptor.getType() ) );\n                                                }\n                                            }\n                                        }\n                                    }\n                                }\n                                else if ( value.getClass().isArray() ) {\n                                    Class<?> componentType = value.getClass().getComponentType();\n                                    if ( componentType.isPrimitive() ) {\n                                        int length = Array.getLength( value );\n                                   
     for ( int i = 0; i < length; ++i ) {\n                                            Object element = Array.get( value, i );\n                                            messageBuilder.addRepeatedField( fieldDescriptor, this.reinterpretFieldValue(element, fieldDescriptor.getType()) );\n                                        }\n                                    }\n                                    else {\n                                        if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) {\n                                            Descriptors.Descriptor componentDesc = fieldDescriptor.getMessageType();\n                                            for ( Object item : (Object[]) value ) {\n                                                DynamicMessage dynamicMessage = this.encode( componentDesc, item, exceptedKeys, options );\n                                                messageBuilder.addRepeatedField( fieldDescriptor, dynamicMessage );\n                                            }\n                                        }\n                                        else {\n                                            for ( Object item : (Object[]) value ) {\n                                                messageBuilder.addRepeatedField( fieldDescriptor, this.reinterpretFieldValue( item, fieldDescriptor.getType() ) );\n                                            }\n                                        }\n                                    }\n                                }\n                                else {\n                                    throw new IllegalArgumentException( \"Expected a Collection for repeated field: \" + fieldName );\n                                }\n                            }\n                            else {\n                                this.encodeElement( fieldDescriptor, messageBuilder, value, exceptedKeys, options );\n                            }\n                        
}\n                    }\n                }\n                catch ( IllegalAccessException | IllegalArgumentException | InvocationTargetException | NoSuchMethodException ignore ) {\n                    //ignore.printStackTrace();\n                }\n            }\n            catch ( Exception e ) {\n                // Log and continue processing other fields\n                e.printStackTrace();\n            }\n        }\n\n        return messageBuilder.build();\n    }\n\n    protected void encodeElement( Descriptors.FieldDescriptor fieldDescriptor, DynamicMessage.Builder messageBuilder, Object value, Set<String> exceptedKeys, Options options ) {\n        if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) {\n            Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType();\n            messageBuilder.setField( fieldDescriptor, this.encode( nestedDescriptor, value, exceptedKeys, options ) );\n        }\n        else {\n            messageBuilder.setField( fieldDescriptor, this.reinterpretFieldValue( value, fieldDescriptor.getType() ) );\n        }\n    }\n\n    @Override\n    public DynamicMessage encode( Descriptors.Descriptor descriptor, Map dynamicObject, Set<String> exceptedKeys, Options options ) {\n        if ( descriptor == null || dynamicObject == null ) {\n            return null;\n        }\n\n        try {\n            DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder( descriptor );\n\n            for ( Object em : dynamicObject.entrySet() ) {\n                Map.Entry entry = (Map.Entry) em;\n\n                this.encodeEntry( descriptor, entry.getKey().toString(), entry.getValue(), messageBuilder, exceptedKeys, options );\n            }\n\n            return messageBuilder.build();\n        }\n        catch ( Exception e ) {\n            e.printStackTrace();\n            return null;\n        }\n    }\n\n    protected Object encodeRepeatedValue ( Descriptors.FieldDescriptor 
fieldDescriptor, Object val, Set<String> exceptedKeys, Options options ) {\n        if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) {\n            Descriptors.Descriptor componentDesc = fieldDescriptor.getMessageType();\n            return this.encode( componentDesc, val, exceptedKeys, options );\n        }\n        else {\n            return this.reinterpretFieldValue( val, fieldDescriptor.getType() );\n        }\n    }\n\n    public void encodeEntry( Descriptors.Descriptor descriptor, String key, Object value, DynamicMessage.Builder messageBuilder, Set<String> exceptedKeys, Options options ) {\n        if ( exceptedKeys != null && exceptedKeys.contains( key ) ) {\n            return;\n        }\n\n        Descriptors.FieldDescriptor fieldDescriptor = descriptor.findFieldByName( key );\n        if ( fieldDescriptor == null ) {\n            return;\n        }\n\n        if ( value == null ) {\n            if ( fieldDescriptor.isRepeated() ) {\n                messageBuilder.setField( fieldDescriptor, List.of() );\n            }\n            else {\n                messageBuilder.clearField( fieldDescriptor );\n            }\n        }\n        else if ( fieldDescriptor.isRepeated() ) {\n            List<Object> values = new ArrayList<>();\n            if ( value instanceof Collection ) {\n                for ( Object item : (Collection<?>) value ) {\n                    values.add( this.encodeRepeatedValue( fieldDescriptor, item, exceptedKeys, options ) );\n                }\n            }\n            else if ( value.getClass().isArray() ) {\n                for ( int i = 0; i < Array.getLength( value ); i++ ) {\n                    values.add( this.encodeRepeatedValue( fieldDescriptor, Array.get( value, i ), exceptedKeys, options ) );\n                }\n            }\n            messageBuilder.setField( fieldDescriptor, values );\n        }\n        else {\n            this.encodeElement( fieldDescriptor, messageBuilder, value, 
exceptedKeys, options );\n        }\n    }\n\n    protected Object reinterpretFieldValue( Object value, Descriptors.FieldDescriptor.Type fieldType ) {\n        switch ( fieldType ) {\n            case SINT32:\n            case SFIXED32:\n            case INT32: {\n                return ((Number) value).intValue();\n            }\n            case INT64:\n            case SINT64:\n            case SFIXED64: {\n                return ((Number) value).longValue();\n            }\n            case FLOAT: {\n                return ((Number) value).floatValue();\n            }\n            case DOUBLE: {\n                return ((Number) value).doubleValue();\n            }\n            case STRING: {\n                return value.toString();\n            }\n            case BOOL: {\n                return (Boolean) value;\n            }\n            case BYTES: {\n                return ByteString.copyFrom( (byte[]) value );\n            }\n            default: {\n                return value;\n            }\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/GenericFieldProtobufDecoder.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.util.ArrayList;\nimport java.util.LinkedHashMap;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.lang.field.FieldEntity;\nimport com.pinecone.framework.unit.KeyValue;\n\npublic class GenericFieldProtobufDecoder extends GenericBeanProtobufDecoder implements FieldProtobufDecoder {\n\n    @Override\n    public Map.Entry<String, Object>[] decodeEntries( Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String> exceptedKeys, Options options ) {\n        if ( descriptor == null || dynamicMessage == null ) {\n            return null;\n        }\n\n        List<Descriptors.FieldDescriptor> fieldDescriptors = descriptor.getFields();\n\n        @SuppressWarnings( \"unchecked\" )\n        Map.Entry<String, Object>[] result = new Map.Entry[ fieldDescriptors.size() ];\n\n        int i = 0;\n        for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) {\n            try {\n                String fieldName = fieldDescriptor.getName();\n\n                // Skip excluded keys\n                if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) {\n                    continue;\n                }\n\n                Object value = ProtobufUtils.evalValue( dynamicMessage, fieldDescriptor );\n\n                if ( value != null ) {\n                    if ( fieldDescriptor.isRepeated() ) {\n                        List<?> values = (List<?>) value;\n                        List<Object> decodedValues = new ArrayList<>();\n                        for ( Object item : values ) {\n                            decodedValues.add( this.decodeFieldValue( fieldDescriptor, item, item.getClass(), options ) );\n                        }\n\n                        result[ i ] = new KeyValue<>( fieldName, decodedValues );\n               
     }\n                    else if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) {\n                        Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType();\n                        result[ i ] = new KeyValue<>( fieldName, this.decodeMap( LinkedHashMap.class, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options ) );\n                    }\n                    else {\n                        result[ i ] = new KeyValue<>( fieldName, this.decodeFieldValue( fieldDescriptor, value, value.getClass(), options ) );\n                    }\n                }\n\n                ++i;\n            }\n            catch ( Exception e ) {\n                e.printStackTrace();\n            }\n        }\n\n        return result;\n    }\n\n    @Override\n    public void decodeEntries( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String> exceptedKeys, Options options ) {\n        this.decodeEntries0( entities, descriptor, dynamicMessage, exceptedKeys, options, false );\n    }\n\n\n    @Override\n    public Object[] decodeValues( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String> exceptedKeys, Options options ) {\n        return this.decodeEntries0( entities, descriptor, dynamicMessage, exceptedKeys, options, true );\n    }\n\n    protected Object[] decodeEntries0( FieldEntity[] entities, Descriptors.Descriptor descriptor, DynamicMessage dynamicMessage, Set<String> exceptedKeys, Options options, boolean bEvalValue ) {\n        if ( descriptor == null || dynamicMessage == null || entities == null ) {\n            return null;\n        }\n\n        try {\n            int i = 0;\n            Object[] vals = null;\n            if( bEvalValue ) {\n                vals = new Object[ entities.length ];\n            }\n\n            for ( Descriptors.FieldDescriptor fieldDescriptor : descriptor.getFields() ) {\n                
String fieldName = fieldDescriptor.getName();\n\n                if ( exceptedKeys != null && exceptedKeys.contains( fieldName ) ) {\n                    continue;\n                }\n\n                Object value = ProtobufUtils.evalValue( dynamicMessage, fieldDescriptor );\n\n                if ( value != null ) {\n                    FieldEntity entity = entities[ i ];\n\n                    if ( fieldDescriptor.isRepeated() ) {\n                        Object decodedValues = this.decodeRepeated( value, fieldDescriptor, options, entity.getType(), entity.getGenericTypeLabel() );\n                        entity.setValue( decodedValues );\n                    }\n                    else if ( fieldDescriptor.getType() == Descriptors.FieldDescriptor.Type.MESSAGE ) {\n                        Descriptors.Descriptor nestedDescriptor = fieldDescriptor.getMessageType();\n                        Object nestedBean;\n                        Class<?> nestedType = entity.getType();\n                        if( Map.class.isAssignableFrom( nestedType ) ) {\n                            if( nestedType.isInterface() && Map.class.isAssignableFrom( nestedType ) ) {\n                                nestedType = options.getDefaultMapType();\n                            }\n                            nestedBean = this.decodeMap( nestedType, nestedDescriptor, (DynamicMessage) value, exceptedKeys, options );\n                        }\n                        else {\n                            nestedBean = this.decode( nestedType, entity.getGenericTypeLabel(), nestedDescriptor, (DynamicMessage) value, exceptedKeys, options );\n                        }\n\n                        entity.setValue( nestedBean );\n                    }\n                    else {\n                        entity.setValue( this.decodeFieldValue( fieldDescriptor, value, entity.getType(), options ) );\n                    }\n\n                    if( bEvalValue ) {\n                        vals[ i ] = 
entity.getValue();\n                    }\n                }\n                ++i;\n            }\n\n            return vals;\n        }\n        catch ( Exception e ) {\n            e.printStackTrace();\n            return null;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/GenericFieldProtobufEncoder.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;\n\nimport com.google.protobuf.DescriptorProtos;\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.lang.field.FieldEntity;\n\npublic class GenericFieldProtobufEncoder extends GenericBeanProtobufEncoder implements FieldProtobufEncoder {\n    @Override\n    public Descriptors.Descriptor transform( Map.Entry<String, Object>[] fields, String szEntityName, Set<String> exceptedKeys, Options options ) {\n        if ( fields == null ) {\n            return null;\n        }\n        try {\n            DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder();\n            List<Descriptors.FileDescriptor> dependencies = new ArrayList<>();\n\n            int fieldNumber = 1;\n\n            for ( Map.Entry entry : fields ) {\n                String key = entry.getKey().toString();\n                if ( exceptedKeys != null && exceptedKeys.contains( key ) ) {\n                    continue;\n                }\n\n                descriptorBuilder.addField( this.transformEntry(\n                        key, entry.getValue(), null, null, fieldNumber, dependencies, exceptedKeys, options, szEntityName + \"_\" + key\n                ) );\n                ++fieldNumber;\n            }\n\n            descriptorBuilder.setName( szEntityName );\n            Descriptors.FileDescriptor fileDescriptor = this.evalMessageType( dependencies, descriptorBuilder, szEntityName, options );\n            return fileDescriptor.findMessageTypeByName( szEntityName );\n        }\n        catch ( Descriptors.DescriptorValidationException e ) {\n            e.printStackTrace();\n            return null;\n        }\n    }\n\n    @Override\n    public Descriptors.Descriptor transform( FieldEntity[] fields, String szEntityName, Set<String> 
exceptedKeys, Options options ) {\n        if ( fields == null ) {\n            return null;\n        }\n        try {\n            DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder();\n            List<Descriptors.FileDescriptor> dependencies = new ArrayList<>();\n\n            int fieldNumber = 1;\n\n            for ( FieldEntity entry : fields ) {\n                String key = entry.getName();\n                if ( exceptedKeys != null && exceptedKeys.contains( key ) ) {\n                    continue;\n                }\n\n                descriptorBuilder.addField( this.transformEntry(\n                        key, entry.getValue(), entry.getType(), entry.getGenericTypeLabel(), fieldNumber, dependencies, exceptedKeys, options, szEntityName + \"_\" + key\n                ) );\n                ++fieldNumber;\n            }\n\n            descriptorBuilder.setName( szEntityName );\n            Descriptors.FileDescriptor fileDescriptor = this.evalMessageType( dependencies, descriptorBuilder, szEntityName, options );\n            return fileDescriptor.findMessageTypeByName( szEntityName );\n        }\n        catch ( Descriptors.DescriptorValidationException e ) {\n            e.printStackTrace();\n            return null;\n        }\n    }\n\n    @Override\n    public DynamicMessage encode( Descriptors.Descriptor descriptor, Map.Entry<String, Object>[] fields, Set<String> exceptedKeys, Options options ) {\n        if ( descriptor == null || fields == null ) {\n            return null;\n        }\n\n        try {\n            DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder( descriptor );\n\n            for ( Map.Entry entry : fields ) {\n                this.encodeEntry( descriptor, entry.getKey().toString(), entry.getValue(), messageBuilder, exceptedKeys, options );\n            }\n\n            return messageBuilder.build();\n        }\n        catch ( Exception e ) {\n            
e.printStackTrace();\n            return null;\n        }\n    }\n\n    @Override\n    public DynamicMessage encode( Descriptors.Descriptor descriptor, FieldEntity[] fields, Set<String> exceptedKeys, Options options ) {\n        if ( descriptor == null || fields == null ) {\n            return null;\n        }\n\n        try {\n            DynamicMessage.Builder messageBuilder = DynamicMessage.newBuilder( descriptor );\n\n            for ( FieldEntity entry : fields ) {\n                this.encodeEntry( descriptor, entry.getName(), entry.getValue(), messageBuilder, exceptedKeys, options );\n            }\n\n            return messageBuilder.build();\n        }\n        catch ( Exception e ) {\n            e.printStackTrace();\n            return null;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/Options.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.util.LinkedHashMap;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class Options implements Pinenut {\n    public static final FileDescriptorFormater DefaultFileDescriptorFormater = new FileDescriptorFormater() {\n        @Override\n        public String format( Class<?> type ) {\n            String neo = type.getName().replace( '.', '_' );\n            if( neo.startsWith( \"[\" ) ) {\n                neo = neo.replace( \"[\", \"\" );\n                neo += WolfProtobufConstants.ArrayTransformedName;\n            }\n            return neo.replaceAll( \"[^a-zA-Z0-9_]\", \"_\" );\n        }\n    };\n\n    public static final FileDescriptorFormater DefaultFileDescriptorSimpleFormater = new FileDescriptorFormater() {\n        @Override\n        public String format( Class<?> type ) {\n            return type.getSimpleName();\n        }\n    };\n\n    public static final DescriptorNameNormalizer UnderlineDescriptorNameNormalizer = new DescriptorNameNormalizer() {\n        @Override\n        public String normalize( String bad ) {\n            if ( bad == null ) {\n                return null;\n            }\n            return bad.replaceAll( \"[^a-zA-Z0-9_]\", \"_\" );\n        }\n    };\n\n    public static final Class<?> DefaultMapType = LinkedHashMap.class;\n\n    public static final String DescriptorFileExtend = \"$File\";\n\n    public static final Options DefaultOptions = new Options();\n\n    public static final Options DefaultSimpleOptions = new Options() {\n        @Override\n        public String formatFileDescType( Class<?> type ) {\n            return this.formatFileDescType( type, Options.DefaultFileDescriptorSimpleFormater );\n        }\n    };\n\n    protected FileDescriptorFormater    mFileDescriptorFormater;\n\n    protected String                    mszDescriptorFileExtend;\n\n    protected DescriptorNameNormalizer  mDescriptorNameNormalizer;\n\n    protected 
Class<?>                  mDefaultMapType;\n\n    public Options( FileDescriptorFormater formater, String szDescriptorFileExtend,  Class<?> defaultMapType ) {\n        this.mFileDescriptorFormater   = formater;\n        this.mszDescriptorFileExtend   = szDescriptorFileExtend;\n        this.mDefaultMapType           = defaultMapType;\n        this.mDescriptorNameNormalizer = Options.UnderlineDescriptorNameNormalizer;\n    }\n\n    public Options() {\n        this( Options.DefaultFileDescriptorFormater, Options.DescriptorFileExtend, Options.DefaultMapType );\n    }\n\n    public String formatFileDescType( Class<?> type, FileDescriptorFormater formater ) {\n        return formater.format( type );\n    }\n\n    public String formatFileDescType( Class<?> type ) {\n        return this.formatFileDescType( type, this.mFileDescriptorFormater );\n    }\n\n    public String getDescriptorFileExtend() {\n        return this.mszDescriptorFileExtend;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    public <T> Class<T> getDefaultMapType() {\n        return (Class<T>) this.mDefaultMapType;\n    }\n\n    public void setDescriptorNameNormalizer( DescriptorNameNormalizer descriptorNameNormalizer ) {\n        this.mDescriptorNameNormalizer = descriptorNameNormalizer;\n    }\n\n    public Options applyDescriptorNameNormalizer( DescriptorNameNormalizer descriptorNameNormalizer ) {\n        this.mDescriptorNameNormalizer = descriptorNameNormalizer;\n        return this;\n    }\n\n    public String normalizeDescriptorName( String szName ) {\n        return this.mDescriptorNameNormalizer.normalize( szName );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/PrimitiveWrapper.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport com.google.protobuf.DescriptorProtos;\nimport com.google.protobuf.Descriptors;\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class PrimitiveWrapper<T> implements Pinenut {\n    public final static String FieldName = \"value\";\n\n    private final T value;\n\n    public PrimitiveWrapper( T value ) {\n        if ( value == null || PrimitiveWrapper.isSupportedPrimitive(value) ) {\n            this.value = value;\n        }\n        else {\n            throw new IllegalArgumentException( \"Unsupported primitive type: \" + value.getClass() );\n        }\n    }\n\n    public T getValue() {\n        return this.value;\n    }\n\n    public boolean isPrimitive() {\n        return this.value == null || PrimitiveWrapper.isSupportedPrimitive(this.value);\n    }\n\n    public static boolean isSupportedPrimitive( Object obj ) {\n        return PrimitiveWrapper.isSupportedPrimitive( obj.getClass() );\n    }\n\n    public static boolean isSupportedPrimitive( Class<?> obj ) {\n        return obj.equals( String.class ) || !BeanProtobufEncoder.DefaultEncoder.reinterpret( obj ).equals( DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) ;\n    }\n\n    public static <T> PrimitiveWrapper<T> wrap( T val ) {\n        return new PrimitiveWrapper<>( val );\n    }\n\n    public DescriptorProtos.FieldDescriptorProto.Type reinterpret() {\n        return BeanProtobufEncoder.DefaultEncoder.reinterpret( this.value.getClass() );\n    }\n\n    public Descriptors.Descriptor transform() {\n        return PrimitiveWrapper.transform( this.value.getClass() );\n    }\n\n    public static Descriptors.Descriptor transform( Class<?> elemClass ) {\n        try{\n            DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder();\n            String szEntityName = PrimitiveWrapper.class.getSimpleName() + \"_\" + elemClass.getSimpleName();\n            
descriptorBuilder.setName( szEntityName );\n\n            DescriptorProtos.FieldDescriptorProto.Type fieldType = BeanProtobufEncoder.DefaultEncoder.reinterpret( elemClass );\n\n            DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                    .setName( PrimitiveWrapper.FieldName )\n                    .setNumber( 1 )\n                    .setType( fieldType );\n\n            descriptorBuilder.addField( fieldBuilder );\n\n            Descriptors.FileDescriptor fileDescriptor = Descriptors.FileDescriptor.buildFrom(\n                    DescriptorProtos.FileDescriptorProto.newBuilder()\n                            .setName( szEntityName + \"$FILE\" )\n                            .addMessageType( descriptorBuilder.build() )\n                            .build(),\n                    new Descriptors.FileDescriptor[0]);\n\n            return fileDescriptor.findMessageTypeByName( szEntityName );\n        }\n        catch ( Descriptors.DescriptorValidationException e ) {\n            return null;\n        }\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/ProtobufEncodeException.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class ProtobufEncodeException extends PineRuntimeException {\n    public ProtobufEncodeException    () {\n        super();\n    }\n\n    public ProtobufEncodeException    ( String message ) {\n        super(message);\n    }\n\n    public ProtobufEncodeException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public ProtobufEncodeException    ( Throwable cause ) {\n        super(cause);\n    }\n\n    protected ProtobufEncodeException ( String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace ) {\n        super( message, cause, enableSuppression, writableStackTrace );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/ProtobufUtils.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.lang.reflect.Method;\nimport java.lang.reflect.Type;\n\nimport com.google.protobuf.Descriptors;\nimport com.google.protobuf.DynamicMessage;\nimport com.pinecone.framework.util.ReflectionUtils;\n\npublic final class ProtobufUtils {\n\n    public static Object evalValue(DynamicMessage dynamicMessage, Descriptors.FieldDescriptor fieldDescriptor ) {\n        if ( !fieldDescriptor.isRepeated() ) {\n            boolean bHasField = dynamicMessage.hasField( fieldDescriptor );\n            if ( !bHasField ) {\n                return null;\n            }\n        }\n        return dynamicMessage.getField( fieldDescriptor );\n    }\n\n    static Class<?> loadSingleGenericType( Class<?> parent, String componentGenericLabel ) {\n        try {\n            String[] genericTypeNames = ReflectionUtils.extractGenericClassNames( componentGenericLabel );\n            if( genericTypeNames != null && genericTypeNames.length > 0 ) {\n                String genericTypeName = genericTypeNames[ 0 ];\n\n                if( !genericTypeName.equals( \"?\" ) && !genericTypeName.equals( Object.class.getSimpleName() ) ) {\n                    return parent.getClassLoader().loadClass( genericTypeName );\n                }\n            }\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n\n        return null;\n    }\n\n    public static String evalSetterGenericLabel( Method setter ) {\n        Type[] gType   = setter.getGenericParameterTypes();\n        String szGType ;\n        if ( gType.length > 0 ) {\n            szGType = gType[ 0 ].getTypeName();\n        }\n        else {\n            szGType = null;\n        }\n        return szGType;\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/RepeatedWrapper.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\nimport java.util.Collection;\nimport java.util.Set;\n\nimport com.google.protobuf.DescriptorProtos;\nimport com.google.protobuf.Descriptors;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.Units;\nimport com.pinecone.framework.util.ClassUtils;\n\npublic class RepeatedWrapper<T> implements Pinenut {\n    public final static String FieldName = \"values\";\n\n    private T values;\n\n    private final Class<?> componentType;\n\n    public RepeatedWrapper( T value, Class<?> componentType ) {\n        if ( value == null || RepeatedWrapper.isSupportedRepeated(value) ) {\n            this.values = value;\n            this.componentType = componentType;\n        }\n        else {\n            throw new IllegalArgumentException( \"Unsupported repeated type: \" + value.getClass() );\n        }\n    }\n\n    public RepeatedWrapper( T value ) {\n        this( value, value.getClass().getComponentType() );\n    }\n\n    public T getValues() {\n        return this.values;\n    }\n\n    public void setValues( T values ) {\n        this.values = values;\n    }\n\n    public Class<?> getComponentType() {\n        return this.componentType;\n    }\n\n    public boolean isRepeated() {\n        return this.values == null || RepeatedWrapper.isSupportedRepeated(this.values);\n    }\n\n    public static boolean isSupportedRepeated( Object obj ) {\n        return RepeatedWrapper.isSupportedRepeated( obj.getClass() );\n    }\n\n    public static boolean isSupportedRepeated( Class<?> obj ) {\n        return obj.isArray() || Collection.class.isAssignableFrom( obj ) || !BeanProtobufEncoder.DefaultEncoder.reinterpret( obj ).equals( DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) ;\n    }\n\n    public static <T> RepeatedWrapper<T> wrap( T val ) {\n        return new RepeatedWrapper<>( val );\n    }\n\n    public static <T> RepeatedWrapper<T> wrap( T val, Class<?> componentType ) {\n 
       return new RepeatedWrapper<>( val, componentType );\n    }\n\n    public Descriptors.Descriptor transform( BeanProtobufEncoder encoder ) {\n        return RepeatedWrapper.transform( this.values.getClass(), this.componentType, encoder );\n    }\n\n    public static Descriptors.Descriptor transform( Class<?> elemClass, Class<?> componentType, BeanProtobufEncoder encoder ) {\n        try{\n            DescriptorProtos.DescriptorProto.Builder descriptorBuilder = DescriptorProtos.DescriptorProto.newBuilder();\n            String szEntityName = RepeatedWrapper.class.getSimpleName() + \"_\" + elemClass.getSimpleName();\n            szEntityName = szEntityName.replace( ClassUtils.ARRAY_SUFFIX, WolfProtobufConstants.ArrayTransformedName );\n            descriptorBuilder.setName( szEntityName );\n\n            DescriptorProtos.FieldDescriptorProto.Type fieldType = BeanProtobufEncoder.DefaultEncoder.reinterpret( componentType );\n\n            DescriptorProtos.FieldDescriptorProto.Builder fieldBuilder = DescriptorProtos.FieldDescriptorProto.newBuilder()\n                    .setName( RepeatedWrapper.FieldName )\n                    .setNumber( 1 )\n                    .setType( fieldType )\n                    .setLabel( DescriptorProtos.FieldDescriptorProto.Label.LABEL_REPEATED );\n\n            Descriptors.FileDescriptor[] objectDep = new Descriptors.FileDescriptor[0];\n            if ( fieldType == DescriptorProtos.FieldDescriptorProto.Type.TYPE_MESSAGE ) {\n                Descriptors.Descriptor nestedDescriptor = encoder.transform( componentType, null,null, Units.emptySet() );\n\n                if ( nestedDescriptor != null ) {\n                    fieldBuilder.setTypeName( nestedDescriptor.getFullName() );\n                    objectDep = new Descriptors.FileDescriptor[] { nestedDescriptor.getFile() };\n                }\n            }\n            descriptorBuilder.addField( fieldBuilder );\n\n            Descriptors.FileDescriptor fileDescriptor = 
Descriptors.FileDescriptor.buildFrom(\n                    DescriptorProtos.FileDescriptorProto.newBuilder()\n                            .setName( szEntityName + \"$REPEATED_FILE\" )\n                            .addMessageType( descriptorBuilder.build() )\n                            .build(),\n                    objectDep);\n\n            return fileDescriptor.findMessageTypeByName( szEntityName );\n        }\n        catch ( Descriptors.DescriptorValidationException e ) {\n            return null;\n        }\n    }\n\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/protobuf/WolfProtobufConstants.java",
    "content": "package com.pinecone.ulf.util.protobuf;\n\npublic final class WolfProtobufConstants {\n    public static final String ArrayTransformedName = \"_ARRAY\";\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/template/UTRFreeMarkerProvider.java",
    "content": "package com.pinecone.ulf.util.template;\n\nimport java.io.IOException;\nimport java.io.StringWriter;\nimport java.io.Writer;\nimport java.util.Map;\n\nimport freemarker.template.Configuration;\nimport freemarker.template.Template;\nimport freemarker.template.TemplateException;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.system.prototype.Objectom;\nimport com.pinecone.framework.util.template.UniformTemplateRenderer;\n\npublic class UTRFreeMarkerProvider implements UniformTemplateRenderer {\n    private Configuration configuration;\n\n    public UTRFreeMarkerProvider( Configuration configuration ) {\n        this.configuration = configuration;\n    }\n\n    public UTRFreeMarkerProvider() {\n        this( new Configuration(Configuration.VERSION_2_3_31) );\n    }\n\n    protected Map<String, Object> makeContext( Objectom vars ) {\n        Object proto = vars.prototype().proto();\n        if (proto instanceof Map) {\n            return (Map<String, Object>) proto;\n        }\n        else {\n            return vars.toMap();\n        }\n    }\n\n    @Override\n    public String render(String tpl, Objectom vars) {\n        try ( StringWriter writer = new StringWriter() ) {\n            this.render( tpl, vars, writer );\n            return writer.toString();\n        }\n        catch ( IOException e ) {\n            throw new ProxyProvokeHandleException( \"Error while rendering template\", e );\n        }\n    }\n\n    @Override\n    public void render( String tpl, Objectom vars, Writer writer ) {\n        try {\n            Template template = new Template( \"anonymous\", tpl, this.configuration );\n            Map<String, Object> context = this.makeContext(vars);\n            template.process( context, writer );\n        }\n        catch ( TemplateException | IOException e ) {\n            throw new ProxyProvokeHandleException( \"Error while rendering template\", e );\n        }\n    }\n}"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/main/java/com/pinecone/ulf/util/template/UTRThymeleafProvider.java",
    "content": "package com.pinecone.ulf.util.template;\n\nimport java.io.Writer;\nimport java.util.Map;\n\nimport org.thymeleaf.TemplateEngine;\nimport org.thymeleaf.context.Context;\nimport org.thymeleaf.templateresolver.ITemplateResolver;\nimport org.thymeleaf.templateresolver.StringTemplateResolver;\n\nimport com.pinecone.framework.system.prototype.Objectom;\nimport com.pinecone.framework.util.template.UniformTemplateRenderer;\n\npublic class UTRThymeleafProvider implements UniformTemplateRenderer {\n    protected TemplateEngine templateEngine;\n\n    public UTRThymeleafProvider( TemplateEngine engine ) {\n        this.templateEngine = engine;\n    }\n\n    public UTRThymeleafProvider( TemplateEngine engine, ITemplateResolver resolver ) {\n        this( engine );\n        this.templateEngine.setTemplateResolver( resolver );\n    }\n\n    public UTRThymeleafProvider() {\n        this( new TemplateEngine(), new StringTemplateResolver() );\n    }\n\n    protected Context makeContext( Objectom vars ) {\n        Context context = new Context();\n\n        Object proto = vars.prototype().proto();\n        if( proto instanceof Map ) {\n            context.setVariables( (Map)proto );\n        }\n        else {\n            context.setVariables( vars.toMap() );\n        }\n\n        return context;\n    }\n\n    @Override\n    public String render( String tpl, Objectom vars ) {\n        Context context = this.makeContext( vars );\n        return this.templateEngine.process( tpl, context );\n    }\n\n    @Override\n    public void render( String tpl, Objectom vars, Writer writer ) {\n        Context context = this.makeContext( vars );\n        this.templateEngine.process( tpl, context, writer );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/test/java/com/TestBson.java",
    "content": "package com;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.ulf.util.bson.UlfJSONCompiler;\nimport com.pinecone.ulf.util.bson.UlfJSONDecompiler;\n\nimport java.io.FileInputStream;\nimport java.io.FileOutputStream;\nimport java.io.InputStream;\nimport java.io.OutputStream;\n\npublic class TestBson {\n    public static void testCompiler() throws Exception {\n        UlfJSONCompiler jsonCompiler = new UlfJSONCompiler();\n        JSONObject object = new JSONMaptron( \"{ key:'ssss jesus christ, hahahaha', int64:64, float64:3.1415926, bool: false, 'null': null, next: { arr: ['ha', 'xi', { k: true, a: [], obj:{} }] } }\" );\n\n        try( OutputStream os = new FileOutputStream( \"E:/test.bson\" ) ){\n            jsonCompiler.compile( object, os );\n        }\n    }\n\n    public static void testDecompiler() throws Exception {\n        InputStream is = new FileInputStream( \"E:/test.bson\" );\n        UlfJSONDecompiler decompiler = new UlfJSONDecompiler( is );\n\n        Object jo = decompiler.nextValue();\n\n        Debug.trace( jo );\n    }\n\n    public static void main( String[] args ) throws Exception {\n        //String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init( (Object...cfg )->{\n\n            //TestBson.testCompiler();\n            TestBson.testDecompiler();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Pinecones/Ulfhedinn/src/test/java/com/UTRTests.java",
    "content": "package com;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.template.UniformTemplateRenderer;\nimport com.pinecone.ulf.util.template.UTRFreeMarkerProvider;\nimport com.pinecone.ulf.util.template.UTRThymeleafProvider;\n\nimport org.junit.jupiter.api.Test;\nimport org.thymeleaf.TemplateEngine;\nimport org.thymeleaf.context.Context;\nimport org.thymeleaf.templateresolver.StringTemplateResolver;\n\nimport java.util.HashMap;\nimport java.util.Map;\n\nimport freemarker.template.Configuration;\n\nclass DummyBean {\n    private String key1;\n\n    public String getKey1() {\n        return this.key1;\n    }\n\n    public void setKey1( String key1 ) {\n        this.key1 = key1;\n    }\n}\n\npublic class UTRTests {\n    @Test\n    void testThymeleaf() {\n        UniformTemplateRenderer renderer = new UTRThymeleafProvider();\n//        Map<String, Object> variables = new HashMap<>();\n//        variables.put(\"key1\", \"Test\");\n\n        DummyBean variables = new DummyBean();\n        variables.setKey1( \"BeanTest\" );\n\n        String jsonTemplate = \"{ \\\"name\\\": \\\"[(${key1})]\\\" }\";\n        Debug.trace( renderer.render( jsonTemplate, variables ) );\n    }\n\n    @Test\n    void testAlmondUTR() {\n        UniformTemplateRenderer renderer = UniformTemplateRenderer.DefaultRenderer;\n\n        Map<String, Object> variables = new HashMap<>();\n        variables.put(\"key1\", \"Test\");\n        Debug.trace( renderer.render( \"{ \\\"name\\\": \\\"${key1}\\\" }\", variables ) );\n    }\n\n    @Test\n    void testFreemarker() {\n        Configuration cfg = new Configuration( Configuration.VERSION_2_3_31 );\n        cfg.setDefaultEncoding(\"UTF-8\");\n\n        UTRFreeMarkerProvider provider = new UTRFreeMarkerProvider( cfg );\n\n        Map<String, Object> variables = new JSONMaptron( \"{ name: test, age: 30, more: { key: 1.364 } }\" );\n\n        String 
templateContent = \"{ \\\"name\\\": \\\"${name}\\\", \\\"age\\\": ${age}, \\\"more.key\\\": ${more.key} }\";\n\n        String result = provider.render(templateContent, variables);\n        Debug.echo( result );\n    }\n}\n"
  },
  {
    "path": "Pinecones/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecones</groupId>\n    <artifactId>pinecones</artifactId>\n    <packaging>pom</packaging>\n    <version>2.5.1</version>\n\n    <modules>\n        <module>pinecone</module>\n        <module>ulfhedinn</module>\n        <module>slime</module>\n        <module>jelly</module>\n        <module>summer</module>\n        <module>springram</module>\n        <module>ulf-lib-construction</module>\n        <module>ulf-lib-oltp-rdb</module>\n    </modules>\n\n    <dependencies>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Pinecones/ulf-lib-construction/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>pinecones</artifactId>\n        <groupId>com.pinecones</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.ulf</groupId>\n    <artifactId>ulf-lib-construction</artifactId>\n    <version>1.2.1</version>\n    <packaging>jar</packaging>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>provided</scope>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework</groupId>\n            <artifactId>spring-core</artifactId>\n            <version>5.3.29</version>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework</groupId>\n            <artifactId>spring-beans</artifactId>\n            <version>5.3.27</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/aop/UlfurEnableAspectProxy.java",
    "content": "package com.pinecone.ulf.beans.aop;\n\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.context.annotation.EnableAspectJAutoProxy;\n\n@Configuration\n@EnableAspectJAutoProxy\npublic class UlfurEnableAspectProxy {\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/construction/StructureAnnotationConfiguration.java",
    "content": "package com.pinecone.ulf.beans.construction;\n\nimport org.springframework.beans.factory.config.ConfigurableListableBeanFactory;\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\n\n@Configuration\npublic class StructureAnnotationConfiguration {\n    @Bean\n    public StructureAnnotationProcessor structureAnnotationProcessor( ConfigurableListableBeanFactory beanFactory ) {\n        return new StructureAnnotationProcessor( beanFactory );\n    }\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/construction/StructureAnnotationProcessor.java",
    "content": "package com.pinecone.ulf.beans.construction;\n\nimport org.springframework.beans.BeansException;\nimport org.springframework.beans.factory.config.ConfigurableListableBeanFactory;\nimport org.springframework.beans.factory.config.InstantiationAwareBeanPostProcessor;\n\nimport java.lang.reflect.Field;\nimport java.lang.reflect.Method;\n\nimport com.pinecone.framework.system.construction.ReuseCycle;\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.framework.util.ReflectionUtils;\n\npublic class StructureAnnotationProcessor implements InstantiationAwareBeanPostProcessor {\n\n    private final ConfigurableListableBeanFactory beanFactory;\n\n    public StructureAnnotationProcessor(ConfigurableListableBeanFactory beanFactory) {\n        this.beanFactory = beanFactory;\n    }\n\n    @Override\n    public boolean postProcessAfterInstantiation(Object bean, String beanName) throws BeansException {\n        Class<?> clazz = bean.getClass();\n\n        ReflectionUtils.doWithFields(clazz, field -> {\n            if ( field.isAnnotationPresent(Structure.class) ) {\n                handleStructureField(field, bean);\n            }\n        });\n\n        ReflectionUtils.doWithMethods(clazz, method -> {\n            if ( method.isAnnotationPresent(Structure.class) ) {\n                handleStructureMethod(method, bean);\n            }\n        });\n\n        return true;\n    }\n\n    private void handleStructureField(Field field, Object bean) throws IllegalAccessException {\n        Structure structure = field.getAnnotation(Structure.class);\n        Object dependency = resolveDependency(structure, field.getType());\n        field.setAccessible(true);\n        field.set(bean, dependency);\n    }\n\n    private void handleStructureMethod(Method method, Object bean) {\n        Structure structure = method.getAnnotation(Structure.class);\n        Object dependency = resolveDependency(structure, method.getParameterTypes()[0]);\n        
ReflectionUtils.invokeMethod(method, bean, dependency);\n    }\n\n    private Object resolveDependency(Structure structure, Class<?> type) {\n        String beanName = structure.name();\n        Object dependency;\n\n        if ( !beanName.isEmpty() ) {\n            dependency = this.beanFactory.getBean(beanName);\n        }\n        else if ( structure.cycle() == ReuseCycle.Singleton || structure.cycle() == ReuseCycle.PreSingleton ) {\n            dependency = this.beanFactory.getBean(type);\n        }\n        else if ( structure.cycle() == ReuseCycle.Disposable || structure.cycle() == ReuseCycle.Recyclable ) {\n            dependency = this.beanFactory.createBean(type);\n        }\n        else {\n            throw new UnsupportedOperationException( \"Unsupported reuse cycle: \" + structure.cycle() );\n        }\n\n        return dependency;\n    }\n}"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/construction/UlfInstanceManufacturer.java",
    "content": "package com.pinecone.ulf.beans.construction;\n\nimport org.springframework.context.ConfigurableApplicationContext;\n\nimport com.pinecone.framework.system.construction.InstanceManufacturer;\n\npublic interface UlfInstanceManufacturer extends InstanceManufacturer {\n    ConfigurableApplicationContext getApplicationContext();\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/main/java/com/pinecone/ulf/beans/construction/UlfurInstanceManufacturer.java",
    "content": "package com.pinecone.ulf.beans.construction;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport org.springframework.beans.factory.config.BeanDefinition;\nimport org.springframework.beans.factory.config.ConfigurableListableBeanFactory;\nimport org.springframework.beans.factory.support.DefaultListableBeanFactory;\nimport org.springframework.beans.factory.support.GenericBeanDefinition;\nimport org.springframework.context.ConfigurableApplicationContext;\nimport org.springframework.context.annotation.AnnotationConfigApplicationContext;\nimport org.springframework.context.annotation.Scope;\n\nimport com.pinecone.framework.system.construction.InstanceManufacturer;\nimport com.pinecone.framework.util.Assert;\n\npublic class UlfurInstanceManufacturer implements UlfInstanceManufacturer {\n    private final ConfigurableApplicationContext context;\n\n    public UlfurInstanceManufacturer() {\n        this( new AnnotationConfigApplicationContext() );\n    }\n\n    public UlfurInstanceManufacturer( ConfigurableApplicationContext context ) {\n        this.context = context;\n    }\n\n    public UlfurInstanceManufacturer( Class<?>... 
componentClasses ) {\n        this();\n\n        for( Class<?> cc : componentClasses ) {\n            this.onlyRegister( cc );\n        }\n    }\n\n\n    @Override\n    public InstanceManufacturer registerInstancing( Class<?> type, Object instance ) {\n        ConfigurableListableBeanFactory beanFactory = this.context.getBeanFactory();\n        beanFactory.registerSingleton( type.getName(), instance );\n        this.context.refresh();\n        return this;\n    }\n\n    public void onlyRegister( Class<?> type ) {\n        DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) this.context.getBeanFactory();\n\n        GenericBeanDefinition beanDefinition = new GenericBeanDefinition();\n        beanDefinition.setBeanClass( type );\n\n        Scope scope = type.getAnnotation( Scope.class );\n        if ( scope != null ) {\n            beanDefinition.setScope( scope.value() );\n        }\n        else {\n            beanDefinition.setScope( BeanDefinition.SCOPE_SINGLETON );\n        }\n\n        beanFactory.registerBeanDefinition( type.getName(), beanDefinition );\n    }\n\n    @Override\n    public InstanceManufacturer register( Class<?> type ) {\n        this.onlyRegister( type );\n        this.refresh();\n\n        return this;\n    }\n\n    @Override\n    public InstanceManufacturer registers( List<Class<?>> types ) {\n        for ( Class<?> type : types ) {\n            this.onlyRegister( type );\n        }\n        this.refresh();\n\n        return this;\n    }\n\n    @Override\n    public boolean hasRegistered( Class<?> type ) {\n        return this.context.containsBeanDefinition(type.getName()) || this.context.containsBean(type.getName());\n    }\n\n    @Override\n    public List<Class<?>> fetchRegistered() {\n        List<Class<? 
> > registeredClasses = new ArrayList<>();\n        String[] beanNames = this.context.getBeanFactory().getBeanDefinitionNames();\n        for ( String beanName : beanNames ) {\n            BeanDefinition beanDefinition = this.context.getBeanFactory().getBeanDefinition(beanName);\n            try {\n                Class<?> beanClass = Class.forName(beanDefinition.getBeanClassName());\n                registeredClasses.add(beanClass);\n            }\n            catch ( ClassNotFoundException e ) {\n                Assert.provokeIrrationally( e );\n            }\n        }\n        return registeredClasses;\n    }\n\n    @Override\n    public String[] fetchRegisteredNames() {\n        return this.context.getBeanFactory().getBeanDefinitionNames();\n    }\n\n    @Override\n    public void free( Object instance ) {\n        String[] beanNames = this.context.getBeanNamesForType( instance.getClass() );\n        DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) this.context.getBeanFactory();\n\n        for ( String beanName : beanNames ) {\n            beanFactory.destroySingleton( beanName );\n        }\n    }\n\n    @Override\n    public void free( Class<?> type, Object instance ) {\n        String beanName = type.getName();\n        DefaultListableBeanFactory beanFactory = (DefaultListableBeanFactory) this.context.getBeanFactory();\n\n        if ( beanFactory.containsSingleton(beanName) ) {\n            beanFactory.destroySingleton(beanName);\n        }\n    }\n\n\n    @Override\n    public Object autowire( Object that ) {\n        this.context.getAutowireCapableBeanFactory().autowireBean( that );\n        return that;\n    }\n\n    @Override\n    public Object allotInstance( String type ) {\n        return this.context.getBean( type );\n    }\n\n    @Override\n    public <T> T allotInstance( Class<T> type ) {\n        return this.context.getBean( type );\n    }\n\n    @Override\n    public void close() {\n        this.context.close();\n    }\n\n    
@Override\n    public void refresh() {\n        this.context.refresh();\n    }\n\n    @Override\n    public ConfigurableApplicationContext getApplicationContext() {\n        return this.context;\n    }\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/CanesService.java",
    "content": "package com.wolf.construction;\n\nimport java.util.List;\nimport javax.annotation.Resource;\n\nimport org.springframework.stereotype.Component;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.ulf.beans.aop.UlfurEnableAspectProxy;\nimport com.pinecone.ulf.beans.construction.StructureAnnotationConfiguration;\nimport com.pinecone.ulf.beans.construction.UlfInstanceManufacturer;\nimport com.pinecone.ulf.beans.construction.UlfurInstanceManufacturer;\n\n@Component\npublic class CanesService {\n    @Resource\n    //@Structure\n    private FoxService foxService;\n\n    @Resource\n    private HuskyService huskyService;\n\n    public void test() {\n        Debug.trace( \"Husky and fox are Canidae.\" );\n\n        this.foxService.digging();\n        this.huskyService.run();\n\n        this.foxService.attack( \"Kevin\" );\n\n        this.huskyService.tryFoxBlade();\n    }\n\n    public static void main( String[] args ) throws Exception {\n        UlfInstanceManufacturer manufacturer = new UlfurInstanceManufacturer( StructureAnnotationConfiguration.class, UlfurEnableAspectProxy.class );\n\n        manufacturer.registers( List.of( FoxService.class, HuskyService.class, CanesService.class, FoxBlade.class, CanisAspect.class ) );\n\n        CanesService canes = manufacturer.allotInstance( CanesService.class );\n        canes.test();\n    }\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/CanisAspect.java",
    "content": "package com.wolf.construction;\n\nimport org.aspectj.lang.annotation.Aspect;\nimport org.aspectj.lang.annotation.Before;\nimport org.springframework.context.annotation.EnableAspectJAutoProxy;\nimport org.springframework.stereotype.Component;\n\nimport com.pinecone.framework.util.Debug;\n\n@Aspect\n@Component\npublic class CanisAspect {\n    @Before(\"execution(* com.wolf.construction..*(..))\")\n    public void beforeMethod() {\n        Debug.whitef( \"We are canes.\" );\n    }\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/FoxBlade.java",
    "content": "package com.wolf.construction;\n\nimport javax.annotation.Resource;\n\nimport org.springframework.beans.factory.config.BeanDefinition;\nimport org.springframework.context.annotation.Scope;\nimport org.springframework.stereotype.Component;\n\nimport com.pinecone.framework.util.Debug;\n\n@Component\n@Scope(BeanDefinition.SCOPE_PROTOTYPE)\n//@Scope(BeanDefinition.SCOPE_SINGLETON)\npublic class FoxBlade {\n    @Resource\n    private FoxService foxService;\n\n    public void attack() {\n        Debug.bluef( this.foxService.getName() + \" the fox-paladin who is using fox-blade(\" + this.hashCode() + \") to attack.\" );\n    }\n\n    public void trying() {\n        Debug.redf( \"This fox-blade(\" + this.hashCode() + \") is for fox only.\" );\n    }\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/FoxService.java",
    "content": "package com.wolf.construction;\nimport javax.annotation.Resource;\n\nimport org.springframework.context.annotation.Scope;\nimport org.springframework.stereotype.Component;\n\nimport com.pinecone.framework.util.Debug;\n\n@Component\n@Scope(\"singleton\")\npublic class FoxService {\n    @Resource\n    private FoxBlade foxBlade;\n\n    public void digging() {\n        Debug.trace( \"Fox is digging!\" );\n    }\n\n    public String getName() {\n        return \"Donovan\";\n    }\n\n    public void attack( String target ) {\n        Debug.redf( \"Preparing attack.\" );\n        this.foxBlade.attack();\n        Debug.greenf( \"And \" + target + \" is dead.\" );\n    }\n}"
  },
  {
    "path": "Pinecones/ulf-lib-construction/src/test/java/com/wolf/construction/HuskyService.java",
    "content": "package com.wolf.construction;\n\nimport javax.annotation.Resource;\n\nimport org.springframework.context.annotation.Scope;\nimport org.springframework.stereotype.Component;\n\nimport com.pinecone.framework.util.Debug;\n\n@Component\n@Scope(\"singleton\")\npublic class HuskyService {\n    @Resource\n    private FoxBlade foxBlade;\n\n    public void run() {\n        Debug.trace( \"Husky is running!\" );\n    }\n\n    public void tryFoxBlade() {\n        this.foxBlade.trying();\n    }\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-oltp-rdb/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>pinecones</artifactId>\n        <groupId>com.pinecones</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.pinecone.ulf</groupId>\n    <artifactId>ulf-lib-oltp-rdb</artifactId>\n    <version>1.2.1</version>\n    <packaging>jar</packaging>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>mysql</groupId>\n            <artifactId>mysql-connector-java</artifactId>\n            <version>8.0.26</version>\n        </dependency>\n        <dependency>\n            <groupId>org.xerial</groupId>\n            <artifactId>sqlite-jdbc</artifactId>\n            <version>3.46.1.0</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/mysql/MySQLExecutor.java",
    "content": "package com.pinecone.ulf.rdb.mysql;\n\nimport com.pinecone.framework.util.rdb.ArchRDBExecutor;\nimport com.pinecone.framework.util.rdb.RDBHost;\n\npublic class MySQLExecutor extends ArchRDBExecutor {\n    public MySQLExecutor( RDBHost rdbHost ) {\n        super( rdbHost );\n    }\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/mysql/MySQLHost.java",
    "content": "package com.pinecone.ulf.rdb.mysql;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.rdb.RDBHost;\n\nimport java.sql.*;\n\npublic class MySQLHost implements RDBHost {\n    protected String      mszLocation;\n\n    protected String      mszUsername;\n\n    protected String      mszPassword;\n\n    protected String      mszCharset;\n\n    protected String      mszDriver;\n\n    protected Connection  mGlobalConnection;\n\n    public MySQLHost( String dbLocation, String dbUsername, String dbPassword ) throws SQLException {\n        this( dbLocation, dbUsername, dbPassword, \"UTF-8\" );\n    }\n\n    public MySQLHost( String dbLocation, String dbUsername, String dbPassword, String dbCharset ) throws SQLException {\n        this( dbLocation, dbUsername, dbPassword, dbCharset, \"com.mysql.jdbc.Driver\" );\n    }\n\n    public MySQLHost( String dbLocation, String dbUsername, String dbPassword, String dbCharset, String driver ) throws SQLException {\n        this.mszLocation = dbLocation ;\n        this.mszUsername = dbUsername ;\n        this.mszPassword = dbPassword ;\n        this.mszCharset  = dbCharset  ;\n        this.mszDriver   = driver     ;\n        this.connect();\n    }\n\n    @Override\n    public boolean isClosed() {\n        if( this.mGlobalConnection == null ) {\n            return true;\n        }\n\n        try {\n            return this.mGlobalConnection.isClosed();\n        }\n        catch ( SQLException e ) {\n            Debug.cerr( e );\n            return false;\n        }\n    }\n\n    @Override\n    public void connect() throws SQLException {\n        try{\n            Class.forName( this.mszDriver );\n        }\n        catch ( ClassNotFoundException e ){\n            throw new SQLException( \"JDBC Driver is not found.\", \"CLASS_NOT_FOUND\", e );\n        }\n\n        String url = this.mszLocation;\n        if ( !this.mszLocation.startsWith( \"jdbc:\" ) ) {\n            url = \"jdbc:mysql://\" + 
this.mszLocation + \"?characterEncoding=\"+ this.mszCharset +\"&useSSL=false\";\n        }\n        this.mGlobalConnection = DriverManager.getConnection( url, this.mszUsername, this.mszPassword );\n    }\n\n    @Override\n    public void close() throws SQLException {\n        if( this.mGlobalConnection != null ) {\n            this.mGlobalConnection.close();\n        }\n    }\n\n    @Override\n    public Connection getConnection() {\n        return this.mGlobalConnection;\n    }\n\n    @Override\n    public Statement  createStatement() throws SQLException {\n        if( this.isClosed() ){\n            this.connect();\n        }\n\n        return this.mGlobalConnection.createStatement();\n    }\n\n\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/sqlite/SQLiteExecutor.java",
    "content": "package com.pinecone.ulf.rdb.sqlite;\n\nimport com.pinecone.framework.util.rdb.ArchRDBExecutor;\nimport com.pinecone.framework.util.rdb.RDBHost;\n\npublic class SQLiteExecutor extends ArchRDBExecutor {\n    public SQLiteExecutor( RDBHost rdbHost ) {\n        super( rdbHost );\n    }\n}"
  },
  {
    "path": "Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/sqlite/SQLiteHost.java",
    "content": "package com.pinecone.ulf.rdb.sqlite;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.rdb.RDBHost;\n\nimport java.sql.Connection;\nimport java.sql.DriverManager;\nimport java.sql.SQLException;\nimport java.sql.Statement;\n\npublic class SQLiteHost implements RDBHost {\n    protected String      mszLocation;\n\n    protected String      mszUsername;\n\n    protected String      mszPassword;\n\n    protected String      mszCharset;\n\n    protected String      mszDriver;\n\n    protected Connection  mGlobalConnection;\n\n\n    public SQLiteHost( String dbLocation, String dbUsername, String dbPassword ) throws SQLException {\n        this( dbLocation, dbUsername, dbPassword, \"UTF-8\" );\n    }\n\n    public SQLiteHost( String dbLocation, String dbUsername, String dbPassword, String dbCharset ) throws SQLException {\n        this( dbLocation, dbUsername, dbPassword, dbCharset, \"org.sqlite.JDBC\" );\n    }\n\n    public SQLiteHost( String dbLocation, String dbUsername, String dbPassword, String dbCharset, String driver ) throws SQLException {\n        this.mszLocation = dbLocation ;\n        this.mszUsername = dbUsername ;\n        this.mszPassword = dbPassword ;\n        this.mszCharset  = dbCharset  ;\n        this.mszDriver   = driver     ;\n        this.connect();\n    }\n\n    public SQLiteHost( String dbLocation ) throws SQLException {\n        this.mszLocation = dbLocation;\n        this.mszDriver   = \"org.sqlite.JDBC\";\n        this.connect();\n    }\n\n    @Override\n    public boolean isClosed() {\n        if( this.mGlobalConnection == null ) {\n            return true;\n        }\n\n        try {\n            return this.mGlobalConnection.isClosed();\n        }\n        catch ( SQLException e ) {\n            Debug.cerr( e );\n            return false;\n        }\n    }\n\n    @Override\n    public void connect() throws SQLException {\n        try{\n            Class.forName( this.mszDriver );\n        }\n   
     catch ( ClassNotFoundException e ){\n            throw new SQLException( \"JDBC Driver is not found.\", \"CLASS_NOT_FOUND\", e );\n        }\n\n        String url = this.mszLocation;\n        if ( !this.mszLocation.startsWith( \"jdbc:\" ) ) {\n            url = \"jdbc:sqlite:\" + this.mszLocation;\n        }\n        this.mGlobalConnection = DriverManager.getConnection( url );\n        Statement statement = this.mGlobalConnection.createStatement();\n        statement.execute( \"PRAGMA journal_mode=WAL;\" );\n        statement.close();\n    }\n\n    @Override\n    public void close() throws SQLException {\n        Debug.trace(\"关闭\");\n        if( this.mGlobalConnection != null ) {\n            this.mGlobalConnection.close();\n        }\n    }\n\n    @Override\n    public Connection getConnection() {\n        return this.mGlobalConnection;\n    }\n\n    @Override\n    public Statement createStatement() throws SQLException {\n        if( this.isClosed() ){\n            this.connect();\n        }\n\n        return this.mGlobalConnection.createStatement();\n    }\n}\n"
  },
  {
    "path": "Pinecones/ulf-lib-oltp-rdb/src/main/java/com/pinecone/ulf/rdb/sqlite/SQLiteMethod.java",
    "content": "package com.pinecone.ulf.rdb.sqlite;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.sql.ResultSet;\nimport java.sql.ResultSetMetaData;\nimport java.sql.SQLException;\nimport java.sql.Statement;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class SQLiteMethod implements Pinenut {\n    private SQLiteHost sqliteHost;\n    private   Statement    statement;\n\n    public SQLiteMethod(SQLiteHost sqliteHost ) throws SQLException {\n        this.sqliteHost = sqliteHost;\n        this.statement  = sqliteHost.createStatement();\n    }\n\n    public Map< String, Object > executeQuery(String sql ) throws SQLException {\n        HashMap<String, Object> map = new HashMap<>();\n        ResultSet resultSet = this.statement.executeQuery(sql);\n\n        ResultSetMetaData metaData = resultSet.getMetaData();\n        int columnCount = metaData.getColumnCount();\n\n        if ( resultSet.next() ) {\n            for (int i = 1; i <= columnCount; i++) {\n                map.put( metaData.getColumnName( i ), resultSet.getString( i ) );\n            }\n        }\n\n        return map;\n    }\n\n    public int executeUpdate( String sql ) throws SQLException {\n        return this.statement.executeUpdate( sql );\n    }\n}\n"
  },
  {
    "path": "README.md",
    "content": "# Bean Nuts Hazelnut Hydra <br> 九头龙，分布式操作系统\n<p align=\"center\">\n  <strong>\n  真超级个体, 一个人公司, 一个集团, 一个人中台, 大规模AI、数据、任务调度工业架构, 大规模控制, \n  中央情报系统, 大规模分布式爬虫, 大数据处理, 数据仓库, 云计算, 中台\n   </strong>\n</p>\n\n<p align=\"center\">\n  <a href=\"https://docs.nutsky.com/docs/hazelnut_sauron_zh_cn\">\n    <img src=\"https://img.shields.io/badge/文档-简体中文-blue.svg\" alt=\"简体中文文档\" />\n  </a>\n\n   <a href=\"https://github.com/DragonKingpin/Hydra/blob/beta/CHANGELOG.md\" >\n    <img src=\"https://img.shields.io/badge/ChangeLog-English-blue.svg\" alt=\"Update Log\" />\n  </a>\n\n   <a target=\"_blank\" href=\"https://www.oracle.com/technetwork/java/javase/downloads/index.html\" >\n        <img src=\"https://img.shields.io/badge/JDK-11+-green.svg\" />\n    </a>\n   <a target=\"_blank\" href='https://github.com/DragonKingpin/Hydra'>\n        <img src=\"https://img.shields.io/github/stars/DragonKingpin/Hydra.svg\" alt=\"github stars\"/>\n   </a>\n\n   <a target=\"_blank\" href=''>\n        <img src=\"https://img.shields.io/badge/Process-Developing-yellow\" alt=\"github stars\"/>\n   </a>\n</p>\n\n<p align='center'>\n  <b>简体中文</b> | <b>English[TODO]</b> | <a href=\"https://www.nutsky.com\">Nuts Projects</a> | \n  <a href=\"https://www.dragonking.cn\" target=\"_blank\">Dragon King</a> | <a href=\"https://www.wkwja.cn\" target=\"_blank\">Ken 老板</a> \n  | <a href=\"https://www.geniusay.com\" target=\"_blank\">Genius 老板</a> | <a href=\"https://www.welsir.com\" target=\"_blank\">Welsir 老板</a> \n</p>\n\n<p align='center'>\n  文档（持续增量更新）:\n  <a href=\"https://docs.nutsky.com/docs/hazelnut_sauron_zh_cn\" target=\"_blank\">https://docs.nutsky.com/docs/hazelnut_sauron_zh_cn</a> |\n  真实集群搭建过程:\n    <a href=\"https://zhuanlan.zhihu.com/p/634851956\" target=\"_blank\">https://zhuanlan.zhihu.com/p/634851956</a>\n</p>\n\n## 📖 Abstract\nWould you like to own the \"God Eyes\"? Do you crave power? Do you wish to wield all information at your fingertips? 
\n**Now, data is all you need!**\n\nThe Hazelnut and Hydra ecosystem is a powerful data analysis \"Elder Brain\" designed specifically for \"TJ\" individuals, 'all information all I need'.\n**Hey, commander!** We build a unique personal PB level data warehouse, knowledge base, and search engine just for you, your exclusive \"God Eyes\" !\n\nYour own C4ISR, your own 'global' strike system, Central Intelligence System, Central Staff System, and firepower industrial plants. \nThe underlying architecture of above.\n\n## 📖 摘要 / 简介\n**你想拥有‘上帝之眼’吗？你渴望力量吗？你希望一切信息尽在掌控吗？这个时代，数据即使世界！**\n\nHydra 生态，专为\"TJ\"人打造的大规模数据分析“主脑”，一切尽在掌握之中。\nHydra为你打造个人PB级数仓、知识库、图库、任务编排和服务于 Agent 工厂化的超级个体引擎，你的专属'上帝之眼'，为所欲为！\n\n到底这是什么玩意？属于你自己的C4ISR，你自己的“全球”打击系统、中央情报系统、中央参谋系统和火力军工厂的底层战略架构。\n\n### 字多不看？太高端听不懂？几个场景助你快速了解Hydra理念。\n- **大规模知识库**：构造你的私人知识库，关联任何你感兴趣的知识图谱（金融、新闻、学术、游戏、音乐、电影、视频、小说、美食等），生成巨型知识库和图谱，并交给GPT等大模型给你生成属于你的`上帝报告`。\n- **数据仓库**：海量数据，任你处置，你可以打造自己的数据`全图挂`，甚至可以乘坐时光机，在数据世界中随意穿行。你就是上帝，历史的变迁，触手可及。\n- **数据集市**：打造你的个人GPT，随着算力平民化、大模型技术的平民化。未来，你不想拥有自己的GPT吗？你只需要不断收集属于你的数据集，未来打造你的专属GPT、Diffusion等。\n- **大型采集**：统一并行架构打造大规模战略采集系统，多个实例助你快速入门：\n1).维基百科全站爬取；2).Urban Dictionary全站爬取；3).imdb爬取；4).编年史子项目，每日全世界新闻采集，打造互联网记忆库与情报系统；\n5).金融数据大规模采集（面向资金流向建模）；6).IP反查、ISP追踪、DNS/rDNS、域名、NIC等搜索引擎基架数据采集；等。（避免争议，不提供任何有争议的代码和数据）\n- **数据平台**：面向战略和战术数据分析系统，构建和打通其他开源数据产品，面向智能ETL、数仓、取数、情报等专业大数据分析系统。\n- **中台架构**：面向系统性实现上层应用、面向抽象、统一化，支撑大规模并行、大数据架构，信息、控制、调度、审计、权限等元架构分离。\n\n### 🏆 3A史诗巨献\n全域覆盖、听你指挥、能打胜战、作风优良。\n\n### 什么是 Hydra，他能干嘛？\n- Hydra 由 <a href=\"https://www.dragonking.cn\" target=\"_blank\">DragonKing</a> 及其团队原创的分布式基架系统，\n面向系统性构建上层大型应用。Hydra的设计基础首先是面向控制的，用于实现大规模控制，进而实现通用任务、服务操作系统。\n与其他操作系统设计理念类似，倡导内核做事、统筹规划。\n- 其设计根源是基于对中台架构的创新和一体化，并尝试构造更一致的内核，\n目前的设计尝试由一个迷你中台和云系统（<a href=\"https://www.nutsky.com\">豆子坚果云</a>）不断自底向上迭代。\n- Hydra基架如下图所示，自顶向下整体分为三层：应用（具体）、中级（典型）、底层（抽象）。对应图中应用层、中级应用中台、中台层。\n- 一个操作系统至少需要实现对任务、服务、资源、存储、消息、信号、权限等子系统的控制和管理。\nHydra也是如此，但是由于时间和性价比问题，资源管理内核由具体的第三方系统代理（如Yarn）。\n\n#### 
全局架构鸟瞰图\n![HydraArchitecture](assets/imgs/architect/architect_frame_global_2.png)\n\n01. 支持统一高度抽象化的任务、事务、服务等编排，一套接口，可分级、可本地、可集群。\n02. 抽象统一分布式资源树系统，场景树、服务树、任务树、部署树、配置树、存储树等。\n03. 可多级、可嵌套的编排系统，支持配置域管理、复杂配置动态解耦、可继承和重写的多域配置管理。\n04. 可事务化抽象进程、线程模型，让远端服务通过RPC或通信组件通过一套接口，像本地进程一样进行统一管理。\n05. 可事务图化编排方法论设计，就像TensorFlow，更抽象简单的服务、任务设计模式。事务和任务编排支持序列和并行两种模式，更支持性能模式。确保事务绝对执行、回滚、性能执行、并行等多种范式。\n06. 面向统一解释器模式方法论和过程化设计，事务和任务编排逻辑化，支持循环控制、条件控制、散转控制、原子化等。\n07. 抽象统一任务管理器体系，统一生命周期设计，多类任务一套“任务管理器”，就像本地系统一样简单。\n08. 抽象统一系统架构体系，可中心化、可联邦化、可链式化，一切皆有可能。\n09. 抽象统一外部文件系统，基于Common VFS 统一文件系统管理，从复杂底层存储中解放。\n10. 抽象统一内核文件系统，支持级联逻辑卷（简单卷、跨区卷、条带卷）可自由容量编排规划，分布式对象文件系统，支持多种文件系统操作。\n11. 抽象统一数据处理体系，泛容器化思想，抽象化DAO、DTO、Data Manipulation架构，一切皆可是Map、List、Set和Table等。\n12. 抽象化部署模式和抽象云部署，无论是任何系统、本地进程、虚拟机部署、容器部署等。Hydra为您统一，“小程序”化进程模型，就像Springboot一样简单。\n13. 基于分治和MapReduce思想设计，面向大数据处理处理系统设计。\n14. 双工多路RPC设计基于Netty和NIO，支持双向控制（服务端可被动控制客户端），双端可收发，支持JSON、BSON、Protobuf（Java全自动动态编译）。\n15. 传统实例化、IOC化、C/C++风格化，多种对象生命周期模式，更有趣的系统设计。\n16. 可分级、分组、嵌套、级联的设计方法论，确保更灵活的大型系统设计，确保系统结构清晰、规整、可视、整整齐齐。\n17. 
无需担心抽象，无需担心\"吹牛逼\"，我们尽可能通过实际案例和有效代码，展示系统功能，也欢迎commit。——以实现小型爬虫搜索引擎为例。\n\n\n### 子系统、框架和实例系统\n#### Bean Nuts Hazelnut Sauron Radium (索伦·镭，分布式爬虫引擎)\n- 该部分为分布式爬虫引擎、爬虫大数据处理、清洗、持久化框架系统的实现。面向分布式大规模系统性爬虫设计，支持任务编排和并行流水线爬虫、支持周期和定时大规模爬虫、支持并行离线数据处理。\n#### Bean Nuts Hazelnut Sauron Shadow (索伦·暗影，以爬虫、小型搜索引擎为例)\n- 该部分基于Pinecone、Ulfhedinn、Slime、Hydra、Radium等子框架最终设计的搜索引擎（数据采集、数据处理侧）应用实例。\n- 多个实例助你快速入门：1).维基百科全站爬取；2).Urban Dictionary全站爬取；3).imdb爬取；4).编年史子项目，每日全世界新闻采集，打造你的互联网记忆库；等。\n#### Bean Nuts Hazelnut Sauron Eyes - The God View (索伦·之眼，数据知识图谱化与检索系统[用户侧终端应用])\n- 数据检索引擎演示实例参考SauronEyes (https://god.nutsky.com | http://www.godview.net)\n\n\n## ⚔ 目录\n* [一、描述](#一描述)\n    * [1.1、框架组成](#11框架组成)\n        * [1.1.1、Pinecone 基础运行支持库](#111基础运行支持库)\n             * [1.1.1.1、扩展容器](#1111扩展容器)\n             * [1.1.1.2、工具库](#1112工具库)\n        * [1.1.2、Slime 大数据系统支持框架](#112大数据系统支持框架)\n        * [1.1.3、Ulfhedinn 基础运行支持库，第三方依赖版](#113大数据系统支持库)\n        * [1.1.4、Hydra 分布式、任务系统框架](#114分布式、任务系统框架)\n        * [1.1.5、Radium 分布式爬虫系统框架](#114分布式、任务系统框架)\n    * [1.2、功能模块组成](#12功能模块组成)\n        * [1.2.1、网络通信库](#121网络通信库)\n            * [1.2.1.1、流处理模块](#1211流处理模块)\n\n\n* [二、编译、使用](#二编译、使用)\n* [三、目录结构说明](#三目录结构说明)\n    * [3.1、TODO](#31TODO)\n\n* [四、使用许可](#四使用许可)\n* [五、参考文献](#五参考文献)\n* [六、致谢](#六致谢)\n* [七、题外话](#七题外话)\n\n## 一、📝 描述\n### 1.1、框架组成\n#### 全局中央架构鸟瞰图（抽象全局架构）\n![HydraArchitecture](assets/imgs/architect/architect_frame_global.png)\n#### 1.1.1、Pinecone 基础运行支持库\n##### 1.1.1.1、 扩展容器\n1. LinkedTreeMap\n2. ScopeMap (多域查找树、Map), 实现和支持类似动态语言（如JS、PHP、Python等）的底层继承数据结构，支持两类子模型（单继承、多继承），\n可以实现多域查找的功能。\n3.  Dictium、Dictionary（字典接口模型），实现和支持类似动态语言（如PHP、Python等）的Array、字典查找，Map和可索引对象进一步抽象化。\n4.  Multi*Map (多种MultiValueMap范式)，实现支持多种多值Map的实现，如MultiCollectionMap、MultiSetMap等。\n5.  Top (TopN问题通用解决)，实现和支持堆法、有序树法、多值有序树法三种实现。\n6.  distinct (差异器)，实现传统Set法、分治法、Bloom等的集合差异分析器。\n7.  affinity (亲缘性器)，实现和支持对亲缘抽象字典的继承、重写等。\n8.  tabulate (遍历器)，实现以列表式对抽象字典的内部递归，并列表化和分析亲缘关系。\n9.  ShardList (非复制式共享数组)，由 @Geniusay 贡献。\n10. 
TrieMap (前缀树Map)，支持非递归迭代器，类文件系统完整功能前缀树，Symbolic Reparse 引用挂载点。\n##### 1.1.1.2、工具库\n1. JSON库，BSON，JPlus(JSON++)库 (面向可二次开发、设计的自由JSON设计)，可以重写JSONEncoder、JSONDecoder、JSONCompiler、JSONDecompiler、注入器等。\n2. Name命名空间库\n3. lang (Java包和扩展库)，支持各类类扫描方法、包扫描方法、遍历和收集方法、加载、多域扫描等。\n4. GUID (由@kenssa4eedfd贡献)，统一分布式ID，魔改百度Uid，GUID64、GUID72，支持随机混淆。\n4. TODO\nTODO\n#### 1.1.2、Hydra \n##### 1.1.2.1、系统架构、骨架设计\n1. HierarchySystem，阶级系统(Master-Slaver推广架构)\n2. FederalSystem，联邦系统(面向投票式设计) [BETA, 20250101]\n3. BlockSystem，块式系统(面向边缘、链式系统设计)。让你的每台设备都成为你专属链上节点。 [BETA, 20250101]\n4. CascadeComponent, 级联组件设计，支持亲缘性回收控制，“The Omega Device”，级联回收主键引用。\n5. 分布式容器（分布式多域树等）\n6. 分布式微内核\\\n   借鉴WinNT、Unix，支持KernelObject、挂载点、统一内核对象管理。\\\n   Unix风格内核句柄路径化，如分布式挂载 /proc/、/dev/ 等。 \n##### 1.1.2.2、统一调度编排系统\n![TaskTree](assets/imgs/task/task_tree.png)\n![TaskLineage](assets/imgs/task/task_lineage.png)\n1. Orchestration (事务、任务编排子系统)，面向统一解释器模式方法论和过程化设计，事务和任务编排逻辑化，支持循环控制、条件控制、散转控制、原子化等，更支持事务完整性设计。\n2. Auto (简易命令模式，可编程自动机系统)，实现支持Automaton简易生产-消费命令队列，实现支持PeriodicAutomaton可编程Timer，实现支持Marshalling流水线指令编排器。(更多Timer和算法持续更新中)\n3. Vector DAG（矢量图），\n本文提出一种通用的大规模矢量DAG（Vector DAG）图模型，用于支撑高性能调度、编排与控制任务，适配亿级以上节点规模的实际应用场景。通过拓扑拆分、矢量化子图划分及多种并发图算法，实现了大规模调度控制能力。\n算法支持关键路径计算、节点可达性判断、剪枝优化、最小生成子图合并及最短路径计算等核心图处理能力。\nhttps://docs.nutsky.com/docs/hazelnut_sauron_zh_cn/uniform_massive_graph_dispatch\n![VDAGArchitecture](assets/imgs/vdag_architecture.png)\n\n##### 1.1.2.3、小程序系统\nServgram，小程序系统，是的这很微信，不过是服务端的小程序哦！进一步抽象和推广进程思想，任何服务介质（本地、虚拟机、容器等），一切服务、一切任务等。\n一切统一和谐，一套调度、一套接口、一套操作，生命周期整整齐齐（满足你的控制欲），更可冗余确保稳定。\\\n配合任务编排和事务编排，多个任务，一套系统全包干。\n(TODO，远端进程进一步实现、实现统一分布式锁接口)\n##### 1.1.2.4、统一消息分发系统\n##### 1.1.2.5、WolfMC RPC\n1. 基于Netty设计的原创消息控制中间件，支持RPC模式。\n2. 支持JSON、BSON、Protobuf，更多RPC协议和数据结构持续更新中。[TODO 分片、泳道]\n3. 支持双工通信，双端可收可发。（服务端可被动控制客户端，双路Channel池设计）\n4. 全自动Protobuf动态编译，支持直接接口代理（类似Mybatis Mapper工厂）。\n5. 支持异步回调，类似AJAX。\n6. 支持同步回调。\n7. 配合MessageExpress, 支持类似 Spring Controller 式消息控制。\n8. 
支持AOP、IOC，可以自动依赖注入，支持类似Controller范式和消息注解拦截。\n\n##### 1.1.2.6、统一服务注册、发现、管理系统\n1. 服务树\\\n支持多级分类的服务树，可以设置多级命名空间，如 `Name1.Name2.应用1.服务1`。\\\n支持元信息继承、多引用、节点回收、支持复杂服务管理分类。\n\n##### 1.1.2.7、分布式微内核\n1. 配置树、分布式注册表\\\n   \"盗版\" Apollo，支持分布式配置管理。一个配置中心，就像 Windows注册表一样。\n   1. 统一DOM / 前缀树 抽象化，支持自定义节点（插件设计），文件系统式设计。\n   2. 支持配置继承\n   3. 支持Hard Link 引用(标记法引用计数，有循环引用检测 / inode 表设计)\n   4. 支持选择器 （路径选择器、XPath）\n   5. 支持大数据（数据库基准）\n   6. 路径缓存设计\n   7. 改进非递归DFS路径寻址算法\n   8. 兼容Windows 配置表风格\n   9. 支持移动、复制（支持递归级联，复制 / 移动文件夹和配置项）\n   10. 支持 JSON、XML 等原始文本或动态数据格式，支持 JSON、XML 与注册表混转。\n   11. 支持配置动态渲染（EL表达式、逻辑循环支持）\n   12. 数据库操作和底层分离，支持数据库、内存、Redis等任意数据源\n   \n2. 任务树\\\n   任务、进程分类、分组和编排系统。\\\n   对一级挂载点 `/proc/${proc_guid}/task` 的二级挂载和分类。\n3. 部署树、部署管理器\\\n   多种部署模式（如容器、虚拟机、PaaS等），分类、分组和编排系统，类似 Windows 设备管理器。\\\n   抽象部署设备类似传统操作系统的物理设备，通过编写驱动，实现对部署子系统的管理。\n4. 场景树\\\n   功能分类、分组和编排系统。\n5. 统一用户系统\n   1. 内核级统一用户、凭证、角色、权限管理。\n   2. 统一单点登录中台化设计。\n   3. 支持域、组、用户三级设计。\n   \n##### 1.1.2.8、分布式存储系统\n1. 卷系统\n   1. 物理卷，多种数据源设计\n   2. 简单卷\n   3. 跨区卷\n   4. 条带卷，基于状态机无锁编程化并行存储，采用基于差分多路缓存滑动窗口、DFA、FIFO多线程缓存等算法优化的高性能条带卷设计。\n\n  应用层面本项目提供了物理卷与逻辑卷的管理后台方便用户的管理与使用\n  ![image](assets/imgs/logic_volume_manage.png)\n  ![image](assets/imgs/logic_volume_create.png)\n  ![image](assets/imgs/physical_volume_manage.png)\n\n2. 分布式文件系统\\\nHydra 是基于多级级联的大型系统架构，UOFS同样沿用了Hydra的整体架构体系，采样全局级联的设计。支持存储节点、索引节点、卷节点等每一层级的级联设计。\n![FileSystemArchitecture](assets/imgs/fs_architecture.svg)\n应用层面本项目不仅提供了文件浏览器的核心功能，还支持文件预览、多集群上传、外部挂载、文件完整性验证等。\n![image](assets/imgs/file_manage.png)\n![image](assets/imgs/file_preview.png)\n![image](assets/imgs/external_mounting.png)\n![image](assets/imgs/property_view.png)\n\n3.基于UOFS的CDN(文件分发网络)\n本项目基于UOFS结合Kafka、RocketMQ、服务管理中心等提供了保证数据一致性的CDN服务,并提供文件版本管理与站点管理。\n![image](assets/imgs/site_manage.png)\n![image](assets/imgs/file_distribution_and_synchronization.png)\n![image](assets/imgs/file_version_manage.png)\n\n\n\n4. 
版本管理\n\n##### 1.1.2.9、统一资源管理、分配接口系统［TODO］\n##### 1.1.2.10、图形管理界面［TODO］\n##### 1.1.2.11、TODO\n\n#### 1.1.3、Slime 史莱姆大数据支持库\n##### 1.1.3.1、统一块抽象、管理、分配系统（泛块式、抽象页面（连续、离散、自定义）、帧、分区、簇等）\n##### 1.1.3.2、Mapper、Querier 抽象映射、查询器，统一接口多种实现（本地、数据库、缓存、数据仓库等）\n1. 优化和缓存版RDBMapper、IndexableMapper，使用多种缓存策略，泛容器化API接口使用。\n##### 1.1.3.3、统一缓存库和查询优化库、支持LRU、冷热优化、页面缓存、页面LRU、多级缓存等多策略实现。\n##### 1.1.3.4、Source抽象数据源库、支持RDB-ibatis、NoSQL、缓存、文件等扩展。\n##### 1.1.3.5、Reducer库[TODO]，更多Reduce策略实现、接口\n\n#### 1.1.4、Radium 分布式爬虫和搜索引擎数据取回、任务编排、处理、持久化框架\n##### 1.1.4.1、一站式爬虫数据处理范式\n基于Map-Reduce思想，面向TB-PB级别数据处理，统一任务编号、映射、处理。\n范式包含 Reaver（掠夺者，数据取回器），Stalker（潜伏者，面向批量爬虫索引嗅探），Embezzler（洗钱者，面向批处理爬虫数据处理）。\n##### 1.1.4.2、统一多任务调度、配置、编排系统\n支持事务型、Best-Effort等多种任务粒度控制。\n支持分组、嵌套、多级任务调度，支持子任务继承父任务关系、血缘性。\n支持任务回滚、熔断等接口设计。\nTODO\n\n## 二、🧬 编译、使用\n### 编译\n- 项目使用Maven管理，使用jdk11以上版本即可运行。\n- 编译得到jar包，即插即用，随意部署。\n- 或使用 IntelliJ IDEA 直接打开即可。\n\n### 最小系统使用\n- 无需特意配置环境变量等信息。\n- 系统配置文件，默认位于\"./system/setup/..\"\n```json5\n    \"Orchestration\"         : {\n      \"Name\": \"ServgramOrchestrator\",\n      \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n\n      // Servgram-Classes scanning package-scopes\n      \"ServgramScopes\": [\n        \"com.sauron.heist.heistron\"\n      ],\n\n      \"Transactions\": [\n        { \"Name\": \"Heist\", \"Type\": \"Sequential\", \"Primary\": true }\n      ]\n    }\n```\n- 默认启动 `Heist` （爬虫）任务\n- 检查 `Heist` 小程序配置，默认位于\"./system/setup/heist.json5\"\n```json5\n    \"Orchestration\"    : {\n        \"Name\": \"HeistronOrchestrator\",\n        \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n    \n        \"DirectlyLoad\" : {\n          \"Prefix\": [],\n          \"Suffix\": [ \"Heist\" ]\n        },\n    \n        \"ServgramScopes\": [\n          \"com.sauron.shadow.heists\",\n          \"com.sauron.shadow.chronicle\"\n        ],\n    \n        // 修改这里，可运行例程 'Void' , 最小系统演示\n        \"Transactions\": [\n          { \"Name\": \"Void\", \"Type\": \"Sequential\" /* Enum: { 
Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n        ]\n    }\n```\n- 检查 `Void` 小小程序配置，默认位于\"./system/setup/heists/Void.json5\"，原则上注意大小写\n```json5\n    \"Orchestration\"         : {\n        \"Name\": \"VoidOrchestrator\",\n        \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n    \n        \"Transactions\": [\n          { \"Name\": \"Jesus\", \"Type\": \"Sequential\"  },\n          { \"Name\": \"Satan\", \"Type\": \"Sequential\"  },\n          { \"Name\": \"Rick\" , \"Type\": \"Sequential\"  }\n        ]\n    }\n```\n- 正常启动，将开始本地流水线序列调度 \"Jesus\"、\"Satan\"、\"Rick\"三个大任务和其子任务。\n\n     ![最小系统成功运行图](assets/imgs/demo_result.png)\n     \n## 三、🔨 目录结构说明\n- TODO \n\n## 四、🔬 使用许可\n- MIT (保留本许可后，可随意分发、修改，欢迎参与贡献)\n\n## 五、📚 参考文献\n(参考文献包括Nuts家族 C/C++、Java等子语言运行支持库、本项目框架、本项目等所有涉及的子项目的总参考文献、源码、设计、\n专利等相关资料。便于读者了解相关技术（设计）的源头和底层方法论，作者向相关参考项目（以及未直接列出项目）作者表示崇高敬意和感谢。)\n01. C/C++ STL (容器、运行支持库设计，算法、设计模式和数据结构)\n02. Java JDK  (容器、运行支持库设计，算法、设计模式和数据结构)\n03. Go SDK  (容器、运行支持库设计，算法、设计模式和数据结构)\n04. PHP 5.6 Source (解释器、相关支持库设计)\n05. MySQL Source (参考多个设计思想和部分思想实现)\n06. Linux Kernel (参考多个设计思想和部分思想实现)\n07. Win95 Kernel (Reveal Edition)，Win32Apis，Runtime framework\n08. WinNT 窗口事件思想、回调函数注入等\n09. C/C++ Boost\n10. C/C++ ACL -- One advanced C/C++ library for Unix/Windows.\n11. Java Springframework Family (How IOC/AOP/etc works)\n12. Hadoop MapReduce (How it works)\n13. Python TensorFlow (Graph, how it orchestrates)\n14. Javascript DOM 设计、CSS选择器等\n15. 其他若干个小框架、工具库、语言等（如Apache Commons、org.json、fastcgi、fastjson、libevent等），本文表示崇高敬意和感谢。\n\n\n\n\n# 📈 项目活跃表\n![Alt](https://repobeats.axiom.co/api/embed/0ae23655bb105addf8d90a999df36f690d615af7.svg \"Repobeats analytics image\")\n"
  },
  {
    "path": "RedQueen/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.redqueen</groupId>\n    <artifactId>redqueen</artifactId>\n    <packaging>pom</packaging>\n    <version>2.5.1</version>\n\n    <modules>\n        <module>redqueen-architecture</module>\n        <module>redqueen-computation-suit</module>\n        <module>redqueen-system</module>\n        <module>redqueen-framework-service</module>\n    </modules>\n</project>"
  },
  {
    "path": "RedQueen/redqueen-architecture/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>redqueen</artifactId>\n        <groupId>com.acorn.redqueen</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.redqueen.kernel</groupId>\n    <artifactId>redqueen-architecture</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        
<dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-service</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "RedQueen/redqueen-architecture/src/main/java/com/acorn/redqueen/compute/ComputationNode.java",
    "content": "package com.acorn.redqueen.compute;\n\npublic interface ComputationNode {\n}\n"
  },
  {
    "path": "RedQueen/redqueen-architecture/src/main/java/com/acorn/redqueen/system/Dummy.java",
    "content": "package com.acorn.redqueen.system;\n\npublic class Dummy {\n}\n"
  },
  {
    "path": "RedQueen/redqueen-computation-suit/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>redqueen</artifactId>\n        <groupId>com.acorn.redqueen</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.redqueen.kernel</groupId>\n    <artifactId>redqueen-computation-suit</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    
</dependencies>\n</project>"
  },
  {
    "path": "RedQueen/redqueen-framework-service/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>redqueen</artifactId>\n        <groupId>com.acorn.redqueen</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.redqueen.kernel</groupId>\n    <artifactId>redqueen-framework-service</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n     
   <dependency>\n            <groupId>com.acorn.redqueen.kernel</groupId>\n            <artifactId>redqueen-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-service-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/ApplicationManager.java",
    "content": "package com.acorn.redqueen.service;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\nimport com.pinecone.hydra.system.ko.CascadeKernelObjectInstrument;\n\npublic interface ApplicationManager extends CascadeKernelObjectInstrument, Manager {\n}\n"
  },
  {
    "path": "RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/ArchRedApplication.java",
    "content": "package com.acorn.redqueen.service;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.Identification;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\n\npublic abstract class ArchRedApplication implements RedApplication {\n    protected ApplicationElement mApplicationElement;\n\n    @Override\n    public ApplicationElement getApplicationElement() {\n        return this.mApplicationElement;\n    }\n\n    @Override\n    public long getEnumId() {\n        return this.mApplicationElement.getEnumId();\n    }\n\n    @Override\n    public GUID getGuid() {\n        return this.mApplicationElement.getGuid();\n    }\n\n    @Override\n    public Identification getId() {\n        return this.mApplicationElement.getId();\n    }\n\n    @Override\n    public String getName() {\n        return this.mApplicationElement.getName();\n    }\n\n    @Override\n    public String getScenario() {\n        return this.mApplicationElement.getScenario();\n    }\n\n    @Override\n    public String getPrimaryImplLang() {\n        return this.mApplicationElement.getPrimaryImplLang();\n    }\n\n    @Override\n    public String getExtraInformation() {\n        return this.mApplicationElement.getExtraInformation();\n    }\n\n    @Override\n    public String getLevel() {\n        return this.mApplicationElement.getLevel();\n    }\n\n    @Override\n    public String getDescription() {\n        return this.mApplicationElement.getDescription();\n    }\n}\n"
  },
  {
    "path": "RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/RedApplication.java",
    "content": "package com.acorn.redqueen.service;\n\nimport com.pinecone.hydra.service.Application;\nimport com.pinecone.hydra.service.kom.entity.ApplicationElement;\n\npublic interface RedApplication extends Application {\n\n    ApplicationElement getApplicationElement();\n\n}\n"
  },
  {
    "path": "RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/RedQueenServiceControllerException.java",
    "content": "package com.acorn.redqueen.service;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.service.registry.ServiceControlException;\n\npublic class RedQueenServiceControllerException extends ServiceControlException implements Pinenut {\n    public RedQueenServiceControllerException() {\n        super();\n    }\n\n    public RedQueenServiceControllerException( String message ) {\n        super(message);\n    }\n\n    public RedQueenServiceControllerException( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public RedQueenServiceControllerException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/RedServiceApplication.java",
    "content": "package com.acorn.redqueen.service;\n\npublic class RedServiceApplication extends ArchRedApplication implements ServiceApplication {\n\n}\n"
  },
  {
    "path": "RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/ServiceApplication.java",
    "content": "package com.acorn.redqueen.service;\n\npublic interface ServiceApplication extends RedApplication {\n}\n"
  },
  {
    "path": "RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/conduct/CollectiveServiceRegiment.java",
    "content": "package com.acorn.redqueen.service.conduct;\n\nimport com.pinecone.framework.system.regime.Regiment;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.registry.ServiceControlException;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\n\npublic interface CollectiveServiceRegiment extends Regiment, Slf4jTraceable {\n\n    ServiceManager serviceManager();\n\n    ServiceInstrument serviceInstrument();\n\n    void startServiceManage() throws ServiceControlException;\n}\n"
  },
  {
    "path": "RedQueen/redqueen-framework-service/src/main/java/com/acorn/redqueen/service/conduct/RedCollectiveServiceRegiment.java",
    "content": "package com.acorn.redqueen.service.conduct;\n\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.registry.ServiceControlException;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\nimport com.pinecone.hydra.system.Hydrogen;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class RedCollectiveServiceRegiment implements CollectiveServiceRegiment {\n\n    protected ServiceManager                    mServiceManager;\n\n    protected ServiceInstrument                 mServiceInstrument;\n\n    protected Hydrogen                          mSystem;\n\n    protected Logger                            mLogger;\n\n\n    public RedCollectiveServiceRegiment(\n            Hydrogen system, ServiceInstrument serviceInstrument,\n            ServiceManager serviceManager\n    ) {\n        this.mSystem                    = system;\n        this.mServiceInstrument         = serviceInstrument;\n        this.mLogger                    = LoggerFactory.getLogger( \"RedCollectiveServiceRegiment\" );\n        this.mServiceManager            = serviceManager;\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n\n    @Override\n    public void startServiceManage() throws ServiceControlException {\n        this.mServiceManager.startService();\n\n        this.mLogger.info( \"RPC init success\" );\n    }\n\n    @Override\n    public ServiceManager serviceManager() {\n        return this.mServiceManager;\n    }\n\n\n    @Override\n    public ServiceInstrument serviceInstrument() {\n        return this.mServiceInstrument;\n    }\n}\n"
  },
  {
    "path": "RedQueen/redqueen-system/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>redqueen</artifactId>\n        <groupId>com.acorn.redqueen</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.redqueen.kernel</groupId>\n    <artifactId>redqueen-system</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    
</dependencies>\n</project>"
  },
  {
    "path": "RedQueen/redqueen-system/src/main/java/com/acorn/redqueen/RedQueen.java",
    "content": "package com.acorn.redqueen;\n\nimport com.acorn.redqueen.system.ServiceCentralControl;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.io.Tracer;\nimport com.pinecone.hydra.system.ArchModularizedSubsystem;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.component.LogStatuses;\n\npublic class RedQueen extends ArchModularizedSubsystem implements ServiceCentralControl {\n\n    public RedQueen( Hydrogen primarySystem, String name, PatriarchalConfig config ) {\n        super( primarySystem, name, config );\n    }\n\n    @Override\n    protected void traceWelcomeInfo() {\n        Tracer console = this.mPrimarySystem.console();\n        console.getOut().print( \"---------------------------------------------------------------\\n\" );\n        console.getOut().print( \"\\u001B[31mBean Nuts Acorn Red Queen\\u001B[0m\\n\" );\n        console.getOut().print( \"\\u001B[31mMassive Parallel Computing Orchestration System \\u001B[0m\\n\" );\n        console.getOut().print( \"\\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\\u001B[0m\\n\" );\n        console.getOut().print( \"---------------------------------------------------------------\\n\" );\n    }\n\n    protected void init() {\n        this.getLogger().info( \"<RedQueen> >>> System Booting...\" );\n\n        this.infoLifecycle( \"<RedQueen> Domain Subsystem Initialization\", LogStatuses.StatusStart );\n        this.traceWelcomeInfo();\n        this.prepare_system_skeleton();\n\n        this.infoLifecycle( \"<RedQueen> Welcome to the Red Queen super computing!\", LogStatuses.StatusReady );\n        this.infoLifecycle( \"<RedQueen> Domain Subsystem Initialization\", LogStatuses.StatusReady );\n    }\n\n    protected void prepare_system_skeleton() {\n\n    }\n\n    @Override\n    public void vitalize() {\n        this.init();\n    }\n\n    @Override\n    public void terminate() {\n\n    }\n}\n"
  },
  {
    "path": "RedQueen/redqueen-system/src/main/java/com/acorn/redqueen/system/ServiceCentralControl.java",
    "content": "package com.acorn.redqueen.system;\n\nimport com.pinecone.framework.system.SynergicSystem;\nimport com.pinecone.hydra.system.centrum.CentralControlSubsystem;\n\npublic interface ServiceCentralControl extends SynergicSystem, CentralControlSubsystem {\n}\n"
  },
  {
    "path": "Saurons/Saurye/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>saurons</artifactId>\n        <groupId>com.saurons</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.sauron.saurye</groupId>\n    <artifactId>saurye</artifactId>\n    <version>2.1.0</version>\n\n\n</project>"
  },
  {
    "path": "Saurons/Shadow/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>saurons</artifactId>\n        <groupId>com.saurons</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.sauron.shadow</groupId>\n    <artifactId>shadow</artifactId>\n    <version>1.2.7</version>\n    <packaging>jar</packaging>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.springframework.boot</groupId>\n                <artifactId>spring-boot-maven-plugin</artifactId>\n\n                <executions>\n                    <execution>\n                        <phase>package</phase>\n                        <goals>\n                            <goal>repackage</goal>\n                        </goals>\n                    </execution>\n                </executions>\n\n                <configuration>\n                    <includeSystemScope>true</includeSystemScope>\n                    <mainClass>com.sauron.shadow.ShadowBoot</mainClass>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n        
</dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulf-lib-oltp-rdb</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n\n        <dependency>\n            <groupId>com.sauron.core</groupId>\n            <artifactId>sauron-core</artifactId>\n            <version>1.2.7</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.sauron.heist</groupId>\n            <artifactId>heist-system-schedule</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Saurons/Shadow/src/main/java/META-INF/MANIFEST.MF",
    "content": "Manifest-Version: 1.0\nMain-Class: com.sauron.shadow.ShadowBoot\n\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/Shadow.java",
    "content": "package com.sauron.shadow;\n\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.sauron.Sauron;\n\n\npublic class Shadow extends Sauron {\n    public Shadow( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Shadow( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    protected void traceSubsystemWelcomeInfo() {\n        super.traceSubsystemWelcomeInfo();\n        this.pout().print( \"------------------------Shadow Subsystem-----------------------\\n\" );\n        this.pout().print( \"\\u001B[31m\\uD83D\\uDE08 Sauron`s Shadow Subsystem \\uD83D\\uDE08 \\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[32mShadow is hungry, desiring for blood.\\u001B[0m\\n\" );\n        this.pout().print( \"---------------------------------------------------------------\\n\" );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        super.vitalize();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/ShadowBoot.java",
    "content": "package com.sauron.shadow;\n\nimport com.pinecone.Pinecone;\n\npublic class ShadowBoot {\n    public static Shadow shadow = null;\n\n    public static void main( String[] args ) throws Exception {\n        ShadowBoot.shadow = new Shadow( args, Pinecone.sys() );\n        ShadowBoot.shadow.init( (Object...cfg )->{\n            ShadowBoot.shadow.vitalize();\n\n            return 0;\n        }, (Object[]) args );\n    }\n}"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/AffinitySuggestation.java",
    "content": "package com.sauron.shadow.chronicle;\n\nimport com.pinecone.hydra.auto.ArchSuggestation;\nimport com.pinecone.hydra.auto.ContinueException;\nimport com.sauron.heist.heistron.orchestration.Instructations;\n\npublic class AffinitySuggestation extends ArchSuggestation {\n    protected Clerk mClerk;\n\n    public AffinitySuggestation( Clerk clerk ) {\n        this.mClerk = clerk;\n    }\n\n    @Override\n    public void execute() {\n        try{\n            Instructations.infoConformed( AffinitySuggestation.this.mClerk, \"toRavage\" );\n\n            AffinitySuggestation.this.mClerk.isTimeToFeast();\n\n            Instructations.infoCompleted( AffinitySuggestation.this.mClerk, \"toRavage\" );\n        }\n        catch ( Exception e ) {\n            AffinitySuggestation.this.mClerk.tracer().warn(\n                    String.format(\"[Fatality] (%s : %s) <Continue>\", e.getClass().getSimpleName(), e.getMessage())\n            );\n            throw new ContinueException( e );\n        }\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/ArchClerk.java",
    "content": "package com.sauron.shadow.chronicle;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.hydra.auto.Instructation;\nimport com.sauron.heist.heistron.HTTPCrew;\nimport com.sauron.heist.heistron.HTTPHeist;\n\npublic abstract class ArchClerk extends HTTPCrew implements Clerk {\n    protected Instructation       mAffinityPrimeDirective;\n    protected JSONObject          mConfig;\n\n    @JSONGet( \"__proto__.NewsDataTable\" )\n    protected String              mszNewsDataTable;\n\n    public ArchClerk( HTTPHeist heist, int id, JSONObject joConfig ) {\n        super( heist, id );\n\n        this.mConfig                 = joConfig;\n        this.mAffinityPrimeDirective = new AffinitySuggestation( this );\n    }\n\n    public ArchClerk( HTTPHeist heist, int id, JSONObject joConfig, Class<?> childType ) {\n        this( heist, id, joConfig );\n        this.autoInject( ArchClerk.class );\n        this.autoInject( childType );\n    }\n\n    @Override\n    public ChronicleHeist parentHeist() {\n        return (ChronicleHeist)super.parentHeist();\n    }\n\n    @Override\n    public JSONObject getConfig() {\n        return this.mConfig;\n    }\n\n    @Override\n    public Instructation getPrimeDirective() {\n        return this.mAffinityPrimeDirective;\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Chronicle.java",
    "content": "package com.sauron.shadow.chronicle;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.shadow.chronicle.dao.BasicChronicleManipulator;\nimport org.apache.ibatis.session.SqlSession;\n\npublic interface Chronicle extends Heistum {\n    IbatisClient getPrimaryDataIbatisClient();\n\n    SqlSession getPrimarySharedSqlSession();\n\n    BasicChronicleManipulator getBasicChronicleManipulator();\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/ChronicleHeist.java",
    "content": "package com.sauron.shadow.chronicle;\n\nimport com.pinecone.framework.system.NonNull;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.slime.jelly.source.ibatis.SoloSessionMapperPool;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.sauron.heist.heistron.CascadeHeist;\nimport com.sauron.heist.heistron.Crew;\nimport com.sauron.heist.heistron.HTTPHeist;\nimport com.sauron.heist.heistron.Heistgram;\nimport com.sauron.heist.heistron.chronic.PeriodicHeistRehearsal;\nimport com.sauron.shadow.chronicle.dao.BasicChronicleManipulator;\nimport org.apache.ibatis.session.SqlSession;\n\nimport java.util.Map;\n\n\n//@Heistlet( \"Chronicle\" )\npublic class ChronicleHeist extends HTTPHeist implements Chronicle {\n    protected PeriodicHeistRehearsal       mPeriodicHeistKernel;\n\n    @JSONGet( \"PrimaryRDB\" )\n    protected String                       mszPrimaryRDBName   ;\n\n    protected IbatisClient                 mPrimaryDataIbatisClient ;\n\n    protected SqlSession                   mPrimarySharedSqlSession;\n\n    @Structure\n    protected BasicChronicleManipulator    mBasicChronicleManipulator;\n\n\n    public ChronicleHeist( Heistgram heistron ){\n        super( heistron );\n        this.initSelf();\n    }\n\n    public ChronicleHeist( Heistgram heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n        this.initSelf();\n    }\n\n    public ChronicleHeist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) {\n        super( heistron, parent, szChildName );\n        this.initSelf();\n    }\n\n    protected void initSelf() {\n        if( this.isSlave() ) {\n            
this.mPeriodicHeistKernel     = new ChroniclePeriodicHeistKernel( this );\n            this.parentSystem().getPrimaryConfigScope().autoInject( ChronicleHeist.class, this.getConfig(), this );\n            this.mPrimaryDataIbatisClient = (IbatisClient) this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( this.mszPrimaryRDBName );\n            this.mPrimarySharedSqlSession = this.mPrimaryDataIbatisClient.openSession( true );\n            this.prepareChildrenConfig();\n            this.prepareScopeDAOManipulator();\n            this.parentSystem().getPrimaryConfigScope().autoConstruct( ChronicleHeist.class, this.getConfig(), this );\n        }\n    }\n\n    protected void prepareChildrenConfig() {\n        JSONObject jo = this.mPeriodicHeistKernel.getRaiderConfigs();\n\n        for( Object o : jo.entrySet() ) {\n            Map.Entry kv  = (Map.Entry) o;\n\n            JSONObject jc = (JSONObject)kv.getValue();\n            jc.put( \"__proto__\", this.getConfig() ); // Jesus, no needs to override... 
Just using prototype chain.\n        }\n    }\n\n    protected void prepareScopeDAOManipulator() {\n        this.mPrimaryDataIbatisClient.addDataAccessObjectScope( this.getClass().getPackageName() );\n\n        this.parentSystem().getDispenserCenter().getInstanceDispenser().register(\n                BasicChronicleManipulator.class,\n                new SoloSessionMapperPool( this.mPrimarySharedSqlSession, BasicChronicleManipulator.class )\n        );\n        //this.mBasicChronicleManipulator = this.mPrimarySharedSqlSession.getMapper( BasicChronicleManipulator.class );\n    }\n\n    @Override\n    public IbatisClient getPrimaryDataIbatisClient() {\n        return this.mPrimaryDataIbatisClient;\n    }\n\n    @Override\n    public SqlSession getPrimarySharedSqlSession() {\n        return this.mPrimarySharedSqlSession;\n    }\n\n    @Override\n    public BasicChronicleManipulator getBasicChronicleManipulator(){\n        return this.mBasicChronicleManipulator;\n//        return new BasicChronicleManipulator() {\n//            @Override\n//            public void insertOneNews(String szTableName, String szObjectName, String szDateTime, String szNewsIndex) {\n//\n//            }\n//        };\n    }\n\n    @Override\n    public Crew newCrew( int nCrewId ) {\n        return new ChronicleReaver( this, nCrewId );\n    }\n\n    @Override\n    public void toRavage(){\n        ChronicleHeist.this.infoLifecycle( \"Chronicle Heist Vitalization\",\"Vitalized\" );\n\n//        ZhihuClerk clerk = new ZhihuClerk(\n//                this, 1999, this.mPeriodicHeistKernel.getRaiderConfigs().optJSONObject( \"Zhihu\" )\n//        );\n//        clerk.toRavage();\n//        clerk.toRavage();\n//        clerk.toRavage();\n        try{\n            this.mPeriodicHeistKernel.joinVitalize();\n        }\n        catch ( InterruptedException e ) {\n            throw new ProxyProvokeHandleException( e );\n        }\n\n        ChronicleHeist.this.infoLifecycle( \"Chronicle Heist 
Termination\",\"Terminated\" );\n    }\n\n    @Override\n    public void toStalk(){\n\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/ChroniclePeriodicHeistKernel.java",
    "content": "package com.sauron.shadow.chronicle;\n\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.heist.heistron.chronic.ArchPeriodicHeistRehearsal;\n\npublic class ChroniclePeriodicHeistKernel extends ArchPeriodicHeistRehearsal {\n    public ChroniclePeriodicHeistKernel( Heistum heistum, boolean bDaemon ) {\n        super( heistum, bDaemon );\n    }\n\n    public ChroniclePeriodicHeistKernel( Heistum heistum ) {\n        this( heistum, false );\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/ChronicleReaver.java",
    "content": "package com.sauron.shadow.chronicle;\n\nimport com.sauron.heist.heistron.HTTPCrew;\nimport com.sauron.heist.heistron.HTTPHeist;\nimport com.sauron.heist.heistron.LootRecoveredException;\nimport com.sauron.heist.heistron.Reaver;\nimport com.sauron.heist.heistron.LootAbortException;\n\nimport java.io.IOException;\n\npublic class ChronicleReaver extends HTTPCrew implements Reaver {\n    public ChronicleReaver( HTTPHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException, IOException {\n        //Page retryPage = this.queryHTTPPageSafe(new Request(\"https://www.artstation.com/sitemap.xml\"));\n\n        //Debug.trace( retryPage.getRawText() );\n        //this.terminate();\n    }\n\n    @Override\n    public void toRavage() {\n        this.startBatchTask();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Clerk.java",
    "content": "package com.sauron.shadow.chronicle;\n\nimport com.pinecone.framework.system.homotype.StereotypicInjector;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.sauron.heist.heistron.chronic.Raider;\n\nimport java.time.LocalDateTime;\nimport java.time.format.DateTimeFormatter;\n\n/**\n *  Bean Nuts Hazelnut Sauron Tritium - Sauron`s Shadow For Java, Clerk [史官, 书记]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Cooperate with the chronicle system for periodic crawler to retrieve data.\n *  配合编年史系统，面向周期性爬虫数据取回\n *  *****************************************************************************************\n *  Dragon King, the undefined\n */\npublic interface Clerk extends Raider {\n    JSONObject getConfig();\n\n    default StereotypicInjector autoInject( Class<?> stereotype ) {\n        return this.parentSystem().getPrimaryConfigScope().autoInject(\n                stereotype, this.getConfig(), this\n        );\n    }\n\n    default String nowDateTime() {\n        return LocalDateTime.now().format( DateTimeFormatter.ofPattern( \"yyyy-MM-dd HH:mm:ss\" ) );\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Newstron/BaiduClerk.java",
    "content": "package com.sauron.shadow.chronicle.Newstron;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.sauron.heist.heistron.HTTPHeist;\nimport com.sauron.shadow.chronicle.ArchClerk;\nimport com.sauron.shadow.chronicle.Clerk;\nimport org.jsoup.nodes.Document;\nimport org.jsoup.nodes.Element;\nimport org.jsoup.nodes.Node;\nimport us.codecraft.webmagic.Page;\n\npublic class BaiduClerk extends ArchClerk implements Clerk {\n    @JSONGet( \"TopHref\" )\n    protected String mszTopHref;\n\n    public BaiduClerk( HTTPHeist heist, int id, JSONObject joConfig ){\n        super( heist, id, joConfig, BaiduClerk.class );\n    }\n\n    @Override\n    public void toRavage() {\n        JSONObject jBuiduToIndex = new JSONMaptron();\n        try{\n            this.parseBaiduTopIndex( jBuiduToIndex );\n            this.parentHeist().getBasicChronicleManipulator().insertOneNews(\n                    this.mszNewsDataTable, \"BaiduTop\", this.nowDateTime(), StringUtils.addSlashes( jBuiduToIndex.toJSONString() )\n            );\n            //Debug.trace( jBuiduToIndex );\n        }\n        catch ( IllegalStateException e ) {\n            this.tracer().warn( String.format(\"[Fatality] (%s : %s) <Continue>\", e.getClass().getSimpleName(), e.getMessage()) );\n        }\n    }\n\n    protected void parseBaiduTopIndex( JSONObject jIndex ) throws IllegalStateException {\n        try {\n            Page httpPage = this.getHTTPPage( this.mszTopHref );\n\n            Document document   = httpPage.getHtml().getDocument();\n            Element rootElement = document.getElementById( \"sanRoot\" ); // API sanRoot 20221127\n            if ( rootElement != null ) {\n                Node firstChild = rootElement.childNode( 0 );\n                if ( firstChild.nodeName().equals(\"#comment\") ) {\n                   
 String szInner = firstChild.toString();\n                    int nJsonAt = szInner.indexOf(\"s-data:\");\n                    if ( nJsonAt != -1 ) {\n                        nJsonAt += 7;\n                        jIndex.jsonDecode( szInner.substring( nJsonAt ).trim() );\n                        return;\n                    }\n                }\n            }\n        }\n        catch ( Exception e ) {\n            throw new IllegalStateException( \"IllegalStateException: CompromisedParseBaiduTop\", e );\n        }\n\n        throw new IllegalStateException( \"IllegalStateException: CompromisedParseBaiduTop\" );\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Newstron/CNNClerk.java",
    "content": "package com.sauron.shadow.chronicle.Newstron;\n\nimport com.pinecone.framework.util.Randomium;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.sauron.heist.heistron.HTTPHeist;\nimport com.sauron.shadow.chronicle.ArchClerk;\nimport com.sauron.shadow.chronicle.Clerk;\nimport us.codecraft.webmagic.Page;\n\npublic class CNNClerk extends ArchClerk implements Clerk {\n    @JSONGet( \"NewsHref\" )\n    protected String mszNewsHref;\n\n    @JSONGet( \"TopN\" )\n    protected int mnTopN;\n\n    @JSONGet( \"request_id\" )\n    protected String mszRequestId;\n\n    public CNNClerk( HTTPHeist heist, int id, JSONObject joConfig ){\n        super( heist, id, joConfig, CNNClerk.class );\n    }\n\n    @Override\n    public void toRavage() {\n        JSONObject jIndex = new JSONMaptron();\n        this.parseCNNIndex( jIndex );\n        this.parentHeist().getBasicChronicleManipulator().insertOneNews(\n                this.mszNewsDataTable, \"CNNNewsTop\" + this.mnTopN, this.nowDateTime(), StringUtils.addSlashes( jIndex.toJSONString() )\n        );\n        //Debug.trace( jIndex );\n    }\n\n    protected void parseCNNIndex0( JSONObject jIndex ) throws IllegalStateException {\n        String szHref     = String.format( this.mszNewsHref, this.mnTopN, ( new Randomium() ).nextString( 8 ) );\n        Page httpPage     = this.getHTTPPage( szHref );\n        jIndex.clear();\n        jIndex.jsonDecode( httpPage.getRawText() );\n        jIndex.eliminateExcepts( \"result\" );\n    }\n\n    protected void parseCNNIndex( JSONObject jIndex ) throws IllegalStateException {\n        try {\n            this.parseCNNIndex0( jIndex );\n        }\n        catch ( Exception e ) {\n            try {\n                this.parseCNNIndex0( jIndex );\n            }\n            catch ( Exception e1 ) {\n             
   throw new IllegalStateException( \"IllegalStateException: CompromisedParseCNN\", e );\n            }\n        }\n    }\n\n\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Newstron/GoogleClerk.java",
    "content": "package com.sauron.shadow.chronicle.Newstron;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.sauron.heist.heistron.HTTPHeist;\nimport com.sauron.shadow.chronicle.ArchClerk;\nimport com.sauron.shadow.chronicle.Clerk;\nimport org.jsoup.nodes.Document;\nimport org.jsoup.nodes.Element;\nimport org.jsoup.select.Elements;\nimport us.codecraft.webmagic.Page;\n\nimport java.text.Normalizer;\n\npublic class GoogleClerk extends ArchClerk implements Clerk {\n    @JSONGet( \"NewsHref\" )\n    protected String mszNewsHref;\n\n    @JSONGet( \"TopN\" )\n    protected int mnTopN;\n\n    public GoogleClerk( HTTPHeist heist, int id, JSONObject joConfig ){\n        super( heist, id, joConfig, GoogleClerk.class );\n    }\n\n    @Override\n    public void toRavage() {\n        JSONObject jGoogleIndex = new JSONMaptron();\n        this.fetchGoogleNewsIndexTopN( jGoogleIndex );\n        this.parentHeist().getBasicChronicleManipulator().insertOneNews(\n                this.mszNewsDataTable, \"GoogleNewsTop\" + this.mnTopN + \"Pages\", this.nowDateTime(), StringUtils.addSlashes( jGoogleIndex.toJSONString() )\n        );\n        //Debug.trace( jGoogleIndex );\n    }\n\n\n    public static String googleStringNormalize( String sz ) {\n        if ( sz == null || sz.isEmpty() ) {\n            return sz;\n        }\n\n        int nStemFrontAt = 0;\n        int nStemEndAt   = sz.length() - 1;\n\n        while ( nStemFrontAt < sz.length() && Character.isWhitespace(sz.charAt(nStemFrontAt)) ) {\n            nStemFrontAt++;\n        }\n\n        while ( nStemEndAt > nStemFrontAt && Character.isWhitespace(sz.charAt(nStemEndAt)) ) {\n            nStemEndAt--;\n        }\n\n        if ( nStemFrontAt > nStemEndAt ) {\n            return \"\";\n        }\n\n        String trimmedString = sz.substring( 
nStemFrontAt, nStemEndAt + 1 );\n        return Normalizer.normalize( trimmedString, Normalizer.Form.NFC );\n    }\n\n    protected void fetchGoogleNewsIndexTopN( JSONObject jIndex ) {\n        for ( int i = 0; i < this.mnTopN; ++i ) {\n            this.parseGoogleNewsIndexSinglePage( jIndex, i );\n        }\n    }\n\n    protected void parseGoogleNewsIndexSinglePage( JSONObject jIndex, int nPageId ) {\n        String szHrefById = this.mszNewsHref + ( nPageId * 10 );\n        try {\n            Page httpPage = this.getHTTPPage( szHrefById );\n            Document document = httpPage.getHtml().getDocument();\n            Element lpList = document.getElementById( \"search\" );\n            if ( lpList != null ) {\n                Elements children = lpList.children();\n                if( children.size() == 1 ) {\n                    children = children.get(0).children();\n                    if( children.size() == 1 || children.size() == 2 ) {\n                        if( children.size() == 2 && children.get(0).tagName().toLowerCase().equals( \"h1\" ) ) {\n                            children = children.get(1).children();\n                        }\n                        else {\n                            children = children.get(0).children();\n                        }\n\n                        if( children.size() == 1 ) {\n                            children = children.get(0).children();\n                            if( children.size() == 1 ) {\n                                children = children.get(0).children();\n                            }\n                        }\n                    }\n                }\n\n                int nNews = nPageId * 10;\n                for ( Element lpChild : children ) {\n                    Elements aNodes = lpChild.getElementsByTag( \"a\" );\n                    if( aNodes.size() == 1 ) {\n                        Element aNode  = aNodes.get(0);\n                        Elements nexts = aNode.children();\n              
          if( !nexts.isEmpty() ){\n                            nexts = nexts.get(0).children();\n                            if( nexts.size() == 2 ) {\n                                Element contentDiv = nexts.get( 1 ); // The final content.\n\n                                JSONObject jNews = new JSONMaptron();\n                                jNews.put( \"id\", nNews );\n                                jNews.put( \"href\", aNode.attr(\"href\") );\n\n                                Elements divElements = contentDiv.children();\n                                int nDiv = 0;\n                                for ( Element divElement : divElements ) {\n                                    if ( divElement.tagName().equals(\"div\") ) {\n                                        if ( nDiv == 0 ) {\n                                            jNews.put( \"source\", GoogleClerk.googleStringNormalize( divElement.text() ) );\n                                        }\n                                        else if ( nDiv == 1 ) {\n                                            jNews.put( \"title\", GoogleClerk.googleStringNormalize( divElement.text() ) );\n                                        }\n                                        else if ( nDiv == 2 ) {\n                                            jNews.put( \"abstract\", GoogleClerk.googleStringNormalize( divElement.text() ) );\n                                        }\n                                        else if ( nDiv == 4 ) {\n                                            jNews.put( \"timeSpan\", GoogleClerk.googleStringNormalize( divElement.text() ) );\n                                        }\n                                        ++nDiv;\n                                    }\n                                }\n                                jIndex.append( \"data\", jNews );\n                                ++nNews;\n                            }\n                        }\n                    }\n          
      }\n            }\n        }\n        catch ( Exception e ) {\n            this.tracer().warn(String.format( \"[Fatality] (%s : %s : %d) <Continue>\", e.getClass().getSimpleName(), e.getMessage(), nPageId) );\n        }\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/Newstron/ZhihuClerk.java",
    "content": "package com.sauron.shadow.chronicle.Newstron;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.hydra.auto.Instructation;\nimport com.sauron.heist.heistron.HTTPHeist;\nimport com.sauron.shadow.chronicle.ArchClerk;\nimport com.sauron.shadow.chronicle.Clerk;\n\npublic class ZhihuClerk extends ArchClerk implements Clerk {\n    @JSONGet( \"HotlineApi\" )\n    protected String mszHotlineApi;\n\n    @JSONGet( \"Global.IndexFrom\" )\n    protected int mnIndexFrom;\n\n    @JSONGet( \"Global.IndexTo\" )\n    protected int mnIndexTo;\n\n    @JSONGet( \"Global.IndexStep\" )\n    protected int mnIndexStep;\n\n    public ZhihuClerk( HTTPHeist heist, int id, JSONObject joConfig ){\n        super( heist, id, joConfig, ZhihuClerk.class );\n    }\n\n    @Override\n    public void toRavage() {\n        JSONObject jZhihuIndex = new JSONMaptron();\n        this.fetchZhihuByRange( this.mnIndexFrom, this.mnIndexTo, this.mnIndexStep, jZhihuIndex );\n\n        this.parentHeist().getBasicChronicleManipulator().insertOneNews(\n                this.mszNewsDataTable, \"ZhihuTop\" + this.mnIndexStep, this.nowDateTime(), StringUtils.addSlashes( jZhihuIndex.toJSONString() )\n        );\n        //Debug.trace( jZhihuIndex );\n    }\n\n    protected void fetchZhihuByRange( int nFrom, int nTo, int nStep, JSONObject jIndex ) {\n        String szApi = this.mszHotlineApi; // Zhihu v4 api\n        int nItems   = nTo - nFrom;\n        int nRound   = nItems / nStep;\n        int nMoving  = 0;\n        if ( nRound * nStep < nItems ) {\n            ++nRound;\n        }\n\n        if ( nRound == 1 ) {\n            szApi = this.mszHotlineApi + \"&limit=\" + nStep + \"&offset=\" + nFrom + \"&period=hour\";\n            try {\n                String szHtml = 
this.getHTTPPage( szApi, false ).getRawText();\n                jIndex.jsonDecode( szHtml );\n            }\n            catch ( Exception e ) {\n                this.tracer().warn( String.format(\"[Fatality] (%s : %s) <Continue>\", e.getClass().getSimpleName(), e.getMessage()) );\n            }\n        }\n        else {\n            for ( int i = 0; i < nRound; ++i ) {\n                JSONObject jEach = new JSONMaptron();\n                int    nStepPace = nStep;\n                if ( nMoving + nStep > nItems ) {\n                    nStepPace = nMoving + nStep - nItems;\n                }\n\n                szApi = this.mszHotlineApi + \"&limit=\" + nStepPace + \"&offset=\" + nMoving + \"&period=hour\";\n                try {\n                    String szHtml = this.getHTTPPage( szApi, false ).getRawText();\n                    jEach.jsonDecode(szHtml);\n                    JSONArray   data = jIndex.optJSONArray(\"data\");\n                    for ( int j = 0; j < data.length(); ++j ) {\n                        data.put( data.getJSONObject( j ) );\n                    }\n                    jIndex.put(\"paging\", jEach.getJSONObject(\"paging\"));\n                    nMoving += nStep;\n                }\n                catch ( Exception e ) {\n                    this.tracer().warn( String.format(\"[Fatality] (%s : %s) <Continue>\", e.getClass().getSimpleName(), e.getMessage()) );\n                }\n            }\n        }\n    }\n\n    @Override\n    public Instructation getPrimeDirective() {\n        return this.mAffinityPrimeDirective;\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/SimpleAjaxBasedClerk.java",
    "content": "package com.sauron.shadow.chronicle;\n\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.auto.Instructation;\nimport com.sauron.heist.heistron.HTTPHeist;\nimport com.sauron.heist.heistron.orchestration.Instructations;\n\npublic class SimpleAjaxBasedClerk extends ArchClerk implements Clerk {\n    protected JSONArray           mTasks;\n\n    public SimpleAjaxBasedClerk( HTTPHeist heist, int id, JSONObject joConfig ){\n        super( heist, id, joConfig, SimpleAjaxBasedClerk.class );\n\n        this.mTasks                  = this.mConfig.optJSONArray( \"Tasks\" );\n    }\n\n    @Override\n    public void toRavage() {\n        Instructations.infoConformed( SimpleAjaxBasedClerk.this );\n        for( Object o : this.mTasks ) {\n            JSONObject jo       = (JSONObject) o;\n            String szObjectName = jo.optString( \"ObjectName\" );\n            String szApi        = jo.optString( \"Api\" );\n\n            try{\n                String szNewsIndex = this.getHTTPPage( szApi ).getRawText();\n                JSONObject tmp     = new JSONMaptron( szNewsIndex );\n                this.parentHeist().getBasicChronicleManipulator().insertOneNews(\n                        this.mszNewsDataTable, szObjectName, this.nowDateTime(), StringUtils.addSlashes( tmp.toJSONString() )\n                );\n                //Debug.trace( this.getHTTPPage( szApi ).getRawText() );\n            }\n            catch ( Exception e ) {\n                SimpleAjaxBasedClerk.this.tracer().warn(\n                        String.format(\"[Fatality] (%s::%s : %s) <Continue>\", szObjectName, e.getClass().getSimpleName(), e.getMessage())\n                );\n            }\n        }\n    }\n\n    @Override\n    public Instructation getPrimeDirective() {\n        return this.mAffinityPrimeDirective;\n    
}\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/chronicle/dao/BasicChronicleManipulator.java",
    "content": "package com.sauron.shadow.chronicle.dao;\n\nimport com.pinecone.slime.jelly.source.ibatis.IbatisDataAccessObject;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Param;\n\n@IbatisDataAccessObject( scope = \"MySQLKingData0\" )\npublic interface BasicChronicleManipulator {\n\n    @Insert( \"INSERT INTO ${tableName} ( `object_name`, `date_time`, `news_index` ) VALUES ( '${object_name}', '${date_time}', '${news_index}' )\" )\n    void insertOneNews(\n            @Param( \"tableName\" ) String szTableName, @Param( \"object_name\" )\n            String szObjectName, @Param( \"date_time\" ) String szDateTime, @Param( \"news_index\" ) String szNewsIndex\n    );\n\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Apesk/ApeskHeist.java",
    "content": "package com.sauron.shadow.heists.Apesk;\n\nimport com.pinecone.ulf.rdb.mysql.MySQLExecutor;\nimport com.sauron.heist.heistron.Crew;\nimport com.sauron.heist.heistron.HTTPIndexHeist;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.Heistgram;\n\npublic class ApeskHeist extends HTTPIndexHeist {\n    protected MySQLExecutor mysql;\n\n    public ApeskHeist( Heistgram heistron ){\n        super( heistron );\n        this.init();\n    }\n\n    public ApeskHeist( Heistgram heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n        this.init();\n    }\n\n    @Override\n    protected void init() {\n        super.init();\n\n    }\n\n    @Override\n    public Crew newCrew( int nCrewId ) {\n        return new ApeskReaver( this, nCrewId ) ;\n    }\n\n    @Override\n    public String queryHrefById ( long id ) {\n        return this.heistURL + this.getConfig().optString( \"SubHref\" ) + id;\n    }\n\n    @Override\n    public void toRavage(){\n        super.toRavage();\n    }\n\n    @Override\n    public void toStalk(){\n        ( new ApeskStalker( this, 0 ) ).toStalk();\n    }\n\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Apesk/ApeskReaver.java",
    "content": "package com.sauron.shadow.heists.Apesk;\n\n\nimport com.sauron.heist.heistron.HTTPIndexHeist;\nimport com.sauron.heist.heistron.MegaDOMIndexCrew;\nimport com.sauron.heist.heistron.Reaver;\nimport org.jsoup.nodes.Element;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\npublic class ApeskReaver extends MegaDOMIndexCrew implements Reaver {\n    public ApeskReaver( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    protected Page compressSoloMBTIArchive( Page page, Request request ) {\n        Element typeInfo  = page.getHtml().getDocument().selectFirst(\".results .type-info\");\n        Element segRow    = page.getHtml().getDocument().selectFirst(\".results .row\");\n        String szRawPage  = \"\";\n        if( typeInfo != null ) {\n            szRawPage += \"<div class='type-info'>\" + typeInfo.html() + \"</div>\\n\";\n        }\n        else {\n            this.logger.info(\"NoTypeInfo\");\n        }\n        if( segRow != null ) {\n            szRawPage += \"<div class='row'>\" + segRow.html() + \"</div>\\n\";\n        }\n        else {\n            this.logger.info(\"NoSegRow\");\n        }\n\n        int id = request.getExtra(\"id\");\n        szRawPage += this.fetchCompressSoloMBTIArchiveExRawPage( id );\n        return this.parentHeist().extendPage( szRawPage, page );\n    }\n\n    protected String fetchCompressSoloMBTIArchiveExRawPage( long id ) {\n        String newUrl = this.heistURL + \"/mbti/submit_email_date_cx_m.asp?code=223.73.241.5&user=\" + id;\n        Request request = new Request( newUrl );\n        request.setCharset( \"gb2312\" );\n        Page page = this.queryHTTPPageSafe( request );\n        Element rawInfo  = page.getHtml().getDocument().selectFirst(\"table[align='center'][border='0']\");\n\n        if( rawInfo != null ) {\n            return  \"<div class='raw-info'>\" + ( rawInfo.html() ) + \"</div>\\n\";\n        }\n        else {\n            
this.logger.info(\"NoRawInfo\");\n        }\n\n        return \"\";\n    }\n\n    @Override\n    protected Page afterPageFetched( Page page, Request request ){\n        return this.compressSoloMBTIArchive( page, request );\n    }\n\n    @Override\n    public String querySpoilStoragePath( long id ) {\n        return this.querySpoilStorageDir( id ) + \"page_\" + id + \".html\";\n    }\n\n    @Override\n    public void toRavage() {\n        this.startBatchTask();\n    }\n\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Apesk/ApeskStalker.java",
    "content": "package com.sauron.shadow.heists.Apesk;\n\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.sauron.heist.heistron.*;\n\npublic class ApeskStalker extends HTTPCrew implements Stalker {\n    protected int mutualID;\n\n    protected String mszQueryCookie = \"\";\n\n    protected JSONObject mjoConfig      ;\n\n    public ApeskStalker( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n        this.mjoConfig      = this.parentHeist().getConfig();\n        this.mszQueryCookie = this.mjoConfig.optString( \"QueryCookie\" );\n    }\n\n    @Override\n    protected void tryConsumeById( long index ) throws LootRecoveredException, LootAbortException, IllegalStateException {\n//        try{\n//            Debug.trace( new String( this.getHTTPFile( \"https://rednest.cn\" ).getBytes(), \"UTF8\" ) );\n//        }\n//        catch ( exception e ) {\n//\n//        }\n    }\n\n\n    @Override\n    public void toStalk() {\n\n    }\n}"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/ArtStation/ArtStationHeist.java",
    "content": "package com.sauron.shadow.heists.ArtStation;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.*;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.ulf.rdb.mysql.MySQLExecutor;\nimport com.pinecone.ulf.rdb.mysql.MySQLHost;\n\nimport java.sql.SQLException;\n\npublic class ArtStationHeist extends HTTPIndexHeist {\n    protected MySQLExecutor mysql;\n\n    public ArtStationHeist( Heistotron heistron ){\n        super( heistron );\n        this.init();\n    }\n\n    public ArtStationHeist(Heistotron heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n        this.init();\n    }\n\n    @Override\n    protected void init() {\n        super.init();\n        try{\n            this.mysql = new MySQLExecutor( new MySQLHost(\n                    \"192.168.1.177:33062/nonaron\",\n                    \"root\",\n                    \"root\"\n            ));\n        }\n        catch ( SQLException e ) {\n            this.handleKillException( e );\n        }\n    }\n\n    @Override\n    public Crew newCrew( int nCrewId ) {\n        return new ArtStationReaver( this, nCrewId ) ;\n    }\n\n\n    @Override\n    public String queryHrefById ( long id ) {\n        try {\n            JSONArray ja = this.mysql.fetch( \"SELECT href FROM nona_pubchem_sitemap_idx WHERE mutual_id =\" + id );\n            return ja.getJSONObject( 0 ).getString( \"href\" );\n        }\n        catch ( SQLException | JSONException e ) {\n            this.handleAliveException( e );\n        }\n        return \"\";\n    }\n\n    @Override\n    public void toRavage(){\n        super.toRavage();\n    }\n\n    @Override\n    public void toStalk(){\n        ( new ArtStationStalker( this, 0 ) ).toStalk();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/ArtStation/ArtStationReaver.java",
    "content": "package com.sauron.shadow.heists.ArtStation;\n\nimport com.sauron.heist.heistron.*;\nimport org.jsoup.nodes.Document;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\npublic class ArtStationReaver extends MegaDOMIndexCrew implements Reaver {\n    public ArtStationReaver( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    protected Page afterPageFetched( Page page, Request request ){\n        Document document = page.getHtml().getDocument();\n        String id  = document.select( \"meta[name='ncbi_pubchem_cid']\" ).attr( \"content\" );\n        String seg = document.select( \"meta[name='pubchem_uid_name']\" ).attr( \"content\" );\n\n        String newUrl = this.heistURL + \"/rest/pug_view/data/\"+ seg +\"/\" + id + \"/JSON/\";\n        return this.queryHTTPPageSafe( ( new Request( newUrl ) ).putExtra( \"id\", id ) );\n    }\n\n    @Override\n    public String querySpoilStoragePath( long id ) {\n        return this.querySpoilStorageDir( id ) + \"page_\" + id + \".json\";\n    }\n\n\n    @Override\n    public void toRavage() {\n        this.startBatchTask();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/ArtStation/ArtStationStalker.java",
    "content": "package com.sauron.shadow.heists.ArtStation;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.sauron.heist.heistron.*;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONObject;\nimport org.jsoup.select.Elements;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\nimport java.io.File;\nimport java.util.Map;\n\npublic class ArtStationStalker extends HTTPCrew implements Stalker {\n    protected int mutualID;\n\n    protected String mszQueryCookie = \"\";\n\n    public ArtStationStalker( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n        this.mszQueryCookie = this.parentHeist().getConfig().optString( \"QueryCookie\" );\n    }\n\n    @Override\n    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException {\n//        try{\n//            Debug.trace( new String( this.getHTTPFile( \"https://rednest.cn\" ).getBytes(), \"UTF8\" ) );\n//        }\n//        catch ( exception e ) {\n//\n//        }\n    }\n\n    protected void stalk_inlet_index() {\n        JSONObject joSiteMaps = this.parentHeist().getConfig().optJSONObject( \"SiteMaps\" );\n        this.mutualID = 1;\n\n        for( Object ok : joSiteMaps.entrySet() ) {\n            Map.Entry k = (Map.Entry) ok;\n            this.stalk_sub_site_map( k.getKey().toString(), (JSONObject) k.getValue() );\n        }\n        //this.stalk_sub_site_map( \"annotation\", joSiteMaps.optJSONObject(\"annotation\") );\n    }\n\n    protected void stalk_sub_site_map( String szSeg, JSONObject jo ) {\n        String szIndexPath = this.parentHeist().getIndexPath();\n        File fSegFileDir = new File( szIndexPath );\n        fSegFileDir.mkdir();\n\n        String szSegFile = szIndexPath + \"/\" + szSeg + \".xml\";\n        Page cachePage;\n        String href = jo.optString( \"href\" );\n        cachePage = this.queryHTTPPage( new Request(href).addHeader( \"Cookie\", 
this.mszQueryCookie ), szSegFile );\n\n        Elements elements = cachePage.getHtml().getDocument().select( \"loc\" );\n\n        JSONObject joIndexList = new JSONMaptron();\n        for ( int i = 0; i < elements.size(); i++ ) {\n            String szItemHref = elements.get(i).text();\n            String[] debris   = szItemHref.split( this.heistURL + \"/\" );\n            String szItemFN   = debris[1];\n            String szSegment  = \"artists\";\n            if( szItemFN.contains( \"artists\" ) ) {\n                szSegment = \"artists\";\n            }\n            else if( szItemFN.contains( \"artworks\" ) ) {\n                szSegment = \"artworks\";\n            }\n            else {\n                continue;\n            }\n            joIndexList.affirmArray( szSegment ).put( szItemFN );\n\n            String szLocalPath = szIndexPath + szItemFN;\n            cachePage = this.queryHTTPPage( new Request(szItemHref).addHeader( \"Cookie\", this.mszQueryCookie ), szLocalPath );\n            if( cachePage.getStatusCode() != 200 ) {\n                this.logger.error( \"<FetchIndexError:{}, {}, {}>\", i, szItemHref, cachePage.getStatusCode() );\n            }\n            else {\n                this.logger.info( \"<FetchIndexDone:{}, {}, {}>\", i, szItemHref, cachePage.getBytes().length );\n            }\n        }\n\n        Debug.trace( joIndexList.size() );\n    }\n\n    protected void profileSiteMap() {\n        this.stalk_inlet_index();\n    }\n\n    @Override\n    public void toStalk() {\n        this.profileSiteMap();\n    }\n}"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DeviantArt/DeviantArtHeist.java",
    "content": "package com.sauron.shadow.heists.DeviantArt;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.*;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.ulf.rdb.mysql.MySQLExecutor;\nimport com.pinecone.ulf.rdb.mysql.MySQLHost;\n\nimport java.sql.SQLException;\n\npublic class DeviantArtHeist extends HTTPIndexHeist {\n    protected MySQLExecutor mysql;\n\n    public DeviantArtHeist( Heistotron heistron ){\n        super( heistron );\n        this.init();\n    }\n\n    public DeviantArtHeist(Heistotron heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n        this.init();\n    }\n\n    @Override\n    protected void init() {\n        super.init();\n        try{\n            this.mysql = new MySQLExecutor( new MySQLHost(\n                    \"192.168.1.177:33062/nonaron\",\n                    \"root\",\n                    \"root\"\n            ));\n        }\n        catch ( SQLException e ) {\n            this.handleKillException( e );\n        }\n    }\n\n    @Override\n    public Crew newCrew( int nCrewId ) {\n       return new DeviantArtReaver( this, nCrewId );\n    }\n\n    @Override\n    public String queryHrefById ( long id ) {\n        try {\n            JSONArray ja = this.mysql.fetch( \"SELECT href FROM nona_pubchem_sitemap_idx WHERE mutual_id =\" + id );\n            return ja.getJSONObject( 0 ).getString( \"href\" );\n        }\n        catch ( SQLException | JSONException e ) {\n            this.handleAliveException( e );\n        }\n        return \"\";\n    }\n\n    @Override\n    public void toRavage(){\n        super.toRavage();\n    }\n\n    @Override\n    public void toStalk(){\n        ( new DeviantArtStalker( this, 0 ) ).toStalk();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DeviantArt/DeviantArtReaver.java",
    "content": "package com.sauron.shadow.heists.DeviantArt;\n\nimport com.sauron.heist.heistron.*;\nimport org.jsoup.nodes.Document;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\npublic class DeviantArtReaver extends MegaDOMIndexCrew implements Reaver {\n    public DeviantArtReaver( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    protected Page afterPageFetched( Page page, Request request ){\n        Document document = page.getHtml().getDocument();\n        String id  = document.select( \"meta[name='ncbi_pubchem_cid']\" ).attr( \"content\" );\n        String seg = document.select( \"meta[name='pubchem_uid_name']\" ).attr( \"content\" );\n\n        String newUrl = this.heistURL + \"/rest/pug_view/data/\"+ seg +\"/\" + id + \"/JSON/\";\n        return this.queryHTTPPageSafe( ( new Request( newUrl ) ).putExtra( \"id\", id ) );\n    }\n\n    @Override\n    public String querySpoilStoragePath( long id ) {\n        return this.querySpoilStorageDir( id ) + \"page_\" + id + \".json\";\n    }\n\n\n    @Override\n    public void toRavage() {\n        this.startBatchTask();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DeviantArt/DeviantArtStalker.java",
    "content": "package com.sauron.shadow.heists.DeviantArt;\n\nimport com.sauron.heist.heistron.*;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.io.FileUtils;\nimport com.pinecone.framework.util.json.JSONObject;\nimport org.jsoup.nodes.Element;\nimport org.jsoup.select.Elements;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\nimport java.io.IOException;\n\npublic class DeviantArtStalker extends HTTPCrew implements Stalker {\n    protected int mutualID;\n\n    protected String mszQueryCookie = \"\";\n\n    protected JSONObject mjoConfig      ;\n\n    public DeviantArtStalker( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n        this.mjoConfig      = this.parentHeist().getConfig();\n        this.mszQueryCookie = this.mjoConfig.optString( \"QueryCookie\" );\n    }\n\n    @Override\n    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException {\n//        try{\n//            Debug.trace( new String( this.getHTTPFile( \"https://rednest.cn\" ).getBytes(), \"UTF8\" ) );\n//        }\n//        catch ( exception e ) {\n//\n//        }\n    }\n\n    protected void stalk_inlet_index() {\n        String szIndexPath   = this.parentHeist().getIndexPath();\n        String szGZIndexPath = szIndexPath + \"/RawGZ/\";\n        String szInletSMLocal = szIndexPath + this.mjoConfig.optString( \"InletSitemap\" );\n        try{\n            Page page = this.parentHeist().extendPage( FileUtils.readAll( szInletSMLocal ), new Request(\"\") );\n            Elements elements = page.getHtml().getDocument().select( \"loc\" );\n            String szMajorHref = this.heistURL + \"/sitemaps/\";\n\n            for ( int i = 0; i < elements.size(); i++ ) {\n                Element loc = elements.get(i);\n                String szLocHref = loc.text();\n                String[] debris = szLocHref.split( szMajorHref );\n                String szGZFN = debris[1];\n\n   
             String szGZLocalPath = szGZIndexPath + szGZFN;\n                this.queryHTTPPage( new Request( szLocHref ), szGZLocalPath );\n\n                Debug.trace( szGZLocalPath, i );\n            }\n        }\n        catch ( IOException e ) {\n            this.handleKillException( e );\n        }\n    }\n\n    protected void profileSiteMap() {\n        this.stalk_inlet_index();\n    }\n\n    @Override\n    public void toStalk() {\n        this.profileSiteMap();\n    }\n}"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DownloadCNet/DownloadCNetHeist.java",
    "content": "package com.sauron.shadow.heists.DownloadCNet;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.Crew;\nimport com.sauron.heist.heistron.HTTPIndexHeist;\nimport com.sauron.heist.heistron.Heistotron;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.ulf.rdb.mysql.MySQLExecutor;\nimport com.pinecone.ulf.rdb.mysql.MySQLHost;\n\nimport java.sql.SQLException;\n\npublic class DownloadCNetHeist extends HTTPIndexHeist {\n    protected MySQLExecutor mysql;\n\n    public DownloadCNetHeist(Heistotron heistron ){\n        super( heistron );\n        this.init();\n    }\n\n    public DownloadCNetHeist(Heistotron heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n        this.init();\n    }\n\n    @Override\n    protected void init() {\n        super.init();\n        try{\n            this.mysql = new MySQLExecutor( new MySQLHost(\n                    \"node1.nutgit.com:13393/nonaron\",\n                    \"root\",\n                    \"root\"\n            ));\n        }\n        catch ( SQLException e ) {\n            this.handleKillException( e );\n        }\n    }\n\n    @Override\n    public Crew newCrew( int nCrewId ) {\n        return new DownloadCNetReaver( this, nCrewId );\n    }\n\n    @Override\n    public String queryHrefById ( long id ) {\n        try {\n            JSONArray ja = this.mysql.fetch( \"SELECT href FROM nona_download_cnet_idx WHERE mutual_id =\" + id );\n            return ja.getJSONObject( 0 ).getString( \"href\" );\n        }\n        catch ( SQLException | JSONException e ) {\n            this.handleAliveException( e );\n        }\n        return \"\";\n    }\n\n    @Override\n    public void toRavage(){\n        super.toRavage();\n    }\n\n    @Override\n    public void toStalk(){\n        ( new DownloadCNetStalker( this, 0 ) ).toStalk();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DownloadCNet/DownloadCNetReaver.java",
    "content": "package com.sauron.shadow.heists.DownloadCNet;\n\nimport com.sauron.heist.heistron.HTTPIndexHeist;\nimport com.sauron.heist.heistron.MegaDOMIndexCrew;\nimport com.sauron.heist.heistron.Reaver;\nimport org.jsoup.nodes.Element;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\npublic class DownloadCNetReaver extends MegaDOMIndexCrew implements Reaver {\n\n    public DownloadCNetReaver(HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    protected Page compressSoloArchive( Page page, Request request ) {\n        Element infoJSON  = page.getHtml().getDocument().selectFirst(\"script[data-hid='ld+json']\");\n        Element mainPage  = page.getHtml().getDocument().selectFirst(\".c-layoutDefault_page .c-scrollPercent\");\n        Element megaJSON  = page.getHtml().getDocument().selectFirst(\"body script:nth-child(2)\");\n        String szRawPage  = \"\";\n        if( infoJSON != null ) {\n            szRawPage += \"<script id='baseInfoJson'>\" + infoJSON.html() + \"</script>\\n\";\n        }\n        else {\n            this.logger.info(\"NoFirstJSON\");\n        }\n        if( mainPage != null ) {\n            szRawPage += \"<div id='mainPage'>\" + mainPage.html() + \"</div>\\n\";\n        }\n        else {\n            this.logger.info(\"NoMainPage\");\n        }\n        if( megaJSON != null ) {\n            szRawPage += \"<script id='megaInfoJson'>\" + megaJSON.html() + \"</script>\\n\";\n        }\n        else {\n            this.logger.info(\"NoMegaJSON\");\n        }\n\n        return this.parentHeist().extendPage( szRawPage, page );\n    }\n\n    @Override\n    protected Page afterPageFetched( Page page, Request request ){\n        return this.compressSoloArchive( page, request );\n    }\n\n    @Override\n    public String querySpoilStoragePath( long id ) {\n        return this.querySpoilStorageDir( id ) + \"page_\" + id + \".html\";\n    }\n\n\n    @Override\n    public void toRavage() {\n        
this.startBatchTask();\n    }\n\n\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/DownloadCNet/DownloadCNetStalker.java",
    "content": "package com.sauron.shadow.heists.DownloadCNet;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.sauron.heist.heistron.*;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.io.FileUtils;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.rdb.MappedSQLSplicer;\nimport org.jsoup.select.Elements;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\nimport java.io.File;\nimport java.io.FileWriter;\nimport java.io.IOException;\n\npublic class DownloadCNetStalker extends HTTPCrew implements Stalker {\n    protected int mutualID;\n\n    public DownloadCNetStalker(HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException {}\n\n    protected void stalk_inlet_index() {\n        JSONObject joSiteMaps = this.parentHeist().getConfig().optJSONObject( \"SiteMaps\" );\n        this.mutualID = 1;\n        this.stalk_sub_site_map( \"products\", joSiteMaps.optJSONObject(\"products\") );\n    }\n\n    protected void stalk_sub_site_map( String szSeg, JSONObject jo ) {\n        String szSegFileDir = this.parentHeist().getIndexPath() + szSeg;\n        File fSegFileDir = new File( szSegFileDir );\n        boolean mkdir = fSegFileDir.mkdir();\n\n        String szSegFile = szSegFileDir + \"\\\\\" + szSeg + \"_main.xml\";\n        Page cachePage;\n        File fSegFile = new File( szSegFile );\n\n        try {\n            String href = jo.optString( \"href\" );\n            if( !fSegFile.exists() ) {\n                cachePage = this.getHTTPPage( href );\n                FileWriter fw = new FileWriter( fSegFile );\n                fw.write( cachePage.getRawText() );\n                fw.close();\n            }\n            else {\n                String cache = FileUtils.readAll( szSegFile );\n                cachePage = 
this.parentHeist().extendPage( cache, new Request( href ) );\n            }\n\n            Elements elements = cachePage.getHtml().getDocument().select( \"loc\" );\n\n            File fSQLIndex = new File( szSegFileDir + \"/\" + szSeg + \".sql\" );\n            FileWriter fSQL = new FileWriter( fSQLIndex );\n            MappedSQLSplicer sqlSplicer = new MappedSQLSplicer();\n\n            int topicId = 1;\n            for ( int i = 0; i < elements.size(); i++ ) {\n                String szFN = String.format( \"%s/%s_%d.xml\", szSegFileDir, szSeg, i );\n                cachePage = this.getHTTPPage( elements.get(i).text(), szFN );\n                Elements subEles = cachePage.getHtml().getDocument().select( \"loc\" );\n                StringBuilder sqlBuf = new StringBuilder();\n                for ( int j = 0; j < subEles.size(); j++ ) {\n                    String szHref = subEles.get(j).text();\n                    if( szHref.length() > 333 ) {\n                        continue;\n                    }\n\n                    JSONObject thisSQLMap = new JSONMaptron();\n                    thisSQLMap.put( \"heist\", this.crewName() );\n                    thisSQLMap.put( \"href\", szHref );\n                    thisSQLMap.put( \"mutual_id\", this.mutualID );\n                    thisSQLMap.put( \"topic\", szSeg );\n                    thisSQLMap.put( \"topic_id\", topicId );\n\n                    sqlBuf.append( sqlSplicer.spliceInsertSQL( \"nona_download_cnet_idx\", thisSQLMap.getMap(), false ) );\n                    sqlBuf.append( \";\\n\" );\n                    ++topicId;\n                    ++this.mutualID;\n                }\n                fSQL.write( sqlBuf.toString() );\n\n                Debug.trace( i );\n            }\n            fSQL.close();\n        }\n        catch ( IOException e ){\n            e.printStackTrace();\n        }\n    }\n\n    protected void profileSiteMap() {\n        this.stalk_inlet_index();\n    }\n\n    @Override\n    public 
void toStalk() {\n        this.profileSiteMap();\n    }\n}"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/PubChem/PubChemHeist.java",
    "content": "package com.sauron.shadow.heists.PubChem;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.*;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.ulf.rdb.mysql.MySQLExecutor;\nimport com.pinecone.ulf.rdb.mysql.MySQLHost;\n\nimport java.sql.SQLException;\n\npublic class PubChemHeist extends HTTPIndexHeist {\n    protected MySQLExecutor mysql;\n\n    public PubChemHeist( Heistotron heistron ){\n        super( heistron );\n        this.init();\n    }\n\n    public PubChemHeist(Heistotron heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n        this.init();\n    }\n\n    @Override\n    protected void init() {\n        super.init();\n        try{\n            this.mysql = new MySQLExecutor( new MySQLHost(\n                    \"b-serverkingpin:33062/nonaron\",\n                    \"root\",\n                    \"root\"\n            ));\n        }\n        catch ( SQLException e ) {\n            this.handleKillException( e );\n        }\n    }\n\n    @Override\n    public Crew newCrew( int nCrewId ) {\n        return new PubChemReaver( this, nCrewId ) ;\n    }\n\n    @Override\n    public String queryHrefById( long id ) {\n        try {\n            JSONArray ja = this.mysql.fetch( \"SELECT href FROM nona_pubchem_sitemap_idx WHERE mutual_id =\" + id );\n            return ja.getJSONObject( 0 ).getString( \"href\" );\n        }\n        catch ( SQLException | JSONException e ) {\n            this.handleAliveException( e );\n        }\n        return \"\";\n    }\n\n    @Override\n    public void toRavage(){\n        super.toRavage();\n    }\n\n    @Override\n    public void toStalk(){\n        ( new PubChemStalker( this, 0 ) ).toStalk();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/PubChem/PubChemReaver.java",
    "content": "package com.sauron.shadow.heists.PubChem;\n\nimport com.sauron.heist.heistron.*;\nimport org.jsoup.nodes.Document;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\npublic class PubChemReaver extends MegaDOMIndexCrew implements Reaver {\n    public PubChemReaver ( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    protected Page afterPageFetched( Page page, Request request ){\n        Document document = page.getHtml().getDocument();\n        String id  = document.select( \"meta[name='ncbi_pubchem_cid']\" ).attr( \"content\" );\n        String seg = document.select( \"meta[name='pubchem_uid_name']\" ).attr( \"content\" );\n\n        String newUrl = this.heistURL + \"/rest/pug_view/data/\"+ seg +\"/\" + id + \"/JSON/\";\n        return this.queryHTTPPageSafe( ( new Request( newUrl ) ).putExtra( \"id\", id ) );\n    }\n\n    @Override\n    public String querySpoilStoragePath( long id ) {\n        return this.querySpoilStorageDir( id ) + \"page_\" + id + \".json\";\n    }\n\n\n    @Override\n    public void toRavage() {\n        this.startBatchTask();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/PubChem/PubChemStalker.java",
    "content": "package com.sauron.shadow.heists.PubChem;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.sauron.heist.heistron.*;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.framework.util.io.FileUtils;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.rdb.MappedSQLSplicer;\nimport org.jsoup.select.Elements;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\nimport java.io.File;\nimport java.io.FileWriter;\nimport java.io.IOException;\nimport java.util.Map;\n\npublic class PubChemStalker extends HTTPCrew implements Stalker {\n    protected int mutualID;\n\n    public PubChemStalker ( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException {\n//        try{\n//            Debug.trace( new String( this.getHTTPFile( \"https://rednest.cn\" ).getBytes(), \"UTF8\" ) );\n//        }\n//        catch ( exception e ) {\n//\n//        }\n    }\n\n    protected void stalk_inlet_index() {\n        JSONObject joSiteMaps = this.parentHeist().getConfig().optJSONObject( \"SiteMaps\" );\n        this.mutualID = 1;\n\n        for( Object ok : joSiteMaps.entrySet() ) {\n            Map.Entry k = (Map.Entry) ok;\n            this.stalk_sub_site_map( k.getKey().toString(), (JSONObject) k.getValue() );\n        }\n        //this.stalk_sub_site_map( \"annotation\", joSiteMaps.optJSONObject(\"annotation\") );\n    }\n\n    protected void stalk_sub_site_map( String szSeg, JSONObject jo ) {\n        String szSegFileDir = this.parentHeist().getIndexPath() + szSeg;\n        File fSegFileDir = new File( szSegFileDir );\n        fSegFileDir.mkdir();\n\n        String szSegFile = szSegFileDir + \"/\" + szSeg + \"_main.xml\";\n        Page cachePage;\n        File fSegFile = new File( szSegFile 
);\n        try {\n            String href = jo.optString( \"href\" );\n            if( !fSegFile.exists() ) {\n                cachePage = this.getHTTPPage( href );\n                FileWriter fw = new FileWriter( fSegFile );\n                fw.write( cachePage.getRawText() );\n                fw.close();\n            }\n            else {\n                String cache = FileUtils.readAll( szSegFile );\n                cachePage = this.parentHeist().extendPage( cache, new Request( href ) );\n            }\n\n            Elements elements = cachePage.getHtml().getDocument().select( \"loc\" );\n\n            File fSQLIndex = new File( szSegFileDir + \"/\" + szSeg + \".sql\" );\n            FileWriter fSQL = new FileWriter( fSQLIndex );\n            MappedSQLSplicer sqlSplicer = new MappedSQLSplicer();\n\n            int topicId = 1;\n            for ( int i = 0; i < elements.size(); i++ ) {\n                String szFN = String.format( \"%s/%s_%d.xml\", szSegFileDir, szSeg, i );\n                cachePage = this.getHTTPPage( elements.get(i).text(), szFN );\n                Elements subEles = cachePage.getHtml().getDocument().select( \"loc\" );\n                StringBuilder sqlBuf = new StringBuilder();\n                for ( int j = 0; j < subEles.size(); j++ ) {\n                    JSONObject thisSQLMap = new JSONMaptron();\n                    thisSQLMap.put( \"heist\", this.crewName() );\n                    thisSQLMap.put( \"href\", StringUtils.addSlashes( subEles.get(j).text() ) );\n                    thisSQLMap.put( \"mutual_id\", this.mutualID );\n                    thisSQLMap.put( \"topic\", szSeg );\n                    thisSQLMap.put( \"topic_id\", topicId );\n\n                    sqlBuf.append( sqlSplicer.spliceInsertSQL( \"nona_pubchem_sitemap_idx\", thisSQLMap.getMap(), false ) );\n                    sqlBuf.append( \";\\n\" );\n                    ++topicId;\n                    ++this.mutualID;\n                }\n                fSQL.write( 
sqlBuf.toString() );\n\n\n                Debug.trace( i );\n            }\n            fSQL.close();\n        }\n        catch ( IOException e ){\n            e.printStackTrace();\n        }\n    }\n\n    protected void profileSiteMap() {\n        this.stalk_inlet_index();\n    }\n\n    @Override\n    public void toStalk() {\n        this.profileSiteMap();\n    }\n}"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Steam/SteamHeist.java",
    "content": "package com.sauron.shadow.heists.Steam;\n\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.*;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONException;\nimport com.pinecone.ulf.rdb.mysql.MySQLExecutor;\nimport com.pinecone.ulf.rdb.mysql.MySQLHost;\n\nimport java.sql.SQLException;\n\npublic class SteamHeist extends HTTPIndexHeist {\n    protected MySQLExecutor mysql;\n\n    public SteamHeist( Heistotron heistron ){\n        super( heistron );\n        this.init();\n    }\n\n    public SteamHeist(Heistotron heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n        this.init();\n    }\n\n    @Override\n    protected void init() {\n        super.init();\n        try{\n            this.mysql = new MySQLExecutor( new MySQLHost(\n                    \"node1.nutgit.com:13393/nonaron\",\n                    \"root\",\n                    \"root\"\n            ));\n        }\n        catch ( SQLException e ) {\n            this.handleKillException( e );\n        }\n    }\n\n    @Override\n    public Crew newCrew( int nCrewId ) {\n        return new SteamReaver( this, nCrewId );\n    }\n\n    protected String queryInletHref( long id ) {\n        return this.heistURL + \"/search/?ndl=1&ignore_preferences=1&page=\" + id;\n    }\n\n    @Override\n    public String queryHrefById( long id ) {\n        if( this.getInstanceName().equals( \"FetchInletList\" ) ) {\n            return this.queryInletHref( id );\n        }\n        else {\n            try {\n                JSONArray ja = this.mysql.fetch( \"SELECT href FROM nona_steam_game_idx WHERE mutual_id =\" + id );\n                return ja.getJSONObject( 0 ).getString( \"href\" );\n            }\n            catch ( SQLException | JSONException e ) {\n                this.handleAliveException( e );\n            }\n        }\n        return \"\";\n    }\n\n    @Override\n    public void toRavage(){\n    
    super.toRavage();\n    }\n\n    @Override\n    public void toStalk(){\n        //( new SteamStalker( this ) ).toStalk();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Steam/SteamReaver.java",
    "content": "package com.sauron.shadow.heists.Steam;\n\nimport com.sauron.heist.heistron.*;\nimport org.jsoup.select.Elements;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\npublic class SteamReaver extends MegaDOMIndexCrew implements Reaver {\n    public SteamReaver(HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    protected Page afterPageFetched( Page page, Request request ){\n        String[] cookieGroup = {\n                \"wants_mature_content=1; steamCountry=US%7C6200f47c9b62892472b38bf7bbfd9a20; browserid=3066434021590310219; sessionid=d3260d1c80b4ff080f3c3641; timezoneOffset=28800,0; _ga=GA1.2.650079156.1685022988; _gid=GA1.2.697747378.1685022988; ak_bmsc=2CA539E4F2635E8FB79CF2F744843296~000000000000000000000000000000~YAAQXY0duFuJ2y+IAQAABpQyUxPg33L/2piDSim4d/G5+YQl4fuLirFFGlXtPSRxbh3xoU0Ohnb8FyHO30d/nuLiVKiOV2X6drabWJZ1UjnodRMJLWqKYWPNZjaKf1ZLQGkHflTxg5qaAAz+dS389vPGWWM53jZvD8ZZbYsOucK3oWJoRL+I7nJhwS6k0+JZVEckl3Al3V7gvx4shiDTHmTZ/z8+dTnpFpf/fWRCWVFWRWExB/VWDNDFJInXrVTEIwcBQ4wSWRFkJfW4d/S5JQl2QSDHyqaHpgW1va2vAICYX/GKFR/lxrgXVm2LBLT6UqFv1BSx/UzJfsZZm2mCxktV2FKaASRYt3pUcfGVBXlNrZ2LCtj72mNrJ78FKD/50SWSNS72; steamLoginSecure=76561199447520905%7C%7CeyAidHlwIjogIkpXVCIsICJhbGciOiAiRWREU0EiIH0.eyAiaXNzIjogInI6MEQzNV8yMjk2RTFFM19BQ0JDMSIsICJzdWIiOiAiNzY1NjExOTk0NDc1MjA5MDUiLCAiYXVkIjogWyAid2ViIiBdLCAiZXhwIjogMTY4NTExMDc5MSwgIm5iZiI6IDE2NzYzODMwMDMsICJpYXQiOiAxNjg1MDIzMDAzLCAianRpIjogIjBEMzFfMjI5NkUxREZfQUQ3MDQiLCAib2F0IjogMTY4NTAyMzAwMiwgInJ0X2V4cCI6IDE3MDI4OTc1NjIsICJwZXIiOiAwLCAiaXBfc3ViamVjdCI6ICI3NC4xMjEuMTg4LjIyMSIsICJpcF9jb25maXJtZXIiOiAiNzQuMTIxLjE4OC4yMjEiIH0.hzq-8liTaMgNVPoLOzeFmmjRIiSgjMwhsFYlBrFEC37Q3QSQ6sC1xbSYY3tLlh9DL5VUDfF05bA59M03sx_8Bg; recentapps=%7B%22981160%22%3A1685023036%7D; birthtime=28828801; lastagecheckage=1-0-1964\",\n        };\n        Elements age = page.getHtml().getDocument().select(\"#app_agegate\");\n        Elements login = 
page.getHtml().getDocument().select(\"#error_box\");\n        if( age.size()==0&&login.size()==0 ) {\n            return page;\n        }\n        else{\n            for( int i = 0; i < cookieGroup.length; ++i ){\n                Page retryPage = this.queryHTTPPageSafe( new Request(page.getRequest().getUrl()).addHeader(\"cookie\",cookieGroup[i]) );\n                if( retryPage.getHtml().getDocument().select(\"#app_agegate\").size() == 0 ){\n                    return retryPage;\n                }\n            }\n            return page;\n        }\n    }\n\n    @Override\n    public String querySpoilStoragePath( long id ) {\n        return this.querySpoilStorageDir( id ) + \"page_\" + id + \".html\";\n    }\n\n\n    @Override\n    public void toRavage() {\n        this.startBatchTask();\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Void/VoidHeist.java",
    "content": "package com.sauron.shadow.heists.Void;\n\nimport com.pinecone.framework.system.NonNull;\nimport com.pinecone.framework.system.Nullable;\nimport com.sauron.heist.heistron.CascadeHeist;\nimport com.sauron.heist.heistron.Crew;\nimport com.sauron.heist.heistron.HTTPIndexHeist;\nimport com.sauron.heist.heistron.Heistgram;\nimport com.pinecone.framework.util.config.JSONConfig;\n\n//@Heistlet( \"Void\" )\npublic class VoidHeist extends HTTPIndexHeist {\n    public VoidHeist( Heistgram heistron ){\n        super( heistron );\n    }\n\n    public VoidHeist( Heistgram heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n    }\n\n    public VoidHeist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) {\n        super( heistron, parent, szChildName );\n    }\n\n    @Override\n    public Crew newCrew( int nCrewId ) {\n        VoidReaver reaver = new VoidReaver( this, nCrewId );\n        //this.heistPool.submit( reaver );\n        return reaver;\n    }\n\n    @Override\n    public void toRavage(){\n        super.toRavage();\n    }\n\n    @Override\n    public void toStalk(){\n\n    }\n}\n"
  },
  {
    "path": "Saurons/Shadow/src/main/java/com/sauron/shadow/heists/Void/VoidReaver.java",
    "content": "package com.sauron.shadow.heists.Void;\n\nimport com.sauron.heist.heistron.*;\n\nimport java.io.IOException;\n\npublic class VoidReaver extends MegaDOMIndexCrew implements Reaver {\n    public VoidReaver( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException, IOException {\n        //Page retryPage = this.queryHTTPPageSafe(new Request(\"https://www.artstation.com/sitemap.xml\"));\n\n        //Debug.trace( retryPage.getRawText() );\n        //this.terminate();\n    }\n\n    @Override\n    public void toRavage() {\n        this.startBatchTask();\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>saurons</artifactId>\n        <groupId>com.saurons</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <groupId>com.sauron.heist</groupId>\n    <artifactId>heist-framework-architecture</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            
<artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.sauron.core</groupId>\n            <artifactId>sauron-core</artifactId>\n            <version>1.2.7</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.jsoup</groupId>\n            <artifactId>jsoup</artifactId>\n            <version>1.15.4</version>\n        </dependency>\n\n        <!-- https://mvnrepository.com/artifact/us.codecraft/webmagic-core -->\n        <dependency>\n            <groupId>us.codecraft</groupId>\n            <artifactId>webmagic-core</artifactId>\n            <version>0.8.0</version>\n        </dependency>\n\n        <!-- https://mvnrepository.com/artifact/us.codecraft/webmagic-extension -->\n        <dependency>\n            <groupId>us.codecraft</groupId>\n            <artifactId>webmagic-extension</artifactId>\n            <version>0.8.0</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/CascadeHeist.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.framework.util.name.UniNamespace;\nimport com.sauron.heist.heistron.orchestration.Hierarchy;\n\npublic interface CascadeHeist extends Heistum {\n    String HeistNSSeparator = \"::\";\n\n    CascadeHeist parent();\n\n    default boolean isRoot() {\n        return this.parent() == null;\n    }\n\n    default CascadeHeist root() {\n        CascadeHeist p = this;\n        CascadeHeist c = p;\n        while ( p != null ) {\n            c = p;\n            p = p.parent();\n        }\n\n        return c;\n    }\n\n    Hierarchy getHierarchy();\n\n    default boolean isMaster() {\n        return this.getHierarchy() == Hierarchy.Master;\n    }\n\n    default boolean isSlave() {\n        return this.getHierarchy() == Hierarchy.Slave;\n    }\n\n    Namespace getHeistNamespace();\n\n    default String getInstanceFullName() {\n        return this.getHeistNamespace().getFullName();\n    }\n\n    default String getInstanceName() {\n        return this.getHeistNamespace().getName();\n    }\n\n\n    static Namespace newNamespace( String szSegmentName, @Nullable CascadeHeist parent ) {\n        Namespace p = null;\n        if( parent != null ) {\n            p = parent.getHeistNamespace();\n        }\n        return new UniNamespace( szSegmentName, p, CascadeHeist.HeistNSSeparator );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/ConfigNotFoundException.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.framework.system.PineRuntimeException;\n\npublic class ConfigNotFoundException extends PineRuntimeException {\n    public ConfigNotFoundException() {\n        super();\n    }\n\n    public ConfigNotFoundException( String message ) {\n        super( message );\n    }\n\n    public ConfigNotFoundException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    public ConfigNotFoundException( Throwable cause ) {\n        super(cause);\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Crew.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.tritium.system.MissionTerminateException;\nimport com.pinecone.tritium.system.TritiumSystem;\nimport com.pinecone.tritium.system.StorageSystem;\n\nimport org.slf4j.Logger;\n\npublic interface Crew extends Crewnium {\n    String crewName();\n\n    Heistum parentHeist();\n\n    void validateSpoil( String sz );\n\n    void isTimeToFeast();\n\n    default void terminate(){\n        throw new MissionTerminateException();\n    }\n\n    void startBatchTask();\n\n    @Override\n    TritiumSystem parentSystem();\n\n    StorageSystem getStorageSystem();\n\n    Logger tracer();\n}"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Crewnium.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.sauron.system.Saunut;\nimport com.pinecone.framework.system.executum.Executum;\n\npublic interface Crewnium extends Runnable, Executum, Saunut {\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Embezzler.java",
    "content": "package com.sauron.heist.heistron;\n\n/**\n *  Bean Nuts Hazelnut Sauron Tritium For Java, Embezzler [洗钱者]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Focus on batch crawler data processing\n *  面向批处理爬虫数据处理\n *  *****************************************************************************************\n *  Dragon King, the undefined\n */\npublic interface Embezzler extends Crew {\n    void toEmbezzle();\n\n    @Override\n    default void isTimeToFeast(){\n        this.toEmbezzle();\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistConfigConstants.java",
    "content": "package com.sauron.heist.heistron;\n\npublic final class HeistConfigConstants {\n\n    public static final String KeyConfigScope     = \"ConfigScope\";\n    public static final String KeyLocalConfigs    = \"LocalConfigs\";\n    public static final String KeyTemplatedConfig = \"TemplatedConfig\";\n    public static final String KeyHeistsTable     = \"Heists\";\n    public static final String KeyComponents      = \"Components\";\n    public static final String KeyHttpBrowser     = \"HttpBrowser\";\n\n\n    public final class Heistum {\n        public static final String KeyHeistURL    = \"HeistURL\";\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistException.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class HeistException extends Exception implements Pinenut {\n\n    public HeistException    () {\n        super();\n    }\n\n    public HeistException    ( String message ) {\n        super(message);\n    }\n\n    public HeistException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public HeistException    ( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistExecutionException.java",
    "content": "package com.sauron.heist.heistron;\n\npublic class HeistExecutionException extends HeistException {\n\n    public HeistExecutionException    () {\n        super();\n    }\n\n    public HeistExecutionException    ( String message ) {\n        super(message);\n    }\n\n    public HeistExecutionException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public HeistExecutionException    ( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistOrchestrateException.java",
    "content": "package com.sauron.heist.heistron;\n\npublic class HeistOrchestrateException extends HeistException {\n\n    public HeistOrchestrateException    () {\n        super();\n    }\n\n    public HeistOrchestrateException    ( String message ) {\n        super(message);\n    }\n\n    public HeistOrchestrateException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public HeistOrchestrateException    ( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistScheme.java",
    "content": "package com.sauron.heist.heistron;\n\nimport java.util.Map;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.MultiScopeMap;\nimport com.pinecone.framework.util.config.JSONConfig;\n\npublic interface HeistScheme extends Pinenut {\n\n    JSONConfig getInstanceConfigByName( String name );\n\n    /**\n     * getInstanceConfigByName\n     * @param name ( Child instance name, which will extents the parent scope, and get its instance config of this child. )\n     *             ( The `null` is the current scope, [this] )\n     * @param bRecursive ( Override all object and list, if that key which its child doesnt`t had. )\n     * @return Instance Config\n     */\n    JSONConfig getInstanceConfigByName( @Nullable String name, boolean bRecursive );\n\n    void overrideSegment ( Map<String, Object > parentProto, Map<String, Object > instance );\n\n\n    HeistScheme reinterpret( JSONConfig that );\n\n    MultiScopeMap<String, Object > getHeistScope();\n\n    JSONConfig getProtoConfig();\n\n    Heistgram getHeistgram();\n\n    Heistum getParentHeist();\n\n    JSONConfig getTemplateHeistSchemeConfig();\n\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/HeistStatusTerminatedException.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class HeistStatusTerminatedException extends IllegalStateException implements Pinenut {\n\n    public HeistStatusTerminatedException    () {\n        super();\n    }\n\n    public HeistStatusTerminatedException    ( String message ) {\n        super(message);\n    }\n\n    public HeistStatusTerminatedException    ( String message, Throwable cause ) {\n        super(message, cause);\n    }\n\n    public HeistStatusTerminatedException    ( Throwable cause ) {\n        super(cause);\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Heistgram.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.hydra.servgram.Servgramium;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.sauron.heist.heistron.event.HeistLifecycleEventInterceptor;\nimport com.sauron.heist.heistron.orchestration.HeistletOrchestrator;\nimport com.pinecone.framework.system.executum.ExclusiveProcessum;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.orchestration.Hierarchy;\n\npublic interface Heistgram extends ExclusiveProcessum, Servgramium {\n    Hydrogen parentSystem();\n\n    JSONConfig getTemplateHeistSchemeConfig();\n\n    JSONConfig getLocalHeistsConfigList();\n\n    HeistletOrchestrator getHeistletOrchestrator();\n\n    JSONConfig queryHeistConfig ( String szHeistName );\n\n    JSONConfig getComponentsConfig();\n\n    String searchHeistName( Heistum that ) ;\n\n    Heistgram addLifecycleEventInterceptors( HeistLifecycleEventInterceptor interceptor );\n\n    Heistgram removeLifecycleEventInterceptors( HeistLifecycleEventInterceptor interceptor );\n\n    void notifyLifecycleEvent(Heistum heist, TaskInstanceStatus instanceStatus, Hierarchy hierarchy );\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Heistium.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.sauron.heist.heistron.orchestration.Taskium;\nimport com.sauron.heist.heistron.scheduler.TaskProducer;\n\nimport java.util.concurrent.atomic.AtomicBoolean;\n\npublic interface Heistium extends Taskium {\n    Heistum getParentHeist();\n\n    TaskProducer getTaskProducer();\n\n    void joinStartMultiTasks();\n\n    void terminate();\n\n    AtomicBoolean queryTerminationSignal();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Heists.java",
    "content": "package com.sauron.heist.heistron;\n\npublic final class Heists {\n    public static String getCriterionNomenclatureName( Heistum heistum ) {\n        String szHeistName;\n        if( heistum instanceof CascadeHeist ) {\n            szHeistName = ((CascadeHeist) heistum).getInstanceFullName();\n        }\n        else {\n            szHeistName = heistum.heistName();\n        }\n\n        return szHeistName;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Heistum.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.hydra.servgram.AutoOrchestrator;\nimport com.pinecone.hydra.servgram.Servgramlet;\nimport com.sauron.heist.heistron.orchestration.ChildHeistInstanceModifier;\nimport com.sauron.heist.heistron.orchestration.ChildHeistOrchestrator;\nimport com.sauron.heist.heistron.orchestration.HeistletOrchestrator;\nimport com.sauron.system.Saunut;\nimport org.slf4j.Logger;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.slime.chunk.RangedPage;\n\npublic interface Heistum extends Saunut, Servgramlet {\n    String heistName();\n\n    @Override\n    default String gramName() {\n        return this.heistName();\n    }\n\n    @Override\n    default String getName() {\n        return this.taskName();\n    }\n\n    default String taskName() {\n        return this.getHeistium().getName();\n    }\n\n    ChildHeistInstanceModifier getChildHeistInstanceModifier();\n\n    void applyChildHeistInstanceModifier( ChildHeistInstanceModifier modifier );\n\n    ChildHeistOrchestrator getThisHeistletOrchestrator();\n\n    HeistletOrchestrator getGramHeistletOrchestrator();\n\n    @Override\n    JSONConfig getConfig();\n\n    JSONConfig getProtoConfig();\n\n    HeistScheme getHeistScheme();\n\n    Heistgram getHeistgram();\n\n    RangedPage getMasterTaskPage();\n\n    Heistium getHeistium();\n\n    Crew newCrew( int nCrewId ) ; // For Heistium to start the crew.\n\n    int getMaximumThread();\n\n    Logger tracer();\n\n    void terminate();\n\n    @Override\n    default void execute() throws Exception {\n        this.toHeist();\n    }\n\n    void toRavage();\n\n    void toStalk();\n\n    void toEmbezzle();\n\n    void toHeist() throws HeistException;\n\n\n\n    void handleAliveException( Exception e );\n\n    void handleKillException( Exception e ) throws IllegalStateException ;\n\n\n    String ConfigChildrenKey      = \"Children\";\n    String ConfigOrchestrationKey = 
AutoOrchestrator.ConfigOrchestrationKey;\n\n    String StatusStart            = \"Start\";\n    String StatusDone             = \"Done\";\n}"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/LootAbortException.java",
    "content": "package com.sauron.heist.heistron;\n\npublic class LootAbortException extends RuntimeException {\n    public LootAbortException() {\n        super();\n    }\n\n    public LootAbortException( String message ) {\n        super( message );\n    }\n\n    public LootAbortException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    @Override\n    public String toString() {\n        return \"[object LootAbortException]\";\n    }\n\n    public String prototypeName() {\n        return \"LootAbortException\";\n    }\n}"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/LootRecoveredException.java",
    "content": "package com.sauron.heist.heistron;\n\npublic class LootRecoveredException extends RuntimeException {\n    public LootRecoveredException() {\n        super();\n    }\n\n    public LootRecoveredException( String message ) {\n        super( message );\n    }\n\n    public LootRecoveredException( String message, Throwable cause ) {\n        super( message, cause );\n    }\n\n    @Override\n    public String toString() {\n        return \"[object LootRecoveredException]\";\n    }\n\n    public String prototypeName() {\n        return \"LootRecoveredException\";\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Metier.java",
    "content": "package com.sauron.heist.heistron;\n\npublic enum Metier {\n    REAVER    (\"Reaver\"),\n    STALKER   (\"Stalker\"),\n    EMBEZZLER (\"Embezzler\");\n\n    private final String value;\n    Metier( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n\n    public static String queryName( Metier type ) {\n        return type.getName();\n    }\n\n    public static Metier queryMetier( String sz ) {\n        return Metier.valueOf( sz.toUpperCase() );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Reaver.java",
    "content": "package com.sauron.heist.heistron;\n\n/**\n *  Bean Nuts Hazelnut Sauron Tritium For Java, Reaver [掠夺者]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Focus on batch crawler downloading and retrieving.\n *  面向批处理化爬虫数据取回\n *  *****************************************************************************************\n *  Dragon King, the undefined\n */\npublic interface Reaver extends Crew {\n    default void toRavage() {\n        this.startBatchTask();\n    }\n\n    @Override\n    default void isTimeToFeast(){\n        this.toRavage();\n    }\n}"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/Stalker.java",
    "content": "package com.sauron.heist.heistron;\n\n/**\n *  Bean Nuts Hazelnut Sauron Tritium For Java, Stalker [潜伏者]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Focus on batch crawler indexes sniffing.\n *  面向批量爬虫索引嗅探\n *  *****************************************************************************************\n *  Dragon King, the undefined\n */\npublic interface Stalker extends Crew {\n    void toStalk();\n\n    @Override\n    default void isTimeToFeast(){\n        this.toStalk();\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/MultiRaiderLoader.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.util.lang.MultiClassScopeLoader;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.hydra.servgram.filters.AnnotationValueFilter;\nimport com.pinecone.ulf.util.lang.MultiTraitClassLoader;\n\nimport java.util.List;\n\npublic interface MultiRaiderLoader extends MultiClassScopeLoader, MultiTraitClassLoader {\n    @Override\n    Class<? extends Raider > load( Name simpleName ) throws ClassNotFoundException ;\n\n    // Directly by it`s name.\n    @Override\n    Class<? extends Raider > loadByName( Name simpleName ) throws ClassNotFoundException ;\n\n    // Scanning class`s annotations, methods or others.\n    @Override\n    Class<? extends Raider > loadInClassTrait( Name simpleName ) throws ClassNotFoundException ;\n\n    @Override\n    MultiRaiderLoader updateScope();\n\n    void setAnnotationValueFilter( AnnotationValueFilter filter );\n\n    @Override\n    List<Class<? > > loads( Name name ) ;\n\n    @Override\n    List<Class<? > > loadsByName( Name simpleName );\n\n    @Override\n    List<Class<? > > loadsInClassTrait( Name simpleName ) ;\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/PeriodicHeist.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.sauron.heist.heistron.Heistum;\n\npublic interface PeriodicHeist extends Heistum {\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/PeriodicHeistKernel.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.sauron.heist.heistron.Heistgram;\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.system.Saunut;\n\nimport java.util.List;\n\npublic interface PeriodicHeistKernel extends Saunut {\n    Heistgram getHeistgram();\n\n    Heistum getParentHeist();\n\n    void vitalize();\n\n    void joinVitalize() throws InterruptedException;\n\n    List getPreloadPrefixes() ;\n\n    List getPreloadSuffixes() ;\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/PeriodicHeistRehearsal.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.auto.PeriodicAutomatron;\n\nimport java.util.List;\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic interface PeriodicHeistRehearsal extends PeriodicHeistKernel {\n    PeriodicAutomatron getAutomatron();\n\n    List<String > getRawChronicPeriods();\n\n    AtomicInteger getIndexId();\n\n    @Override\n    default void vitalize() {\n        this.getAutomatron().start();\n    }\n\n    @Override\n    default void joinVitalize() throws InterruptedException {\n        this.vitalize();\n        this.getAutomatron().join();\n    }\n\n    JSONObject getRaiderMarshalingConf();\n\n    JSONObject getRaiderConfigs();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/Raider.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.hydra.auto.Instructation;\nimport com.sauron.heist.heistron.Reaver;\n\n/**\n *  Bean Nuts Hazelnut Sauron Tritium For Java, Raider [突袭者]\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Focus on periodic or burst irregularity crawler downloading and retrieving.\n *  面向周期或无规律性突发爬虫数据取回\n *  *****************************************************************************************\n *  Dragon King, the undefined\n */\npublic interface Raider extends Reaver {\n    Instructation getPrimeDirective();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/RaiderFactory.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.framework.util.name.ScopeName;\nimport com.pinecone.ulf.util.lang.MultiScopeFactory;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.List;\n\npublic interface RaiderFactory extends MultiScopeFactory {\n    @Override\n    ClassLoader       getClassLoader();\n\n    @Override\n    ClassScope        getClassScope();\n\n    @Override\n    MultiRaiderLoader getTraitClassLoader();\n\n    @Override\n    default Raider spawn( String name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException {\n        return this.spawn( new ScopeName(name), stereotypes, args );\n    }\n\n    @Override\n    Raider spawn( Name name, Class<?>[] stereotypes, Object... args ) throws InvocationTargetException;\n\n    @Override\n    default Raider spawn( String name, Object... args ) throws InvocationTargetException {\n        return this.spawn( new ScopeName(name), args );\n    }\n\n    @Override\n    Raider spawn( Name name, Object... args ) throws InvocationTargetException;\n\n    @Override\n    default List<Raider > popping( String name, Class<?>[] stereotypes, Object... args ) {\n        return this.popping( new ScopeName(name), stereotypes, args );\n    }\n\n    @Override\n    List<Raider > popping( Name name, Class<?>[] stereotypes, Object... args );\n\n    @Override\n    default List<Raider > popping( String name, Object... args ) {\n        return this.popping( new ScopeName(name), args );\n    }\n\n    @Override\n    List<Raider > popping( Name name, Object... args );\n\n}"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/chronic/Raiderlet.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport java.lang.annotation.Documented;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\nimport java.lang.annotation.ElementType;\n\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Raiderlet {\n    String ValueKey = \"value\";\n\n    String value() default \"\";\n}"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/event/HeistLifecycleEventInterceptor.java",
    "content": "package com.sauron.heist.heistron.event;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.heist.heistron.orchestration.Hierarchy;\n\npublic interface HeistLifecycleEventInterceptor extends Pinenut {\n\n    void afterLifecycleEventTriggered( String name, Heistum heist, TaskInstanceStatus instanceStatus, Hierarchy hierarchy );\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/mapreduce/SchemeQuerier.java",
    "content": "package com.sauron.heist.heistron.mapreduce;\n\nimport com.pinecone.slime.map.AlterableQuerier;\n\npublic interface SchemeQuerier<V > extends AlterableQuerier<V > {\n    default boolean hasOwnProperty( Object k ) {\n        return this.containsKey( k );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/mapreduce/TaskScheme.java",
    "content": "package com.sauron.heist.heistron.mapreduce;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface TaskScheme extends Pinenut {\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/ChildHeistInstanceModifier.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.sauron.heist.heistron.CascadeHeist;\n\npublic interface ChildHeistInstanceModifier extends Pinenut {\n\n    void modify( CascadeHeist heistum ) ;\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/ChildHeistOrchestrator.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.hydra.servgram.ServgramOrchestrator;\nimport com.sauron.heist.heistron.CascadeHeist;\nimport com.sauron.heist.heistron.Heistgram;\nimport com.sauron.heist.heistron.Heistium;\nimport com.sauron.system.Saunut;\n\npublic interface ChildHeistOrchestrator extends Saunut, ServgramOrchestrator {\n    CascadeHeist getHeist();\n\n    Heistium getHeistium();\n\n    Heistgram getHeistgram();\n\n    int nextAutoIncrementTaskId();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/Heistlet.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport java.lang.annotation.*;\n\n@Target({ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\n@Documented\npublic @interface Heistlet {\n    String ValueKey = \"value\";\n\n    String value() default \"\";\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/HeistletOrchestrator.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.hydra.servgram.ServgramOrchestrator;\nimport com.sauron.heist.heistron.Heistgram;\nimport com.sauron.system.Saunut;\n\nimport java.util.List;\n\npublic interface HeistletOrchestrator extends Saunut, ServgramOrchestrator {\n    Heistgram getHeistgram();\n\n    List getPreloadPrefixes() ;\n\n    List getPreloadSuffixes() ;\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/Hierarchy.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\npublic enum Hierarchy {\n    Master  ( \"Master\" ),\n    Slave   ( \"Slave\" );\n\n    private final String value;\n\n    Hierarchy( String value ){\n        this.value = value;\n    }\n\n    public String getName(){\n        return this.value;\n    }\n\n    public static String queryName( Hierarchy hierarchy ) {\n        return hierarchy.getName();\n    }\n\n    public static Hierarchy queryHierarchy( String sz ) {\n        return Hierarchy.valueOf( sz );\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/Instructations.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.hydra.auto.Instructation;\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.heist.heistron.chronic.Raider;\n\npublic final class Instructations {\n    public static void infoLifecycle(Heistum heistum, String szWhat, String szStateOrExtra ) {\n        heistum.tracer().info( \"[Lifecycle] [{}] <{}>\", szWhat, szStateOrExtra );\n    }\n\n    public static void infoConformed( Heistum heistum, Instructation instructation ) {\n        Instructations.infoLifecycle( heistum, \"Conformed\",\n                String.format(\n                \"System committed instruction (%s)\", instructation.className().replace( \"Instructation\", \"\" )\n                )\n        );\n    }\n\n    public static void infoConformed(Raider raider, String methodName ) {\n        if( methodName == null ) {\n            StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();\n            methodName = stackTraceElements[ 2 ].getMethodName();\n        }\n\n        Instructations.infoLifecycle( raider.parentHeist(), \"Conformed\",\n                String.format(\n                        \"System committed instruction (%s::%s)\", raider.className(), methodName\n                )\n        );\n    }\n\n    public static void infoConformed( Raider raider ) {\n        Instructations.infoConformed( raider, null );\n    }\n\n    public static void infoCompleted( Raider raider, String methodName ) {\n        if( methodName == null ) {\n            StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();\n            methodName = stackTraceElements[ 2 ].getMethodName();\n        }\n\n        Instructations.infoLifecycle( raider.parentHeist(), \"Termination\",\n                String.format(\n                        \"Instruction completed (%s::%s)\", raider.className(), methodName\n                )\n        );\n    }\n\n    public static void infoCompleted( 
Raider raider ) {\n        Instructations.infoCompleted( raider, null );\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/TaskTransaction.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.sauron.system.Saunut;\n\npublic interface TaskTransaction extends Saunut {\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/orchestration/Taskium.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.framework.system.executum.Processum;\n\npublic interface Taskium extends Processum {\n    default ChildHeistOrchestrator getHeistletOrchestrator() {\n        return (ChildHeistOrchestrator) this.getTaskManager();\n    }\n\n    long getTaskId();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/PageFrame64ConsumerAdapter.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.sauron.system.Saunut;\n\npublic interface PageFrame64ConsumerAdapter extends Saunut {\n    void consumeById( long index );\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskConsumer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.sauron.system.Saunut;\n\npublic interface TaskConsumer extends Saunut {\n    void consume();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskFrame64Consumer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\npublic interface TaskFrame64Consumer extends TaskConsumer {\n    TaskFrame64Producer getTaskPageProducer();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskFrame64Producer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\npublic interface TaskFrame64Producer extends TaskProducer {\n    Long require();\n\n    void deactivate( Long that );\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskPage.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.pinecone.slime.chunk.RangedPage;\n\npublic interface TaskPage extends RangedPage {\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskPageConsumer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\npublic interface TaskPageConsumer extends TaskConsumer {\n    TaskPageProducer getTaskPageProducer();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskPageProducer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.pinecone.slime.chunk.scheduler.ActivePageScheduler;\n\npublic interface TaskPageProducer extends ActivePageScheduler, TaskProducer {\n    TaskPage require();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskProducer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.concurrent.TimeUnit;\n\npublic interface TaskProducer extends Pinenut {\n    Object require();\n\n    boolean hasMoreProducts();\n\n    boolean hasTerminateSignal();\n\n    long getProductsSum();\n\n    void awaitProducerFinished() throws InterruptedException;\n\n    void awaitProducerFinished( long timeout, TimeUnit unit ) throws InterruptedException;\n\n    boolean isFinished();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/heistron/scheduler/TaskSchedulerStrategy.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.sauron.heist.heistron.Heistium;\nimport com.pinecone.slime.chunk.RangedPage;\nimport com.pinecone.slime.chunk.scheduler.PageDivider;\nimport com.pinecone.slime.chunk.scheduler.PagePool;\nimport com.pinecone.slime.chunk.scheduler.PageRecycleStrategy;\n\npublic interface TaskSchedulerStrategy extends Pinenut {\n    Heistium getParentHeistium();\n\n    RangedPage getMasterPage();\n\n    PagePool getHeistTaskPagePool();\n\n    PageDivider getPageDivider();\n\n    PageRecycleStrategy getPageRecycleStrategy();\n\n    TaskSchedulerStrategy setHeistTaskPagePool( PagePool pagePool );\n\n    TaskSchedulerStrategy setPageDivider( PageDivider divider );\n\n    TaskSchedulerStrategy setPageRecycleStrategy( PageRecycleStrategy strategy );\n\n    TaskProducer formulateProducer();\n}\n"
  },
  {
    "path": "Saurons/heist-framework-architecture/src/main/java/com/sauron/heist/http/HttpBrowserConf.java",
    "content": "package com.sauron.heist.http;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.homotype.DirectObjectInjector;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.tritium.util.ConfigHelper;\nimport com.sauron.heist.heistron.HeistConfigConstants;\nimport com.sauron.heist.heistron.Heistum;\n\npublic class HttpBrowserConf implements Pinenut {\n    public enum ProxyStrategy {\n        NoProxy,\n        SystemOnly,\n        ProxyGroup\n    }\n\n    protected Heistum    mParentHeistum;\n\n    public JSONObject    protoConfig;\n    public JSONArray     headConfigGroup;\n    public boolean       agentConfusion;\n    public JSONObject    systemProxy;\n    public JSONArray     proxyGroup;\n    public ProxyStrategy proxyStrategy     = ProxyStrategy.NoProxy;\n    public boolean       enableRandomDelay = false;\n    public int           randomDelayMin    = 0;\n    public int           randomDelayMax    = 0;\n    public int           socketTimeout     = 20000;\n    public String        charset           = \"UTF-8\";\n    public boolean       enableCookieJar   = true;\n\n\n    // New improved V2\n\n    // 是否跟随 HTTP 3xx 重定向\n    public boolean followRedirects          = true;\n    // 是否跟随 HTTPS → HTTPS / HTTP → HTTPS 重定向\n    public boolean followSslRedirects       = true;\n    // 是否在连接异常时自动重试（TCP 层）\n    public boolean retryOnConnectionFailure = true;\n\n\n    public int connectTimeout             = this.socketTimeout;   // ms\n    public int readTimeout                = 20000;   // ms\n    public int writeTimeout               = 20000;   // ms\n\n\n    // 一般来说业务会自己限制，可开可不开\n    public boolean enableRequestLimit     = false;\n    // 全局最大并发请求数\n    public int maxRequests                = 4096;\n    // 单 Host 最大并发\n    public int maxRequestsPerHost         = 512;\n\n    // 连接池配置\n    public boolean enableConnectionPool   = true;\n    public 
int maxIdleConnections         = 5;\n    public int keepAliveSeconds           = 300;\n\n    public HttpBrowserConf( Heistum heistum ) {\n        this.mParentHeistum   = heistum;\n        JSONObject parentConf = this.mParentHeistum.getConfig();\n        this.protoConfig      = parentConf.optJSONObject( HeistConfigConstants.KeyHttpBrowser );\n\n        if( this.protoConfig == null ) {\n            this.protoConfig = this.mParentHeistum.getHeistgram().getComponentsConfig().optJSONObject( HeistConfigConstants.KeyHttpBrowser ) ;\n        }\n\n        DirectObjectInjector.instance( ConfigHelper.fnToSmallHumpName, this.getClass() ).typeInject(\n                this.protoConfig, this\n        );\n        this.proxyStrategy = ProxyStrategy.valueOf( this.protoConfig.optString( \"ProxyStrategy\" ) );\n    }\n\n}"
  },
  {
    "path": "Saurons/heist-http-client-okhttp-suit/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>saurons</artifactId>\n        <groupId>com.saurons</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <groupId>com.sauron.heist</groupId>\n    <artifactId>heist-http-client-okhttp-suit</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            
<artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.sauron.core</groupId>\n            <artifactId>sauron-core</artifactId>\n            <version>1.2.7</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.jsoup</groupId>\n            <artifactId>jsoup</artifactId>\n            <version>1.15.4</version>\n        </dependency>\n\n        <!-- https://mvnrepository.com/artifact/us.codecraft/webmagic-core -->\n        <dependency>\n            <groupId>us.codecraft</groupId>\n            <artifactId>webmagic-core</artifactId>\n            <version>0.8.0</version>\n        </dependency>\n\n        <!-- https://mvnrepository.com/artifact/us.codecraft/webmagic-extension -->\n        <dependency>\n            <groupId>us.codecraft</groupId>\n            <artifactId>webmagic-extension</artifactId>\n            <version>0.8.0</version>\n        </dependency>\n\n        <dependency>\n            <groupId>com.sauron.heist</groupId>\n            <artifactId>heist-framework-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n\n        <dependency>\n            <groupId>com.squareup.okhttp3</groupId>\n            <artifactId>okhttp</artifactId>\n            <version>4.12.0</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Saurons/heist-http-client-okhttp-suit/src/main/java/com/sauron/heist/okhttp/HeistOkHttpClientFactory.java",
    "content": "package com.sauron.heist.okhttp;\n\nimport java.net.InetSocketAddress;\nimport java.net.Proxy;\nimport java.net.ProxySelector;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.concurrent.ThreadLocalRandom;\nimport java.util.concurrent.TimeUnit;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.heist.http.HttpBrowserConf;\n\nimport okhttp3.ConnectionPool;\nimport okhttp3.Dispatcher;\nimport okhttp3.OkHttpClient;\n\npublic class HeistOkHttpClientFactory implements OkHttpFactory {\n    protected Heistum heistum;\n    protected HttpBrowserConf conf;\n\n    public HeistOkHttpClientFactory( Heistum heistum ) {\n        this.heistum = heistum;\n        this.conf = new HttpBrowserConf( this.heistum );\n    }\n\n    @Override\n    public List<OkHttpClient> make() {\n        return this.make( this.conf );\n    }\n\n    @Override\n    public List<OkHttpClient> make( HttpBrowserConf conf ) {\n        List<OkClientConstructionScheme> schemes = this.makeScheme( conf );\n        List<OkHttpClient> clients = new ArrayList<>();\n        for ( OkClientConstructionScheme scheme : schemes ) {\n            OkHttpClient client = scheme.getBuilder().build();\n            clients.add(client);\n        }\n        return clients;\n    }\n\n    @Override\n    public List<OkClientConstructionScheme> makeScheme( HttpBrowserConf conf ) {\n        List<OkClientConstructionScheme> schemes = new ArrayList<>();\n\n        switch ( conf.proxyStrategy ) {\n            case SystemOnly: {\n                OkHttpClient.Builder builder = this.createBaseBuilder(conf);\n                builder.proxySelector(ProxySelector.getDefault());\n\n                OkClientConstructionScheme pair = new OkClientConstructionScheme(\n                        builder, conf, conf.systemProxy, null\n                );\n\n                schemes.add(pair);\n                break;\n            }\n            case 
ProxyGroup: {\n                if ( conf.proxyGroup != null ) {\n                    for ( int i = 0; i < conf.proxyGroup.length(); ++i ) {\n                        JSONObject proxyConf = conf.proxyGroup.optJSONObject(i);\n                        if (proxyConf == null) {\n                            continue;\n                        }\n\n                        Proxy proxy = this.buildProxyFromConf(proxyConf);\n                        if (proxy == null) {\n                            continue;\n                        }\n\n                        OkHttpClient.Builder builder = this.createBaseBuilder(conf);\n                        builder.proxy(proxy);\n\n                        OkClientConstructionScheme pair = new OkClientConstructionScheme(\n                                builder, conf, proxyConf, proxy\n                        );\n\n                        schemes.add(pair);\n                    }\n                }\n                break;\n            }\n            case NoProxy:\n            default: {\n                OkHttpClient.Builder builder = this.createBaseBuilder(conf);\n                builder.proxy(Proxy.NO_PROXY);\n\n                OkClientConstructionScheme pair = new OkClientConstructionScheme(\n                        builder, conf, null, Proxy.NO_PROXY\n                );\n\n                schemes.add(pair);\n                break;\n            }\n        }\n\n        return schemes;\n    }\n\n    protected OkHttpClient.Builder createBaseBuilder( HttpBrowserConf conf ) {\n        OkHttpClient.Builder builder = new OkHttpClient.Builder()\n                .connectTimeout(conf.connectTimeout, TimeUnit.MILLISECONDS)\n                .readTimeout(conf.readTimeout, TimeUnit.MILLISECONDS)\n                .writeTimeout(conf.writeTimeout, TimeUnit.MILLISECONDS)\n                .followRedirects(conf.followRedirects)\n                .followSslRedirects(conf.followSslRedirects)\n                
.retryOnConnectionFailure(conf.retryOnConnectionFailure);\n\n        if ( conf.enableCookieJar ) {\n            builder.cookieJar(new InMemoryCookieJar());\n        }\n\n        // === 连接池配置（可关闭，默认开启）===\n        if ( conf.enableConnectionPool ) {\n            ConnectionPool connectionPool = new ConnectionPool(\n                    conf.maxIdleConnections,\n                    conf.keepAliveSeconds,\n                    TimeUnit.SECONDS\n            );\n            builder.connectionPool(connectionPool);\n        }\n\n        if ( conf.enableRequestLimit ) {\n            Dispatcher dispatcher = new Dispatcher();\n            dispatcher.setMaxRequests(conf.maxRequests);\n            dispatcher.setMaxRequestsPerHost(conf.maxRequestsPerHost);\n            builder.dispatcher(dispatcher);\n        }\n\n        if ( conf.enableRandomDelay ) {\n            builder.addInterceptor(chain -> {\n                this.applyRandomDelay(conf);\n                return chain.proceed(chain.request());\n            });\n        }\n\n        return builder;\n    }\n\n    protected Proxy buildProxyFromConf( JSONObject proxyConf ) {\n        String host = proxyConf.optString(\"host\", null);\n        int port = proxyConf.optInt(\"port\", -1);\n\n        if (host == null || port <= 0) {\n            return null;\n        }\n\n        return new Proxy(\n                Proxy.Type.HTTP,\n                new InetSocketAddress(host, port)\n        );\n    }\n\n    protected void applyRandomDelay( HttpBrowserConf conf ) {\n        int min = conf.randomDelayMin;\n        int max = conf.randomDelayMax;\n\n        if (max <= min || min < 0) {\n            return;\n        }\n\n        int delay = ThreadLocalRandom.current().nextInt(min, max + 1);\n        try {\n            Thread.sleep( delay );\n        }\n        catch (InterruptedException e) {\n            Thread.currentThread().interrupt();\n        }\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-http-client-okhttp-suit/src/main/java/com/sauron/heist/okhttp/InMemoryCookieJar.java",
    "content": "package com.sauron.heist.okhttp;\n\nimport java.util.Collections;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\nimport okhttp3.Cookie;\nimport okhttp3.CookieJar;\nimport okhttp3.HttpUrl;\n\npublic class InMemoryCookieJar implements CookieJar {\n\n    private final Map<String, List<Cookie>> cookieStore = new ConcurrentHashMap<>();\n\n    @Override\n    public void saveFromResponse(HttpUrl url, List<Cookie> cookies) {\n        this.cookieStore.put(url.host(), cookies);\n    }\n\n    @Override\n    public List<Cookie> loadForRequest(HttpUrl url) {\n        return this.cookieStore.getOrDefault(url.host(), Collections.emptyList());\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-http-client-okhttp-suit/src/main/java/com/sauron/heist/okhttp/OkClientConstructionScheme.java",
    "content": "package com.sauron.heist.okhttp;\n\nimport java.net.Proxy;\n\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.sauron.heist.http.HttpBrowserConf;\n\nimport okhttp3.OkHttpClient;\n\npublic final class OkClientConstructionScheme {\n\n    private OkHttpClient.Builder builder;\n    private HttpBrowserConf httpBrowserConf;\n    private JSONObject proxyConf;\n    private Proxy proxy;\n\n    public OkClientConstructionScheme(\n            OkHttpClient.Builder builder,\n            HttpBrowserConf httpBrowserConf,\n            JSONObject proxyConf,\n            Proxy proxy\n    ) {\n        this.builder = builder;\n        this.httpBrowserConf = httpBrowserConf;\n        this.proxyConf = proxyConf;\n        this.proxy = proxy;\n    }\n\n    public OkHttpClient.Builder getBuilder() {\n        return this.builder;\n    }\n\n    public void setBuilder(OkHttpClient.Builder builder) {\n        this.builder = builder;\n    }\n\n    public HttpBrowserConf getHttpBrowserConf() {\n        return this.httpBrowserConf;\n    }\n\n    public void setHttpBrowserConf(HttpBrowserConf httpBrowserConf) {\n        this.httpBrowserConf = httpBrowserConf;\n    }\n\n    public JSONObject getProxyConf() {\n        return this.proxyConf;\n    }\n\n    public void setProxyConf(JSONObject proxyConf) {\n        this.proxyConf = proxyConf;\n    }\n\n    public Proxy getProxy() {\n        return this.proxy;\n    }\n\n    public void setProxy(Proxy proxy) {\n        this.proxy = proxy;\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-http-client-okhttp-suit/src/main/java/com/sauron/heist/okhttp/OkHttpFactory.java",
    "content": "package com.sauron.heist.okhttp;\n\nimport java.util.List;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.heist.http.HttpBrowserConf;\n\nimport okhttp3.OkHttpClient;\n\npublic interface OkHttpFactory extends Pinenut {\n\n    List<OkHttpClient> make(HttpBrowserConf conf );\n\n    List<OkHttpClient> make();\n\n    List<OkClientConstructionScheme> makeScheme( HttpBrowserConf conf );\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>saurons</artifactId>\n        <groupId>com.saurons</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <groupId>com.sauron.heist</groupId>\n    <artifactId>heist-system-schedule</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n 
           <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n<!--        <dependency>-->\n<!--            <groupId>com.pinecone.summer</groupId>-->\n<!--            <artifactId>summer</artifactId>-->\n<!--            <version>2.1.0</version>-->\n<!--        </dependency>-->\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.sauron.core</groupId>\n            <artifactId>sauron-core</artifactId>\n            <version>1.2.7</version>\n            <scope>compile</scope>\n        </dependency>\n        <!--<dependency>-->\n            <!--<groupId>com.walnut.sparta</groupId>-->\n            <!--<artifactId>sparta</artifactId>-->\n            <!--<version>2.1.0</version>-->\n        <!--</dependency>-->\n\n        <dependency>\n            <groupId>org.javassist</groupId>\n            <artifactId>javassist</artifactId>\n            <version>3.29.0-GA</version>\n        </dependency>\n        <dependency>\n            <groupId>io.netty</groupId>\n            <artifactId>netty-all</artifactId>\n            <version>4.1.80.Final</version>\n        </dependency>\n\n\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.jsoup</groupId>\n            <artifactId>jsoup</artifactId>\n            <version>1.15.4</version>\n        </dependency>\n\n        <!-- 
https://mvnrepository.com/artifact/us.codecraft/webmagic-core -->\n        <dependency>\n            <groupId>us.codecraft</groupId>\n            <artifactId>webmagic-core</artifactId>\n            <version>0.8.0</version>\n        </dependency>\n\n        <!-- https://mvnrepository.com/artifact/us.codecraft/webmagic-extension -->\n        <dependency>\n            <groupId>us.codecraft</groupId>\n            <artifactId>webmagic-extension</artifactId>\n            <version>0.8.0</version>\n        </dependency>\n\n\n\n\n\n\n\n\n        <!-- MyBatis dependencies -->\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis</artifactId>\n            <version>3.5.9</version>\n        </dependency>\n        <dependency>\n            <groupId>org.mybatis</groupId>\n            <artifactId>mybatis-spring</artifactId>\n            <version>2.0.6</version>\n        </dependency>\n\n        <!-- MyBatis Plus dependencies -->\n        <dependency>\n            <groupId>com.baomidou</groupId>\n            <artifactId>mybatis-plus-core</artifactId>\n            <version>3.4.3.4</version>\n        </dependency>\n        <dependency>\n            <groupId>com.baomidou</groupId>\n            <artifactId>mybatis-plus-annotation</artifactId>\n            <version>3.4.3.4</version>\n        </dependency>\n\n        <!-- MySQL Connector -->\n        <dependency>\n            <groupId>mysql</groupId>\n            <artifactId>mysql-connector-java</artifactId>\n            <version>8.0.26</version>\n        </dependency>\n\n        <!-- Logging dependencies -->\n        <dependency>\n            <groupId>org.slf4j</groupId>\n            <artifactId>slf4j-api</artifactId>\n            <version>1.7.30</version>\n        </dependency>\n\n        <dependency>\n            <groupId>net.spy</groupId>\n            <artifactId>spymemcached</artifactId>\n            <version>2.12.3</version>\n        </dependency>\n        <dependency>\n            
<groupId>org.apache.httpcomponents.client5</groupId>\n            <artifactId>httpclient5</artifactId>\n            <version>5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-vfs2</artifactId>\n            <version>2.9.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-vfs2-jackrabbit1</artifactId>\n            <version>2.9.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-lang3</artifactId>\n            <version>3.12.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.sauron.heist</groupId>\n            <artifactId>heist-framework-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/ArchCrew.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.tritium.system.MissionTerminateException;\nimport com.pinecone.tritium.system.TritiumSystem;\nimport com.pinecone.tritium.system.StorageSystem;\nimport com.pinecone.framework.system.functions.FunctionTraits;\nimport com.pinecone.framework.util.json.JSONObject;\n\nimport org.apache.commons.vfs2.FileSystemManager;\nimport org.slf4j.Logger;\n\nimport java.io.IOException;\nimport java.util.Random;\n\npublic abstract class ArchCrew extends LocalCrewnium implements Crew {\n    protected Logger      logger;\n\n    protected String      crewInstanceName;\n\n    protected JSONObject  joFailureConf;\n    protected int         fileRetrieveTime  = 1          ;\n    protected long        fragBase          = 10000      ;  // unit: W\n    protected long        fragRange         = 1000000    ;  // unit: 1\n\n    protected Heist       heist;\n\n\n    public ArchCrew( Heist heist, int nCrewId ){\n        super( heist, nCrewId );\n\n        this.heist              = (Heist) this.mParentHeist;\n        this.fragBase           = this.heist.fragBase;\n        this.fragRange          = this.heist.fragRange;\n        this.joFailureConf      = this.heist.getConfig().optJSONObject( \"FailureConf\" );\n        this.fileRetrieveTime   = this.joFailureConf.optInt( \"FileRetrieveTime\", 1 );\n\n        this.crewInstanceName   = this.className() + this.mnCrewId;\n        //this.failureRetryTimes = this.heistCenter.getProtoConfig().getFailureRetryTimes();\n\n        this.logger             = this.parentSystem().getTracerScope().newLogger( this.crewInstanceName );\n    }\n\n    @Override\n    public Heistum parentHeist() {\n        return this.heist;\n    }\n\n    //根据任务数量获取线程数\n\n    protected String lifecycleTracerSignature() {\n        return String.format( \"%s::Lifecycle\", FunctionTraits.thatName(3) );\n    }\n\n    protected ArchCrew traceTaskState( long idx, String szState ) {\n        this.logger.info( \"[TaskState] 
<[{},{}], ID:{}> <{}>\", this.heist.taskFrom, this.heist.taskTo ,idx, szState );\n        return this;\n    }\n\n    protected boolean handleTask( long index ){\n        this.traceTaskState( index, \"Handle\" );\n        return new Random().nextInt(100)+1>80;\n    }\n\n    protected boolean noticeTaskDone ( long index, boolean bIsRecovered ){\n        if( bIsRecovered ) {\n            this.traceTaskState( index, \"Recovered\" );\n        }\n        else {\n            this.traceTaskState( index, Heistum.StatusDone );\n        }\n        //this.heist.getSpoilsLock().countDown();\n        return true;\n    }\n\n    protected boolean noticeTaskDone ( long index ){\n        return this.noticeTaskDone( index, false );\n    }\n\n\n    @Override\n    public void startBatchTask() {\n        this.mTaskConsumer.consume();\n    }\n\n    protected void consumeById( long index ) {\n        try {\n            this.traceTaskState( index, Heistum.StatusStart );\n            this.tryConsumeById( index );\n            this.noticeTaskDone( index, false );\n        }\n        catch ( LootAbortException e ) {\n            this.traceTaskState( index, \"Abort\" );\n        }\n        catch ( LootRecoveredException e1 ) {\n            this.noticeTaskDone( index, true );\n        }\n        catch ( IllegalStateException e2 ) {\n            this.traceTaskState( index, \"Error:\" + e2.getMessage() );\n        }\n        catch ( IOException io ) {\n            this.traceTaskState( index, \"IOException:\" + io.getMessage() );\n        }\n        catch ( MissionTerminateException mte ) {\n            this.parentHeist().terminate();\n        }\n        catch ( Exception e3 ) {\n            // Keep this task alive, and ignore other exceptions.\n            this.parentHeist().handleAliveException( e3 );\n        }\n    }\n\n    protected void tryConsumeById( long index ) throws LootRecoveredException, LootAbortException, IllegalStateException, IOException {\n\n    }\n\n    @Override\n    public 
void run() {\n        //this.lootFromSignal();\n        this.isTimeToFeast();\n    }\n\n    @Override\n    public String crewName() {\n        return this.heist.heistName();\n    }\n\n    @Override\n    public void validateSpoil( String sz ) {\n\n    }\n\n\n    @Override\n    public TritiumSystem parentSystem() {\n        return (TritiumSystem) super.parentSystem();\n    }\n\n    @Override\n    public StorageSystem getStorageSystem() {\n        return this.parentSystem().getStorageSystem();\n    }\n\n    public FileSystemManager getDafaultFileSystemManager() {\n        return this.getStorageSystem().getFileSystemManager();\n    }\n\n    @Override\n    public Logger tracer(){\n        return this.logger;\n    }\n\n    protected void handleAliveException( Exception e ) {\n        this.parentHeist().handleAliveException( e );\n    }\n\n    protected void handleKillException( Exception e ) throws IllegalStateException {\n        this.parentHeist().handleKillException( e );\n    }\n}\n\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/ArchHeistum.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.tritium.system.TritiumSystem;\n\nimport org.slf4j.Logger;\n\npublic abstract class ArchHeistum extends HeistEntity implements Heistum {\n    protected Heistgram         mHeistgram ;\n    protected Logger            mLogger;\n\n    protected ArchHeistum( Heistgram heistgram ) {\n        super();\n        this.mHeistgram   = heistgram;\n        this.mLogger      = this.parentSystem().getTracerScope().newLogger( this.className() );\n    }\n\n    @Override\n    public Logger tracer() {\n        return this.mLogger;\n    }\n\n    protected ArchHeistum infoLifecycle( String szWhat, String szStateOrExtra ) {\n        this.tracer().info( \"[Lifecycle] [{}] <{}>\", szWhat, szStateOrExtra );\n        return this;\n    }\n\n    protected ArchHeistum infoLifecycle( String szStateOrExtra ) {\n        StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();\n        return this.infoLifecycle( stackTraceElements[ 2 ].getMethodName(), szStateOrExtra );\n    }\n\n\n    @Override\n    public Heistgram getHeistgram() {\n        return this.mHeistgram;\n    }\n\n    public TritiumSystem parentSystem() {\n        return (TritiumSystem) this.getHeistgram().parentSystem();\n    }\n\n    /**\n     * These exceptions will not interrupt the running state of single moulder, but only log them.\n     */\n    @Override\n    public void handleAliveException( Exception e ) {\n        this.tracer().info( \"[{}] <AliveError:{}>\", this.heistName(), e.getMessage() );\n    }\n\n    /**\n     * These exceptions will kill the running state of single moulder, and interrupt and redirect to RuntimeException.\n     */\n    @Override\n    public void handleKillException( Exception e ) throws IllegalStateException {\n        this.tracer().info( \"[{}] <KillError:{}>\", this.heistName(), e.getMessage() );\n        throw new IllegalStateException( e );\n    }\n\n    @Override\n    public abstract Crew newCrew( int 
nCrewId ) ;\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/CrewPageProcessor.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.framework.util.Debug;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\nimport us.codecraft.webmagic.Site;\nimport us.codecraft.webmagic.processor.PageProcessor;\n\npublic class CrewPageProcessor implements PageProcessor {\n    protected HTTPHeist  parentHeist;\n\n    public CrewPageProcessor( HTTPHeist heist ) {\n        this.parentHeist = heist;\n    }\n\n    @Override\n    public void process( Page page ) {\n        Request request = new Request(\"https://rednest.cn/index.html\");\n        request.putExtra(\"requestType\", \"temp\");\n        page.addTargetRequest( request );\n        Debug.trace( \"fuck\", page.getHtml().toString() );\n    }\n\n    @Override\n    public Site getSite() {\n        return this.parentHeist.getSite();\n    }\n}"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/CrewPipeline.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport us.codecraft.webmagic.ResultItems;\nimport us.codecraft.webmagic.Task;\nimport us.codecraft.webmagic.pipeline.Pipeline;\n\npublic class CrewPipeline implements Pipeline {\n\n    private ObjectMapper objectMapper = new ObjectMapper();\n\n    private int cnt = 0;\n\n    @Override\n    public void process( ResultItems resultItems, Task task ) {\n\n    }\n}"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/HTTPCrew.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.sauron.heist.http.HttpBrowserConf;\n\nimport org.apache.commons.vfs2.FileObject;\nimport org.apache.commons.vfs2.FileSystemManager;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.OutputStream;\nimport java.util.Random;\n\npublic abstract class HTTPCrew extends ArchCrew {\n    protected String heistURL;\n\n    public HTTPCrew ( HTTPHeist heist, int id ){\n        super( heist, id );\n        this.heistURL = this.parentHeist().getConfig().optString( \"HeistURL\" );\n    }\n\n    public void validateSpoil( Page page ) throws LootAbortException, IllegalStateException {\n        if( page.getBytes().length < this.joFailureConf.optInt( \"FailedFileSize\" ) ) {\n            throw new IllegalStateException(\"CompromisedFilesSize\");\n        }\n    }\n\n    @Override\n    public HTTPHeist parentHeist() {\n        return (HTTPHeist) this.heist;\n    }\n\n    protected void afterPageQueried( Page cache ) {\n    }\n\n    // [Query, Get] Inlet method.\n    public Page queryHTTPPage( Request request, boolean bPooled ) {\n        Page cache = this.parentHeist().queryHTTPPage( request, bPooled );\n        try{\n            HttpBrowserConf browserConf = this.parentHeist().getBrowserConf();\n            if( browserConf.enableRandomDelay ){\n                Thread.sleep( ( new Random() ).nextInt( browserConf.randomDelayMax - browserConf.randomDelayMin + 1 ) + browserConf.randomDelayMin );\n            }\n        }\n        catch ( InterruptedException e ) {\n            this.parentHeist().handleKillException( e );\n        }\n\n        this.afterPageQueried( cache );\n        this.tracer().info( \"[{}] [PageFetched:<Status:{}, Size:{}>]\", this.lifecycleTracerSignature(), cache.getStatusCode(), cache.getBytes().length );\n        return cache;\n    }\n\n    public Page queryHTTPPage( Request request ) {\n        
return this.queryHTTPPage( request, true );\n    }\n\n    public Page getHTTPPage( String szHref, boolean bPooled ) {\n        Request request = new Request( szHref );\n        request.putExtra(\"requestType\", \"CrewDefault\");\n        request.setMethod( \"GET\" );\n\n        return this.queryHTTPPage( request, bPooled );\n    }\n\n    public Page getHTTPPage( String szHref ) {\n        return this.getHTTPPage( szHref, true );\n    }\n\n    public String getHTTPFile( String szHref, boolean bPooled ) {\n        return this.getHTTPPage( szHref, bPooled ).getRawText();\n    }\n\n    public String getHTTPFile( String szHref ) {\n        return this.getHTTPFile( szHref, true );\n    }\n\n    // No validate\n    public Page queryHTTPPage( Request request, String szFilePath ) {\n        try {\n            Page cachePage;\n            byte[] cache ;\n            FileSystemManager fsm = this.getDafaultFileSystemManager();\n            FileObject fileObject = fsm.resolveFile( szFilePath );\n            if ( fileObject.exists() ) {\n                try ( InputStream inputStream = fileObject.getContent().getInputStream() ) {\n                    cache = inputStream.readAllBytes();\n                    cachePage = this.parentHeist().extendPage( cache, request );\n                }\n            }\n            else {\n                cachePage = this.queryHTTPPage( request );\n                fileObject.createFile();\n                try ( OutputStream outputStream = fileObject.getContent().getOutputStream() ) {\n                    outputStream.write( cachePage.getBytes() );\n                }\n            }\n\n            return cachePage;\n        }\n        catch ( IOException e ){\n            this.parentHeist().handleAliveException( e );\n        }\n        return null;\n    }\n\n    public Page getHTTPPage( String szHref, String szFilePath ) {\n        return this.queryHTTPPage( new Request( szHref ), szFilePath );\n    }\n\n    public Page queryHTTPPageSafe( Request 
request ) {\n        Page page = null;\n        int nRetry = 0;\n        IllegalStateException lpLastError = null;\n        for ( int i = 0; i < this.fileRetrieveTime; ++i ) {\n            try {\n                page = this.queryHTTPPage( request );\n                this.validateSpoil( page );\n                break;\n            }\n            catch ( IllegalStateException e ) {\n                ++nRetry;\n                lpLastError = e;\n            }\n            catch ( LootAbortException e ) {\n                return page;\n            }\n        }\n\n        if ( nRetry >= this.fileRetrieveTime - 1 && lpLastError != null ) {\n            throw new IllegalStateException(\"IrredeemableLoot\");\n        }\n        return page;\n    }\n\n    protected Page afterPageFetched( Page page, Request request ){\n        return page;\n    }\n\n    Page tryRecoverFromLocalFile( String szStoragePath, Request request ) throws LootRecoveredException, LootAbortException {\n        try {\n            byte[] cache ;\n            FileSystemManager fsm = this.getDafaultFileSystemManager();\n            FileObject fileObject = fsm.resolveFile( szStoragePath );\n            try ( InputStream inputStream = fileObject.getContent().getInputStream() ) {\n                cache = inputStream.readAllBytes();\n            }\n\n            Page page = this.parentHeist().extendPage( cache, request );\n\n            this.validateSpoil( page );\n            throw new LootRecoveredException();\n        }\n        catch ( LootRecoveredException | LootAbortException e ) {\n            throw e;\n        }\n        catch ( IOException e1 ) {\n            return null;\n        }\n    }\n\n    void storeHrefCache( String szStoragePath, Request request ) throws LootRecoveredException, LootAbortException, IOException {\n        Page cache;\n\n        try {\n            cache = this.tryRecoverFromLocalFile( szStoragePath, request );\n        }\n        catch ( LootRecoveredException | LootAbortException 
e ) {\n            throw e;\n        }\n        catch ( IllegalStateException e ) {\n            this.logger.info( \"[Mission::Lifecycle] [Heistum<{}>] <RecoverFromLocalFileFailed.>\", this.className() );\n        }\n\n        cache = this.queryHTTPPageSafe( request );\n        cache = this.afterPageFetched( cache, request );\n\n        FileSystemManager fsm = this.getDafaultFileSystemManager();\n        FileObject fileObject = fsm.resolveFile( szStoragePath );\n        if ( !fileObject.exists() ) {\n            fileObject.createFile();\n        }\n        try ( OutputStream outputStream = fileObject.getContent().getOutputStream() ) {\n            outputStream.write( cache.getBytes() );\n        }\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/HTTPHeist.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.framework.system.NonNull;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.http.HttpBrowserConf;\nimport com.sauron.heist.http.HttpBrowserDownloader;\nimport com.pinecone.framework.util.json.JSONObject;\n\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\nimport us.codecraft.webmagic.Site;\nimport us.codecraft.webmagic.Spider;\nimport us.codecraft.webmagic.proxy.Proxy;\nimport us.codecraft.webmagic.proxy.SimpleProxyProvider;\nimport us.codecraft.webmagic.selector.PlainText;\n\nimport java.io.IOException;\nimport java.io.UnsupportedEncodingException;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\npublic abstract class HTTPHeist extends Heist {\n    protected final String           defUserAgent = \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36 Edg/112.0.1722.48\";\n\n    protected String                 heistURL;     // 爬虫的主链接\n\n    protected Site                   site;\n    protected HttpBrowserConf        browserConf;\n    protected Spider                 majorSpider;\n    protected CrewPageProcessor      pageProcessor;\n    protected HttpBrowserDownloader  httpBrowser;\n    protected ReentrantReadWriteLock requestLock = new ReentrantReadWriteLock();\n\n    public HTTPHeist( Heistgram heistron ){\n        super( heistron );\n    }\n\n    public HTTPHeist( Heistgram heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n    }\n\n    public HTTPHeist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) {\n        super( heistron, parent, szChildName );\n    }\n\n    protected void init() {\n        this.site                 = 
Site.me().setRetryTimes( 3 );\n        this.pageProcessor        = new CrewPageProcessor( this );\n        this.majorSpider          = Spider.create( this.pageProcessor );\n        this.httpBrowser          = new HttpBrowserDownloader();\n    }\n\n    protected void loadSiteConf() {\n        if( this.getConfig() != null ) {\n            if( this.browserConf.enableRandomDelay ) {\n                this.site.setSleepTime( this.browserConf.randomDelayMin );\n                this.site.setRetrySleepTime( this.browserConf.randomDelayMin );\n            }\n            else {\n                this.site.setSleepTime( 0 );\n                this.site.setRetrySleepTime( 100 );\n            }\n\n            this.site.setUserAgent( defUserAgent ) // TODO\n                    .setCharset( this.browserConf.charset )\n                    .setTimeOut( this.browserConf.socketTimeout );\n        }\n    }\n\n    protected void loadProxyConf() {\n        if( this.getConfig() != null ) {\n            switch ( this.browserConf.proxyStrategy ) {\n                case NoProxy: {\n                    this.httpBrowser.setProxyProvider( null );\n                    break;\n                }\n                case SystemOnly: {\n                    this.httpBrowser.setProxyProvider(\n                            SimpleProxyProvider.from( new Proxy(\n                                    this.browserConf.systemProxy.optString(\"host\"),\n                                    this.browserConf.systemProxy.optInt(\"port\")\n                            ) )\n                    );\n                    break;\n                }\n                case ProxyGroup: {\n                    List<Proxy> proxies = new ArrayList<>();\n                    for ( int i = 0; i < this.browserConf.proxyGroup.size(); ++i ) {\n                        JSONObject jo = this.browserConf.proxyGroup.optJSONObject( i );\n                        proxies.add( new Proxy( jo.optString(\"host\"), jo.optInt(\"port\") ) );\n                
    }\n\n                    this.httpBrowser.setProxyProvider(\n                            SimpleProxyProvider.from( (Proxy[]) proxies.toArray() )\n                    );\n                    break;\n                }\n                default: {\n                    break;\n                }\n            }\n        }\n    }\n\n    @Override\n    protected void loadConfig() {\n        this.init();\n        super.loadConfig();\n        this.browserConf = new HttpBrowserConf( this );\n        this.loadSiteConf();\n        this.loadProxyConf();\n        this.heistURL = this.getConfig().optString( HeistConfigConstants.Heistum.KeyHeistURL );\n    }\n\n    public Site getSite() {\n        return this.site;\n    }\n\n    public HttpBrowserConf getBrowserConf() {\n        return this.browserConf;\n    }\n\n    public CrewPageProcessor getPageProcessor() {\n        return this.pageProcessor;\n    }\n\n    public HttpBrowserDownloader getHttpBrowser() {\n        return this.httpBrowser;\n    }\n\n    public Page queryHTTPPage( Request request ) {\n        return this.queryHTTPPage( request, true );\n    }\n\n    protected Page queryHTTPPageOnly( Request request, boolean bPooled ) {\n        this.requestLock.readLock().lock();\n        try{\n            return this.httpBrowser.download( request, this.majorSpider, bPooled );\n        }\n        finally {\n            this.requestLock.readLock().unlock();\n        }\n    }\n\n    public Page queryHTTPPage( Request request, boolean bPooled ) {\n        try{\n            return this.queryHTTPPageOnly( request, bPooled );\n        }\n        catch ( ProxyProvokeHandleException e ) {\n            if( e.getCause() instanceof IOException ) {\n                this.tracer().warn( \"[queryHTTPPage:Warning] [What: IOException, \" + e.getMessage() + \"]\" );\n                // Fixed: CloseableHttpClient SSL exception using none pooled.\n                try{\n                    return this.queryHTTPPageOnly( request, bPooled );\n        
        }\n                catch ( ProxyProvokeHandleException e1 ) {\n                    if ( e.getCause() instanceof IOException ) {\n                        this.tracer().warn(\"[queryHTTPPage:Warning:ResetPool] [What: IOException, \" + e.getMessage() + \"]\");\n                        this.requestLock.writeLock().lock();\n                        try{\n                            this.httpBrowser.reset();\n                        }\n                        finally {\n                            this.requestLock.writeLock().unlock();\n                        }\n                        return this.queryHTTPPageOnly( request, bPooled );\n                    }\n                    throw e1;\n                }\n            }\n            throw e;\n        }\n    }\n\n    public Page getHTTPPage( String szHref, boolean bPooled ) {\n        Request request = new Request( szHref );\n        request.putExtra(\"requestType\", \"HeistDefault\");\n        request.setMethod( \"GET\" );\n\n        return this.queryHTTPPage( request, bPooled );\n    }\n\n    public Page getHTTPPage( String szHref ) {\n        return this.getHTTPPage( szHref, true );\n    }\n\n    public String getHTTPFile( String szHref, boolean bPooled ) {\n        return this.getHTTPPage( szHref, bPooled ).getHtml().toString();\n    }\n\n    public String getHTTPFile( String szHref ) {\n        return this.getHTTPFile( szHref, true );\n    }\n\n\n    protected Page initDefaultPage( Page page, Request request ) {\n        page.setStatusCode( 200 );\n        page.setRequest   ( request );\n        page.setCharset   ( this.getSite().getCharset() );\n        page.setUrl       ( new PlainText( request.getUrl() ) );\n        page.setDownloadSuccess( true );\n\n        return page;\n    }\n\n    public Page extendPage( byte[] pageCache, Request request ) {\n        Page page = new Page();\n        page.setBytes     ( pageCache );\n        try{\n            page.setRawText( new String( pageCache, 
this.getSite().getCharset() ) );\n        }\n        catch ( UnsupportedEncodingException e1 ) {\n            page.setRawText( null );\n        }\n\n        return this.initDefaultPage( page, request );\n    }\n\n    public Page extendPage( String szPageCache, Request request ) {\n        Page page = new Page();\n        page.setRawText   ( szPageCache );\n        page.setBytes     ( szPageCache.getBytes() );\n\n        return this.initDefaultPage( page, request );\n    }\n\n    public Page extendPage( String szNeoPageCache, Page that ) {\n        Page neoPage = new Page();\n        neoPage.setRawText   ( szNeoPageCache );\n        neoPage.setBytes     ( szNeoPageCache.getBytes() );\n        neoPage.setStatusCode( that.getStatusCode());\n        neoPage.setRequest   ( that.getRequest());\n        neoPage.setCharset   ( that.getCharset());\n        neoPage.setUrl       ( that.getUrl());\n        neoPage.setDownloadSuccess( that.isDownloadSuccess() );\n\n        return neoPage;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/HTTPIndexHeist.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.framework.system.NonNull;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.mapreduce.SchemeQuerier;\n\npublic abstract class HTTPIndexHeist extends HTTPHeist {\n    protected SchemeQuerier<Object > mSchemeQuerier;\n\n    public HTTPIndexHeist( Heistgram heistron ){\n        super(heistron);\n    }\n\n    public HTTPIndexHeist( Heistgram heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n    }\n\n    public HTTPIndexHeist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) {\n        super( heistron, parent, szChildName );\n    }\n\n    public SchemeQuerier<Object > getSchemeQuerier() {\n        return this.mSchemeQuerier;\n    }\n\n    public String queryHrefById ( long id ) {\n        Object scheme = this.mSchemeQuerier.get( id );\n\n        if( scheme instanceof String ) {\n            return (String) scheme;\n        }\n\n        return null; // TODO\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/Heist.java",
    "content": "package com.sauron.heist.heistron;\n\n//import com.genius.common.Heist;\n//import com.genius.common.UlfUMC.UlfUMCMessage;\n//import com.genius.config.HeistConfig;\n//import com.genius.config.SystemConfig;\n//import com.genius.mq.Harbor;\n//import com.genius.pool.FunctionNamePool;\n//import org.springframework.beans.factory.annotation.Autowired;\n\nimport com.pinecone.framework.system.NonNull;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\nimport com.pinecone.framework.util.json.JSONArray;\nimport com.pinecone.framework.util.name.Namespace;\nimport com.pinecone.hydra.servgram.GramTransaction;\nimport com.pinecone.hydra.servgram.OrchestrateInterruptException;\nimport com.pinecone.hydra.servgram.AutoOrchestrator;\nimport com.pinecone.hydra.servgram.ServgramOrchestrator;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.pinecone.tritium.util.ConfigHelper;\nimport com.sauron.heist.heistron.orchestration.ChildHeistInstanceModifier;\nimport com.sauron.heist.heistron.orchestration.ChildHeistOrchestrator;\nimport com.sauron.heist.heistron.orchestration.HeistletOrchestrator;\nimport com.sauron.heist.heistron.orchestration.Hierarchy;\nimport com.sauron.heist.heistron.scheduler.RangedTaskPage;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.homotype.DirectObjectInjector;\nimport com.pinecone.slime.chunk.RangedPage;\n\n\n/**\n * 负责抢劫任务(Heist)的任务调度分配\n */\npublic abstract class Heist extends ArchHeistum implements CascadeHeist {\n    //@Autowired\n    //private Harbor harbor;  //港口，负责和master结点进行通信\n\n    protected Hierarchy                    mHierarchy;\n    protected Namespace                    mHeistName;\n    protected CascadeHeist                 mParent          = null;\n\n    protected JSONConfig                   mjoProtoConfig ;  // 当前Heist的JSON原型配置项，等待被子Heist继承和重写\n    protected JSONConfig                   
mjoInstanceConfig;\n\n    protected HeistScheme                  heistScheme;\n\n    protected RangedPage                   mMasterHeistTaskPage;\n    protected Heistium                     mHeistium;       // The affiliated process, which is using for executing specific heist.\n\n\n    protected ChildHeistInstanceModifier   mChildHeistInstanceModifier;\n\n    protected Heist( Heistgram heistgram, @Nullable JSONConfig joConfig, @Nullable CascadeHeist parent, Namespace heistName ) {\n        super( heistgram );\n        this.mParent     = parent;\n        this.mHeistName  = heistName;\n\n        this.infoLifecycle( \"Heist::FinalConstructor\", \"A new heist has been contrived\" );\n        this.apply( joConfig );\n    }\n\n    protected Heist( Heistgram heistron, @Nullable JSONConfig joConfig, @Nullable CascadeHeist parent, @Nullable String szHeistName ) {\n        this( heistron, joConfig, parent, (Namespace) null );\n        if( szHeistName == null ) {\n            szHeistName = heistron.searchHeistName( this );\n        }\n        this.mHeistName = CascadeHeist.newNamespace( szHeistName, parent );\n    }\n\n    public Heist( Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) {\n        this( heistron, null, parent, szChildName );\n\n        JSONConfig jc = parent.getHeistScheme().getInstanceConfigByName( szChildName, true );\n        parent.getHeistScheme().reinterpret( jc );\n        this.apply( jc );\n    }\n\n    public Heist( Heistgram heistron, @Nullable JSONConfig joConfig ){\n        this( heistron, joConfig, null, (String) null );\n    }\n\n    public Heist( Heistgram heistron ) {\n        this( heistron, (String) null );\n    }\n\n    public Heist( Heistgram heistron, @Nullable String szName ) {\n        this( heistron, null, null, szName );\n\n        if( this.mHeistName != null ) {\n            this.apply( heistron.queryHeistConfig( this.heistName() ) );\n        }\n    }\n\n    protected void loadConfig() {\n        
this.applyThisInnerConfig( HeistEntity.class );\n        this.apply( this.taskFrom, this.taskTo );\n    }\n\n    public Heist apply( long taskFrom, long taskTo ) {\n        this.taskFrom             = taskFrom;\n        this.taskTo               = taskTo;\n\n        this.mMasterHeistTaskPage = new RangedTaskPage( taskFrom, taskTo, 0 );\n        this.mHeistium            = new LocalHeistium( this, 0 );\n        return this;\n    }\n\n    public Heist apply( @Nullable JSONConfig joProtoConfig ) {\n        if( joProtoConfig != null ) {\n            this.mjoProtoConfig = joProtoConfig;\n            this.heistScheme    = new PatriarchalHeistScheme( this );\n\n            this.mjoInstanceConfig = this.getHeistScheme().getInstanceConfigByName( null, true );\n            this.getHeistScheme().reinterpret( this.mjoInstanceConfig );\n\n            //Debug.fmt( 2, this.mjoInstanceConfig );\n\n            this.mHierarchy = this.queryHeistHierarchy();\n            this.loadConfig();\n            this.reportScheme();\n        }\n        return this;\n    }\n\n    protected Hierarchy queryHeistHierarchy() {\n        JSONConfig joOrchestration = this.getConfig().getChild( AutoOrchestrator.ConfigOrchestrationKey );\n        if( joOrchestration != null ) {\n            JSONArray transaction = joOrchestration.optJSONArray( GramTransaction.ConfigTransactionsListKey );\n            if( transaction != null && !transaction.isEmpty() ) {\n                return Hierarchy.Master;\n            }\n        }\n        return Hierarchy.Slave; // Root is also a slave.\n    }\n\n    protected void applyThisInnerConfig( Class<?> stereotype ) {\n        DirectObjectInjector.instance(  ConfigHelper.fnToSmallHumpName, stereotype ).inject(\n                this.mjoInstanceConfig, stereotype, this\n        );\n\n        this.metier = Metier.queryMetier( this.mjoInstanceConfig.optString( \"Metier\" ) );\n    }\n\n    protected void reportScheme(){\n        // TODO, Detailed scheme.\n        
this.infoLifecycle( String.format(\n                \"Standby => { Hierarchy => %s, Name => %s }\", this.getHierarchy(), this.getInstanceFullName()\n        ) );\n    }\n\n    @Override\n    public Hierarchy getHierarchy() {\n        return this.mHierarchy;\n    }\n\n    @Override\n    public CascadeHeist parent() {\n        return this.mParent;\n    }\n\n    @Override\n    public RangedPage getMasterTaskPage() {\n        return this.mMasterHeistTaskPage;\n    }\n\n    @Override\n    public Heistium getHeistium() {\n        return this.mHeistium;\n    }\n\n    @Override\n    public HeistScheme getHeistScheme() {\n        return this.heistScheme;\n    }\n\n    public String getIndexPath() {\n        return this.indexPath;\n    }\n\n\n    @Override\n    public JSONConfig getConfig() {\n        return this.mjoInstanceConfig;\n    }\n\n    @Override\n    public JSONConfig getProtoConfig() {\n        return this.mjoProtoConfig;\n    }\n\n    @Override\n    public ServgramOrchestrator getAttachedOrchestrator() {\n        if( this.isRoot() ) {\n            return this.getGramHeistletOrchestrator();\n        }\n        else {\n            return this.parent().getThisHeistletOrchestrator();\n        }\n    }\n\n    @Override\n    public ChildHeistInstanceModifier getChildHeistInstanceModifier() {\n        return this.mChildHeistInstanceModifier;\n    }\n\n    @Override\n    public void applyChildHeistInstanceModifier( ChildHeistInstanceModifier modifier ) {\n        this.mChildHeistInstanceModifier = modifier;\n    }\n\n    @Override\n    public String heistName(){\n        return this.mHeistName.rootName();\n    }\n\n    @Override\n    public Namespace getHeistNamespace() {\n        return this.mHeistName;\n    }\n\n    @Override\n    public ChildHeistOrchestrator getThisHeistletOrchestrator() {\n        return this.getHeistium().getHeistletOrchestrator();\n    }\n\n    @Override\n    public HeistletOrchestrator getGramHeistletOrchestrator() {\n        return 
this.getHeistgram().getHeistletOrchestrator();\n    }\n\n    @Override\n    public void terminate(){\n        this.mHeistium.terminate();\n    }\n\n    @Override\n    public void toRavage() {\n        this.infoLifecycle( Heistum.StatusStart );\n        this.mHeistium.joinStartMultiTasks();\n        //Debug.trace( \"Deal!\" );\n    }\n\n    @Override\n    public void toStalk() {\n        this.infoLifecycle( Heistum.StatusStart );\n    }\n\n    @Override\n    public void toEmbezzle() {\n        this.infoLifecycle( Heistum.StatusStart );\n    }\n\n    protected void executeSlaveMission() throws HeistExecutionException {\n        try {\n            this.infoLifecycle(  \"It`s time to feast\" );\n\n            switch ( this.metier ) {\n                case REAVER : {\n                    this.toRavage();\n                    break;\n                }\n                case STALKER : {\n                    this.toStalk();\n                    break;\n                }\n                case EMBEZZLER : {\n                    this.toEmbezzle();\n                    break;\n                }\n                default: {\n                    break;\n                }\n            }\n\n            this.infoLifecycle( Heistum.StatusDone );\n        }\n        catch ( ProxyProvokeHandleException e ) {\n            throw new HeistExecutionException( e.getCause() );\n        }\n        catch ( RuntimeException e ) {\n            throw new HeistExecutionException( e );\n        }\n    }\n\n    protected void executeMasterMission() throws HeistOrchestrateException {\n        this.infoLifecycle( \"orchestrating transactions\" );\n\n        try {\n            this.getHeistium().getHeistletOrchestrator().orchestrate();\n        }\n        catch ( OrchestrateInterruptException e ) {\n            throw new HeistOrchestrateException( e );\n        }\n\n        this.infoLifecycle( Heistum.StatusDone );\n    }\n\n    @Override\n    public void toHeist() throws HeistException {\n        
Hierarchy hierarchy = this.getHierarchy();\n        this.getHeistgram().notifyLifecycleEvent( this, TaskInstanceStatus.Running, hierarchy );\n        try {\n            if( hierarchy == Hierarchy.Slave ) {\n                this.executeSlaveMission();\n            }\n            else {\n                this.executeMasterMission();\n            }\n        }\n        catch ( HeistExecutionException e ) {\n            Throwable cause = e.getCause();\n            if ( cause instanceof RuntimeException ) {\n                if ( e.getCause() != null ) {\n                    cause = e.getCause();\n                }\n            }\n\n            if ( cause instanceof InterruptedException ) {\n                this.getHeistgram().notifyLifecycleEvent( this, TaskInstanceStatus.Killed, hierarchy );\n            }\n            throw e;\n        }\n        catch ( HeistException e ) {\n            this.getHeistgram().notifyLifecycleEvent( this, TaskInstanceStatus.Error, hierarchy );\n            throw e;\n        }\n        this.getHeistgram().notifyLifecycleEvent( this, TaskInstanceStatus.Finished, hierarchy );\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/HeistEntity.java",
    "content": "package com.sauron.heist.heistron;\n\npublic abstract class HeistEntity implements Heistum {\n    protected String            indexPath;       // 索引路径\n    protected String            spoilPath;       // 数据文件存储路径\n\n    protected String            workingPath;     // 程序工作路径\n    protected long              taskFrom         = 0;      // Range min\n    protected long              taskTo           = 100000; // Range max\n    protected long              fragBase;\n    protected long              fragRange;\n\n    protected int               maximumThread    = 5;\n\n    protected boolean           fromDeathPoint;  // 从上一个死亡点复活\n    protected Metier            metier;          // 职业 (掠夺者、潜伏者、洗钱者)\n\n    protected HeistEntity() {\n    }\n\n    @Override\n    public int getMaximumThread() {\n        return this.maximumThread;\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/Heistotron.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.hydra.config.MapConfigReinterpreter;\nimport com.pinecone.hydra.servgram.Gram;\nimport com.pinecone.hydra.servgram.OrchestrateInterruptException;\nimport com.pinecone.hydra.task.TaskInstanceStatus;\nimport com.sauron.heist.heistron.event.HeistLifecycleEventInterceptor;\nimport com.sauron.heist.heistron.orchestration.Heistlet;\nimport com.pinecone.tritium.system.TritiumSystem;\nimport com.sauron.heist.heistron.orchestration.Hierarchy;\nimport com.sauron.heist.heistron.orchestration.LocalHeistumOrchestrator;\nimport com.sauron.heist.heistron.orchestration.HeistletOrchestrator;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.hydra.config.ConfigSource;\nimport com.pinecone.hydra.config.LocalConfigSource;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.servgram.ArchServgramium;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\n\nimport java.io.IOException;\nimport java.lang.annotation.Annotation;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\n\n/**\n *  Bean Nuts Hazelnut Sauron Heistotron\n *  Author: Harald.E / JH.W (DragonKing)\n *  Copyright © 2008 - 2028 Bean Nuts Foundation All rights reserved.\n *  *****************************************************************************************\n *  Heistotron Spider for PB-Level Automatic Crawler\n *\n *  *****************************************************************************************\n *  DragonKing.cn of Harald\n */\n@Gram( \"Heist\" )\npublic class Heistotron extends ArchServgramium implements Heistgram {\n    protected boolean                                      mbEnableCmdCall    = true;\n\n    protected ConfigSource                                 mUnifyConfigSource;\n    protected JSONConfig                                  
 mLocalHeistsConfigList;\n    protected JSONConfig                                   mTemplateHeistSchemeConfig;\n    protected JSONConfig                                   mComponents;\n\n    protected HeistletOrchestrator                         mHeistletOrchestrator;\n\n    protected Collection<HeistLifecycleEventInterceptor>   mLifecycleEventInterceptors;\n\n    public Heistotron( String szName, Processum parent ) {\n        super( szName, parent );\n\n        this.mHeistletOrchestrator = new LocalHeistumOrchestrator( this, this.getConfig()  );\n        this.loadHeistronScopeConfig();\n        this.mLifecycleEventInterceptors = new ArrayList<>();\n    }\n\n    protected void prepareTemplateHeistScheme() {\n        JSONConfig tc = this.getConfig().getChild( HeistConfigConstants.KeyTemplatedConfig ) ;\n        if( tc == null ){\n            this.getConfig().put( HeistConfigConstants.KeyTemplatedConfig, new JSONMaptron() );\n            tc = this.getConfig().getChild( HeistConfigConstants.KeyTemplatedConfig );\n        }\n        this.mTemplateHeistSchemeConfig       = tc;\n\n        this.mTemplateHeistSchemeConfig.put( HeistConfigConstants.KeyHttpBrowser, this.getComponentsConfig().opt( HeistConfigConstants.KeyHttpBrowser ) );\n    }\n\n    protected void loadHeistronScopeConfig() {\n        this.mbEnableCmdCall                  = this.getConfig().optBoolean( \"EnableCmdCall\" );\n        this.mLocalHeistsConfigList           = this.getConfig().getChild( HeistConfigConstants.KeyHeistsTable );\n        this.mComponents                      = this.getConfig().getChild( HeistConfigConstants.KeyComponents );\n\n        JSONConfig joLocalHeistsConfigList    = this.getLocalHeistsConfigList();\n        JSONConfig joLocalConfigs             = this.getConfig().getChild( HeistConfigConstants.KeyConfigScope ).getChild( HeistConfigConstants.KeyLocalConfigs );\n        MapConfigReinterpreter reinterpreter  = 
this.parentSystem().getPrimaryConfigScope().newMapConfigReinterpreter();\n        reinterpreter.addExcludeKey( HeistConfigConstants.KeyTemplatedConfig );\n        reinterpreter.reinterpretByLineage( this.getConfig(), null );\n\n        this.mUnifyConfigSource               = new LocalConfigSource( this, joLocalConfigs, joLocalHeistsConfigList ); // TODO, to implements UnifyConfigSource\n\n        this.prepareTemplateHeistScheme();\n    }\n\n    @Override\n    public Heistgram addLifecycleEventInterceptors( HeistLifecycleEventInterceptor interceptor ) {\n        this.mLifecycleEventInterceptors.add( interceptor );\n        return this;\n    }\n\n    @Override\n    public Heistgram removeLifecycleEventInterceptors( HeistLifecycleEventInterceptor interceptor ) {\n        this.mLifecycleEventInterceptors.remove( interceptor );\n        return this;\n    }\n\n    @Override\n    public JSONConfig getConfig() {\n        return (JSONConfig)this.mServgramConf;\n    }\n\n    @Override\n    public TritiumSystem parentSystem() {\n        return (TritiumSystem) super.parentSystem();\n    }\n\n    @Override\n    public JSONConfig getTemplateHeistSchemeConfig() {\n        return this.mTemplateHeistSchemeConfig;\n    }\n\n    public ConfigSource getConfigSource() {\n        return this.mUnifyConfigSource;\n    }\n\n    @Override\n    public JSONConfig getLocalHeistsConfigList() {\n        return this.mLocalHeistsConfigList;\n    }\n\n    public HeistletOrchestrator getHeistletOrchestrator() {\n        return this.mHeistletOrchestrator;\n    }\n\n    protected String queryCmdDesignatedHeist() {\n        Map<String, String[] > map = this.parentSystem().getStartupCommandMap();\n        String[] heists = map.get( \"heist\" );\n        if( heists != null && heists.length > 0 ) {\n            return heists[ 0 ];\n        }\n        return \"\";\n    }\n\n    @Override\n    public JSONConfig queryHeistConfig ( String szHeistName ) {\n        JSONConfig parent = 
this.getLocalHeistsConfigList();\n        Object thisConf   = parent.opt( szHeistName );\n\n        JSONConfig config;\n        try{\n            if( thisConf instanceof String ) {\n                config = (JSONConfig) this.getConfigSource().loadConfig( Path.of( (String) thisConf ) );\n            }\n            else if( thisConf instanceof JSONObject ) {\n                config = parent.getChild( szHeistName );\n            }\n            else {\n                config = (JSONConfig) this.getConfigSource().loadConfigBySegmentName( szHeistName );\n            }\n        }\n        catch ( IOException e ) {\n            throw new ConfigNotFoundException( e );\n        }\n\n        if( config == null ) {\n            throw new ConfigNotFoundException( \"Compromised attempts, Heist config `\" + szHeistName + \"` can be found in nowhere.\" );\n        }\n        return config;\n    }\n\n    @Override\n    public JSONConfig getComponentsConfig() {\n        return this.mComponents;\n    }\n\n    protected void dispatch() throws HeistException {\n        String szDesignatedHeist = this.queryCmdDesignatedHeist();\n        //szDesignatedHeist = \"Void\";\n        if( szDesignatedHeist.length() != 0 ) {\n            this.infoLifecycle( \"Into command-prompt mode\" );\n            List heists = this.mHeistletOrchestrator.preloads( szDesignatedHeist );\n            for( Object o : heists ) {\n                ( (Heistum) o ).toHeist();\n            }\n        }\n        else {\n            this.infoLifecycle( \"Into orchestrator mode\" );\n            try {\n                this.mHeistletOrchestrator.orchestrate();\n            }\n            catch ( OrchestrateInterruptException e ) {\n                throw new HeistOrchestrateException( e );\n            }\n        }\n    }\n\n    @Override\n    public void execute() throws HeistException  {\n        this.infoLifecycle( \"Can do !\" );\n        this.dispatch();\n    }\n\n    @Override\n    public String searchHeistName( 
Heistum that ) {\n        Annotation[] annotations = that.getClass().getAnnotations();\n        for( Annotation annotation : annotations ) {\n            if( annotation instanceof com.pinecone.hydra.servgram.Gram ) {\n                return ( (com.pinecone.hydra.servgram.Gram) annotation ).value();\n            }\n            else if( annotation instanceof Heistlet) {\n                return ( (Heistlet) annotation ).value();\n            }\n        }\n\n        List prefixes      = this.mHeistletOrchestrator.getPreloadPrefixes();\n        String szClassName = that.className();\n        if( prefixes != null ) {\n            for( Object o : prefixes ) {\n                szClassName = szClassName.replaceFirst( o.toString(), \"\" );\n            }\n        }\n\n        List suffixes      = this.mHeistletOrchestrator.getPreloadSuffixes();\n        if( suffixes != null ) {\n            for( Object o : suffixes ) {\n                szClassName = szClassName.replaceFirst( o.toString(), \"\" );\n            }\n        }\n\n        return szClassName;\n    }\n\n    @Override\n    public void notifyLifecycleEvent( Heistum heist, TaskInstanceStatus instanceStatus, Hierarchy hierarchy ) {\n        for ( HeistLifecycleEventInterceptor interceptor : this.mLifecycleEventInterceptors ) {\n            interceptor.afterLifecycleEventTriggered( heist.getName(), heist, instanceStatus, hierarchy );\n        }\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/LocalCrewnium.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.sauron.heist.heistron.scheduler.LocalPreemptiveSingleFrame64Consumer;\nimport com.sauron.heist.heistron.scheduler.PageFrame64ConsumerAdapter;\nimport com.sauron.heist.heistron.scheduler.TaskConsumer;\nimport com.sauron.heist.heistron.scheduler.TaskFrame64Producer;\nimport com.pinecone.framework.system.executum.ArchThreadum;\n\npublic abstract class LocalCrewnium extends ArchThreadum implements Crewnium {\n    protected int                            mnCrewId;\n    protected Heistum                        mParentHeist;\n    protected TaskConsumer                   mTaskConsumer;\n    protected PageFrame64ConsumerAdapter     mFrame64ConsumerAdapter = LocalCrewnium.this::consumeById;\n\n\n    public LocalCrewnium ( Heist heist, int nCrewId )  {\n        super( null, heist.getHeistium() );\n\n        this.mnCrewId      = nCrewId;\n        this.mParentHeist  = heist;\n\n        Thread affinityThread = new Thread( this );\n        this.setThreadAffinity( affinityThread );\n        this.getAffiliateThread().setName( this.nomenclature() );\n        this.setName( affinityThread.getName() );\n\n//        this.mTaskConsumer = new LocalSingleTaskPageConsumer(\n//                (TaskPageProducer) this.parentExecutum().getTaskProducer(), this.mFrame64ConsumerAdapter\n//        );\n\n        this.mTaskConsumer = new LocalPreemptiveSingleFrame64Consumer(\n                (TaskFrame64Producer) this.parentExecutum().getTaskProducer(), this.mFrame64ConsumerAdapter\n        );\n    }\n\n    protected abstract void consumeById( long index );\n\n    @Override\n    public Heistium parentExecutum() {\n        return (Heistium) super.parentExecutum();\n    }\n\n    protected String nomenclature() {\n        String szHeistName;\n        if( this.mParentHeist instanceof CascadeHeist ) {\n            szHeistName = ((CascadeHeist) this.mParentHeist).getInstanceFullName();\n        }\n        else {\n            szHeistName = 
this.mParentHeist.heistName();\n        }\n        return String.format(\n                \"%s-%s\", szHeistName, this.getAffiliateThread().getName()\n        ).toLowerCase();\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/LocalHeistium.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.sauron.heist.heistron.orchestration.HeistTask;\nimport com.sauron.heist.heistron.orchestration.Hierarchy;\nimport com.sauron.heist.heistron.scheduler.LocalPreemptiveTaskFrame64Producer;\nimport com.sauron.heist.heistron.scheduler.TaskProducer;\n\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.atomic.AtomicBoolean;\n\npublic class LocalHeistium extends HeistTask implements Heistium {\n    protected AtomicBoolean     mTerminateSignal;\n    protected int               mnMaximumThread;\n    protected TaskProducer      mTaskProducer;\n    protected long              mnAwaitFinishedMaxMillis;\n    protected Hierarchy         mHierarchy;\n\n    protected void initSelf( Heistum heistum, long nAwaitFinishedMaxMillis ) {\n        if( heistum instanceof CascadeHeist ) {\n            this.mHierarchy                  = ((CascadeHeist) heistum).getHierarchy();\n        }\n        else {\n            this.mHierarchy                  = Hierarchy.Slave;\n        }\n\n        this.mnMaximumThread             = heistum.getMaximumThread();\n        this.mTerminateSignal            = new AtomicBoolean( false );\n        this.mnAwaitFinishedMaxMillis    = nAwaitFinishedMaxMillis;\n\n        if( this.mHierarchy == Hierarchy.Slave ) {\n            //TODO\n            //this.mTaskProducer               = (new LocalTaskSchedulerStrategy( this, 100 )).formulateProducer();\n            this.mTaskProducer               = new LocalPreemptiveTaskFrame64Producer( this, this.mParentHeist.getMasterTaskPage() );\n        }\n    }\n\n    public LocalHeistium( String szName, Heistum heistum, long nAwaitFinishedMaxMillis ) {\n        super( szName, heistum );\n        this.initSelf( heistum, nAwaitFinishedMaxMillis );\n    }\n\n    public LocalHeistium( Heistum heistum, long nAwaitFinishedMaxMillis ) {\n        super( heistum );\n        this.initSelf( heistum, nAwaitFinishedMaxMillis );\n    }\n\n    @Override\n    
public TaskProducer getTaskProducer() {\n        return this.mTaskProducer;\n    }\n\n    protected void verifyIsTerminated() {\n        if( this.mTaskProducer.isFinished() || this.mTerminateSignal.get() ) {\n            throw new HeistStatusTerminatedException( \"Mission is already terminated.\" );\n        }\n    }\n\n    protected void beforeMultiTaskStart() {\n        this.verifyIsTerminated();\n    }\n\n    protected void vitalizeSoloClew( int nCrewId ) {\n        Crewnium crewnium = this.getParentHeist().newCrew( nCrewId );\n        crewnium.getAffiliateThread().start();\n        this.getTaskManager().add( crewnium  );\n    }\n\n    protected void vitalizeMultiTasks (){\n        for ( int i = 0; i < this.mnMaximumThread; ++i ) {\n            this.vitalizeSoloClew( i );\n        }\n    }\n\n    protected void awaitTasksFinished() {\n        try {\n            if( this.mnAwaitFinishedMaxMillis > 0 ) {\n                this.mTaskProducer.awaitProducerFinished( this.mnAwaitFinishedMaxMillis, TimeUnit.MILLISECONDS );\n            }\n            else {\n                this.mTaskProducer.awaitProducerFinished();\n            }\n        }\n        catch ( InterruptedException e ) {\n            this.handleAliveException( e );\n        }\n        finally {\n            this.getTaskManager().purge();\n        }\n    }\n\n    @Override\n    public void apoptosis() {\n        if( this.mHierarchy == Hierarchy.Slave ) {\n            synchronized ( this ) {\n                if( !this.mTerminateSignal.get() ) {\n                    this.mTerminateSignal.getAndSet( true );\n                    this.mParentHeist.tracer().info( \"[{} has been terminate.]\", this.mParentHeist.heistName() );\n                }\n            }\n        }\n        else {\n            this.getHeistletOrchestrator().terminate(); // Cascading terminate children.\n        }\n    }\n\n    @Override\n    public void terminate(){\n        this.apoptosis(); //TODO\n    }\n\n    @Override\n    public 
AtomicBoolean queryTerminationSignal() {\n        return this.mTerminateSignal;\n    }\n\n    @Override\n    public void joinStartMultiTasks() {\n        this.beforeMultiTaskStart();\n        this.vitalizeMultiTasks();\n        this.awaitTasksFinished();\n    }\n\n    protected void handleAliveException( Exception e ) {\n        this.getParentHeist().handleAliveException( e );\n    }\n\n    protected void handleKillException( Exception e ) throws IllegalStateException {\n        this.getParentHeist().handleKillException( e );\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/MegaDOMIndexCrew.java",
    "content": "package com.sauron.heist.heistron;\n\nimport us.codecraft.webmagic.Request;\n\nimport java.io.File;\nimport java.io.IOException;\n\npublic abstract class MegaDOMIndexCrew extends HTTPCrew {\n    public MegaDOMIndexCrew ( HTTPIndexHeist heist, int id ){\n        super( heist, id );\n    }\n\n    @Override\n    public HTTPIndexHeist parentHeist() {\n        return (HTTPIndexHeist) this.heist;\n    }\n\n    public String queryHrefById ( long id ) {\n        return this.parentHeist().queryHrefById( id );\n    }\n\n    public String queryFragNamespace( long id ) {\n        id = (id == 0 ? 1 : id);\n        long nBase = this.fragRange / this.fragBase;\n        long nLow = id / this.fragRange;\n        long nMod = id % this.fragRange;\n\n        long nAbove = nLow;\n        if ( nMod != 0 || id % 10 == 0 ) {\n            ++nAbove;\n        }\n\n        nAbove *= nBase;\n        nLow *= nBase;\n\n        String szNS = nLow + \"W\";\n        szNS += \"_\" + nAbove + \"W\";\n\n        return szNS;\n    }\n\n    public String querySpoilStorageDir( long id ) {\n        return this.parentHeist().spoilPath + this.queryFragNamespace( id ) + \"/\";\n    }\n\n    public String querySpoilStoragePath( long id ) {\n        return this.querySpoilStorageDir( id ) + \"page_\" + id + \".html\";\n    }\n\n    @Override\n    protected void tryConsumeById( long id ) throws LootRecoveredException, LootAbortException, IllegalStateException, IOException {\n        String szStorageDir  = this.querySpoilStorageDir ( id );\n        String szStoragePath = this.querySpoilStoragePath( id );\n\n        File storageDir = new File( szStorageDir );\n        if ( !storageDir.isDirectory() ) {\n            storageDir.mkdirs();\n        }\n\n        String szDummyHref = this.queryHrefById( id );\n        String szHref;\n        if ( szDummyHref.startsWith( \"http\" ) ) {\n            szHref = szDummyHref;\n        }\n        else {\n            szHref = this.heistURL + szDummyHref;\n        
}\n\n\n        Request request = new Request( szHref );\n        request.putExtra( \"id\", id );\n        this.storeHrefCache( szStoragePath, request );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/PatriarchalHeistScheme.java",
    "content": "package com.sauron.heist.heistron;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.unit.MultiScopeMap;\nimport com.pinecone.framework.unit.MultiScopeMaptron;\nimport com.pinecone.framework.unit.TreeMap;\nimport com.pinecone.framework.unit.affinity.RecursiveUnitOverrider;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.config.MapConfigReinterpreter;\nimport com.pinecone.hydra.config.ScopedMapConfigReinterpreter;\n\nimport java.util.LinkedHashMap;\nimport java.util.Map;\n\npublic class PatriarchalHeistScheme extends RecursiveUnitOverrider<String, Object > implements HeistScheme {\n    protected Heistum                           mParentHeist;\n    protected Heistgram                         mHeistron;\n    protected JSONConfig                        mjoTemplateHeistSchemeConfig;\n\n    protected JSONConfig                        mjoProtoConfig ;  // 当前Heist的JSON原型配置项，等待被子Heist继承和重写\n    protected JSONObject                        mjoChildrenConfig;\n    protected MultiScopeMap<String, Object >    mHeistScope;\n\n    protected MapConfigReinterpreter            mReinterpreter;\n\n\n    public PatriarchalHeistScheme( Heistum heist ) {\n        this.mParentHeist                 = heist;\n        this.mHeistron                    = this.mParentHeist.getHeistgram();\n        this.mjoTemplateHeistSchemeConfig = this.mHeistron.getTemplateHeistSchemeConfig();\n        this.mjoProtoConfig               = this.getParentHeist().getProtoConfig();\n\n        this.mHeistScope                  = new MultiScopeMaptron<>();\n        this.getHeistScope().addParent( ( new MultiScopeMaptron<>( this.getProtoConfig() ) ).addParent(\n                ( new MultiScopeMaptron<>( this.getTemplateHeistSchemeConfig() ) ).setName( \"Template\" ) )\n        ).setName( \"ProtoConfig\" );\n        
this.mjoChildrenConfig            = (JSONObject) this.getHeistScope().get( Heistum.ConfigChildrenKey );\n\n        this.mReinterpreter               = new ScopedMapConfigReinterpreter( null );\n    }\n\n    protected PatriarchalHeistScheme applyInstanceScope( Map<String, Object > instance ) {\n        this.getHeistScope().setThisScope( instance );\n        return this;\n    }\n\n    @Override\n    public JSONConfig getInstanceConfigByName( String name ) {\n        return this.getInstanceConfigByName( name, false );\n    }\n\n    /**\n     * getInstanceConfigByName\n     * @param name ( Child instance name, which will extents the parent scope, and get its instance config of this child. )\n     *             ( The `null` is the current scope, [this] )\n     * @param bRecursive ( Override all object and list, if that key which its child doesnt`t had. )\n     * @return Instance Config\n     */\n    @Override\n    public JSONConfig getInstanceConfigByName( @Nullable String name, boolean bRecursive ) {\n        Map<String, Object > selfProto = null;\n        Map<String, Object > selfCopy ;\n\n        if( name == null ) {\n            selfCopy = this.getProtoConfig().clone();\n        }\n        else {\n            JSONObject sub = this.mjoChildrenConfig.optJSONObject( name );\n            if( sub != null ) {\n                selfProto = sub;\n                selfCopy  = sub.clone();\n            }\n            else {\n                return null;\n            }\n        }\n\n        // Protecting the children`s key [\"Children\"]\n        Map thisChildren = (Map)selfCopy.get( Heistum.ConfigChildrenKey );\n        if( thisChildren != null ) {\n            selfCopy.remove( Heistum.ConfigChildrenKey );\n        }\n\n        this.applyInstanceScope( selfCopy );\n        JSONConfig neo = new JSONConfig( this.getProtoConfig() );\n        LinkedHashMap<String, Object > overridden = new LinkedHashMap<>();\n        this.getHeistScope().overrideTo( overridden );\n        
neo.setThisScope( overridden );\n\n\n        if( bRecursive ) {\n            this.overrideObject( overridden, this.getProtoConfig(), bRecursive );\n            this.overrideObject( overridden, this.getTemplateHeistSchemeConfig(), bRecursive );\n            //Debug.echo( JSON.stringify( overridden, 2 ) );\n        }\n\n        // Restoring the protected children`s key [\"Children\"]\n        if( thisChildren != null ) {\n            neo.put( Heistum.ConfigChildrenKey, thisChildren );\n        }\n        else {\n            neo.put( Heistum.ConfigChildrenKey, new JSONMaptron() );\n        }\n\n        if( name == null ) {\n            this.overrideOrchestrationSegment( this.getProtoConfig(), neo );\n        }\n        else {\n            this.overrideOrchestrationSegment( selfProto, neo );\n        }\n\n        return neo;\n    }\n\n    protected void overrideOrchestrationSegment( Map<String, Object > selfProto, JSONConfig neo ) {\n        Map jp = (Map) selfProto.get( Heistum.ConfigOrchestrationKey );\n        Map<String, Object > copy ;\n        if( jp == null ) {\n            copy = new JSONMaptron();\n        }\n        else {\n            copy = ( (JSONObject) jp).clone();\n        }\n        this.override( copy, this.getTemplateHeistSchemeConfig().opt( Heistum.ConfigOrchestrationKey ), true );\n\n        neo.put( Heistum.ConfigOrchestrationKey, copy );\n    }\n\n    @Override\n    public void overrideSegment ( Map<String, Object > parentProto, Map<String, Object > instance ) {\n        if( parentProto == this.getHeistScope() ) {\n            this.getHeistScope().overrideTo( instance );\n        }\n        else {\n            MultiScopeMap<String, Object > scope = new MultiScopeMaptron<>();\n            scope.setThisScope( parentProto );\n            scope.overrideTo( instance );\n        }\n    }\n\n\n    @Override\n    public PatriarchalHeistScheme reinterpret( JSONConfig that ) {\n        MultiScopeMap<String, Object > sysGlobalScope  = 
this.getHeistgram().parentSystem().getGlobalConfigScope(); // System runtime global config scope.\n\n        JSONConfig heistParentList = this.getHeistgram().getLocalHeistsConfigList();                   // Parent Scope of the master[e.g. Heist.json5::Heists] config.\n        JSONConfig rootConfig      = (JSONConfig) this.getHeistgram().parentSystem().getGlobalConfig();   // Root Scope of the master[e.g. config.json5] config.\n\n        MultiScopeMap<String, Object > keyWords = new MultiScopeMaptron<>( new TreeMap<>() );\n        keyWords.put( \"this\"      , that               );\n        keyWords.put( \"super\"     , heistParentList    );\n        keyWords.put( \"__root__\"  , rootConfig         );\n\n        this.mReinterpreter.setPrimaryScope( sysGlobalScope );\n        this.mReinterpreter.addExcludeKey( Heistum.ConfigChildrenKey );\n        this.mReinterpreter.reinterpretByBasicKeyWordsScope( that, keyWords );\n        return this;\n    }\n\n    @Override\n    public MultiScopeMap<String, Object > getHeistScope() {\n        return this.mHeistScope;\n    }\n\n    @Override\n    public JSONConfig getProtoConfig() {\n        return this.mjoProtoConfig;\n    }\n\n    @Override\n    public Heistgram getHeistgram() {\n        return this.mHeistron;\n    }\n\n    @Override\n    public Heistum getParentHeist() {\n        return this.mParentHeist;\n    }\n\n    @Override\n    public JSONConfig getTemplateHeistSchemeConfig() {\n        return this.mjoTemplateHeistSchemeConfig;\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/ArchPeriodicHeistRehearsal.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.RuntimeSystem;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.framework.util.json.homotype.AnnotatedObjectInjector;\nimport com.pinecone.framework.util.json.homotype.JSONGet;\nimport com.pinecone.framework.util.lang.NamespaceCollector;\nimport com.pinecone.framework.util.lang.PackageNameFetcher;\nimport com.pinecone.framework.util.name.FixScopeName;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.hydra.auto.PeriodicAutomaton;\nimport com.pinecone.hydra.auto.PeriodicAutomatron;\nimport com.sauron.heist.heistron.HTTPHeist;\nimport com.sauron.heist.heistron.Heistgram;\nimport com.sauron.heist.heistron.Heists;\nimport com.sauron.heist.heistron.Heistum;\nimport com.pinecone.tritium.system.TritiumSystem;\nimport com.sauron.heist.heistron.orchestration.LocalHeistumOrchestrator;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic abstract class ArchPeriodicHeistRehearsal implements PeriodicHeistRehearsal {\n    protected Heistum mHeistum;\n\n    @JSONGet( \"ChronicPerAcc\" )\n    protected long                mnChronicPerAcc;\n\n    @JSONGet( \"ChronicPeriods\" )\n    protected List<String >       mChronicPeriods;\n\n    @JSONGet( \"RaiderMarshaling\" )\n    protected JSONObject          mRaiderMarshaling;\n\n    @JSONGet( \"RaiderMarshaling.RaiderScopes\" )\n    protected List<String >       mRaiderScopes;\n\n    @JSONGet( \"RaiderMarshaling.MarshalingList\" )\n    protected List<String >       mMarshalingList;\n\n    @JSONGet( \"RaiderMarshaling.Raiders\" )\n    protected JSONObject          mRaiderConfigs;\n\n    @JSONGet( \"RaiderMarshaling.UsingSedation\" )\n    protected boolean             mbUsingSedation;\n\n    @JSONGet( \"RaiderMarshaling.UsingUniformFeast\" )\n    protected boolean             
mbUsingUniformFeast;\n\n    protected List<Raider >       mExclusiveRaiders;\n    protected List                mPreloadPrefixes;\n    protected List                mPreloadSuffixes;\n    protected RaiderFactory       mRaiderFactory;\n    protected AtomicInteger       mIndexId;\n    protected PeriodicAutomatron  mPrimaryAutomatron;\n\n    protected ArchPeriodicHeistRehearsal( Heistum heistum , boolean bDaemon ) {\n        this.mHeistum           = heistum;\n        this.mIndexId           = new AtomicInteger();\n        this.mExclusiveRaiders  = new ArrayList<>();\n        RuntimeSystem system    = heistum.parentSystem();\n        if( system instanceof TritiumSystem) {\n            ( (TritiumSystem) system ).getPrimaryConfigScope().autoInject( ArchPeriodicHeistRehearsal.class, this.mHeistum.getConfig(), this );\n        }\n        else {\n            AnnotatedObjectInjector injector = new AnnotatedObjectInjector( ArchPeriodicHeistRehearsal.class );\n            injector.inject( this.mHeistum.getConfig(), this );\n        }\n\n        this.initDirectlyLoad();\n        this.prepareFactory( new LocalRaiderFactory( this.mHeistum.getThisHeistletOrchestrator() ) );\n        this.mPrimaryAutomatron = new PeriodicAutomaton(\n                Heists.getCriterionNomenclatureName( this.mHeistum ), heistum.getHeistium(), this.mnChronicPerAcc, bDaemon\n        );\n    }\n\n\n    @Override\n    public AtomicInteger getIndexId() {\n        return this.mIndexId;\n    }\n\n    @Override\n    public List getPreloadPrefixes() {\n        return this.mPreloadPrefixes;\n    }\n\n    @Override\n    public List getPreloadSuffixes() {\n        return this.mPreloadSuffixes;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected void initDirectlyLoad() {\n        try{\n            JSONObject jDirectlyLoad = this.mRaiderMarshaling.optJSONObject( LocalHeistumOrchestrator.ConfigDirectlyLoadKey );\n            if( jDirectlyLoad == null ) {\n                this.mPreloadPrefixes = new 
ArrayList<>();\n                this.mPreloadSuffixes = new ArrayList<>();\n            }\n            else {\n                this.mPreloadPrefixes = (List) jDirectlyLoad.getOrDefault( \"Prefix\", new ArrayList<>() );\n                this.mPreloadSuffixes = (List) jDirectlyLoad.getOrDefault( \"Suffix\", new ArrayList<>() );\n            }\n\n            if ( !this.mPreloadSuffixes.contains( \"\" ) ) {\n                this.mPreloadSuffixes.add( \"\" );\n            }\n            if ( !this.mPreloadPrefixes.contains( \"\" ) ) {\n                this.mPreloadPrefixes.add( \"\" );\n            }\n        }\n        catch ( Exception e ) {\n            e.printStackTrace( this.getHeistgram().parentSystem().console().getErr() );\n        }\n    }\n\n    protected void prepareFactory( @Nullable RaiderFactory factory ) {\n        if( factory != null ) {\n            this.mRaiderFactory              = factory;\n            String szCurrentPackageName      = this.getClass().getPackageName();\n            this.mRaiderScopes.add( szCurrentPackageName );\n\n            NamespaceCollector collector = new PackageNameFetcher( factory.getClassLoader() );\n            List<String > children       = collector.fetch( szCurrentPackageName );\n            this.mRaiderScopes.addAll( children );\n\n            for( String sz : this.mRaiderScopes ) {\n                this.mRaiderFactory.getClassScope().addScope( sz );\n            }\n            this.mRaiderFactory.getTraitClassLoader().updateScope();\n        }\n    }\n\n    @Override\n    public Heistgram getHeistgram() {\n        return this.getParentHeist().getHeistgram();\n    }\n\n    @Override\n    public Heistum getParentHeist() {\n        return this.mHeistum;\n    }\n\n    @Override\n    public PeriodicAutomatron getAutomatron() {\n        return this.mPrimaryAutomatron;\n    }\n\n    @Override\n    public List<String > getRawChronicPeriods() {\n        return this.mChronicPeriods;\n    }\n\n    @Override\n    public 
JSONObject getRaiderMarshalingConf() {\n        return this.mRaiderMarshaling;\n    }\n\n    @Override\n    public JSONObject getRaiderConfigs() {\n        return this.mRaiderConfigs;\n    }\n\n    protected int nextId(){\n        return this.getIndexId().getAndIncrement();\n    }\n\n\n    @Override\n    public void vitalize() {\n        for( String szRaider : this.mMarshalingList ) {\n            List<Raider > list = this.popping( szRaider );\n            this.mExclusiveRaiders.addAll( list );\n        }\n\n        if ( this.mbUsingUniformFeast ) {\n            this.getAutomatron().command( new FeastInstructation( this ) );\n        }\n\n        for( Raider raider : this.mExclusiveRaiders ) {\n            this.getAutomatron().command( raider.getPrimeDirective() );\n            //this.getAutomatron().command( KernelInstructation.DIE );\n//            try{\n//                raider.getPrimeDirective().execute();\n//            }\n//            catch ( Exception e ) {\n//\n//            }\n\n        }\n//        this.getAutomatron().command(new Instructation() {\n//            @Override\n//            public void execute() throws Exception {\n//                Debug.sleep(30000);\n//                Debug.trace(\"shit\");\n//            }\n//        });\n        if( this.mbUsingSedation ){\n            this.getAutomatron().command( new SedationInstructation( this ) );\n        }\n        //this.getAutomatron().command( KernelInstructation.TERMINATE );\n\n        this.getAutomatron().start();\n    }\n\n    @Override\n    public void joinVitalize() throws InterruptedException {\n        this.vitalize();\n        this.getAutomatron().join();\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected List<Raider > popping( String szName ) {\n        List<String > prefixes = new ArrayList<>( this.mPreloadPrefixes );\n        prefixes.add( szName + \".\" );\n\n        return this.mRaiderFactory.popping(\n                new FixScopeName(szName, prefixes, (List<String 
>)this.mPreloadSuffixes),\n                (HTTPHeist)this.getParentHeist(), this.nextId(), this.mRaiderConfigs.optJSONObject( szName )\n        );\n    }\n\n    protected List<Raider > popping( Name name ) {\n        return this.mRaiderFactory.popping( name, this.getHeistgram() );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/ExcludeRaiderletFilters.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.util.lang.TypeFilter;\nimport com.pinecone.ulf.util.lang.HierarchyClassInspector;\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.NotFoundException;\n\nimport java.io.IOException;\n\npublic class ExcludeRaiderletFilters implements TypeFilter {\n    protected HierarchyClassInspector mClassInspector;\n\n    public ExcludeRaiderletFilters( HierarchyClassInspector inspector ) {\n        this.mClassInspector = inspector;\n    }\n\n    @Override\n    public boolean match( String szClassName, Object pool ) throws IOException {\n        try{\n            CtClass clz = ( (ClassPool) pool ).get( szClassName );\n            if( clz.isInterface() ) {\n                return true;\n            }\n            if( this.mClassInspector.isImplemented( clz, Raider.class ) ) {\n                return false;\n            }\n            return !this.mClassInspector.hasOwnAnnotation( clz, Raiderlet.class ) ;\n        }\n        catch ( NotFoundException e ) {\n            return true;\n        }\n    }\n}"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/FeastInstructation.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.util.datetime.UniformDateTimeAudit;\nimport com.pinecone.hydra.auto.ArchInstructation;\nimport com.pinecone.hydra.auto.ContinueException;\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.heist.heistron.orchestration.Instructations;\n\nimport java.time.LocalDateTime;\nimport java.util.List;\n\npublic class FeastInstructation extends ArchInstructation {\n    private Heistum mHeistum;\n    private List<String >           mChronicPeriods;\n    private UniformDateTimeAudit    mDateTimeAudit;\n\n    public FeastInstructation( Heistum heistum, List<String > chronicPeriods ) {\n        this.mHeistum        = heistum;\n        this.mChronicPeriods = chronicPeriods;\n        this.mDateTimeAudit  = UniformDateTimeAudit.DefaultAudit;\n\n        Instructations.infoConformed( heistum, this );\n    }\n\n    public FeastInstructation( PeriodicHeistRehearsal kernel ) {\n        this( kernel.getParentHeist(), kernel.getRawChronicPeriods() );\n    }\n\n    @Override\n    public void execute() throws Exception {\n        LocalDateTime currentTime   = LocalDateTime.now();\n\n        boolean isFeastTime = false;\n\n        for ( String period : this.mChronicPeriods ) {\n            if ( this.mDateTimeAudit.matches( period, currentTime ) ) {\n                isFeastTime = true;\n                break;\n            }\n        }\n\n        if ( !isFeastTime ) {\n            this.infoStarvation(\"It`s time to feast?\", \"Slumber\" );\n            throw new ContinueException();\n        }\n        this.infoStarvation(\"It`s time to feast?\", \"Berserking\" );\n    }\n\n\n    protected FeastInstructation infoStarvation( String szWhat, String szStateOrExtra ) {\n        this.mHeistum.tracer().info( \"[Starvation] [{}] <{}>\", szWhat, szStateOrExtra );\n        return this;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/LocalMultiRaiderLoader.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.hydra.servgram.filters.AnnotationValueFilter;\nimport com.pinecone.ulf.util.lang.ArchMultiScopeLoader;\nimport com.pinecone.ulf.util.lang.GenericPreloadClassInspector;\nimport com.pinecone.ulf.util.lang.PooledClassCandidateScanner;\nimport javassist.ClassPool;\nimport javassist.bytecode.annotation.Annotation;\n\npublic class LocalMultiRaiderLoader extends ArchMultiScopeLoader implements  MultiRaiderLoader {\n    protected AnnotationValueFilter mAnnoValueFilter    ;\n\n    protected LocalMultiRaiderLoader( ClassScope classScope, ClassLoader classLoader, ClassPool classPool ) {\n        super( classScope, classLoader, classPool, null, null );\n\n        this.mClassScanner         = new PooledClassCandidateScanner( new LocalRaiderScopeSet( this.mClassLoader ), this.mClassLoader, this.mClassPool );\n        this.mClassInspector       = new GenericPreloadClassInspector( this.mClassPool );\n        this.mClassScanner.addExcludeFilter( new ExcludeRaiderletFilters( this.mClassInspector ) );\n        this.setAnnotationValueFilter( new RaiderletAnnotationValueFilter() );\n    }\n\n    protected LocalMultiRaiderLoader( ClassScope classScope, ClassLoader classLoader ) {\n        this( classScope, classLoader, ClassPool.getDefault() );\n    }\n\n    public LocalMultiRaiderLoader( RaiderFactory factory ) {\n        this( factory.getClassScope(), factory.getClassLoader() );\n    }\n\n    @Override\n    public void setAnnotationValueFilter( AnnotationValueFilter filter ) {\n        this.mAnnoValueFilter = filter;\n    }\n\n    @Override\n    protected boolean isAnnotationQualified( Annotation that, String szName ) {\n        return !this.mAnnoValueFilter.match( that, szName );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Class<? 
extends Raider > load( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? extends Raider > )super.load( simpleName );\n    }\n\n    // Directly by it`s name.\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Class<? extends Raider > loadByName( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? extends Raider > )super.loadByName( simpleName );\n    }\n\n    // Scanning class`s annotations, methods or others.\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public Class<? extends Raider > loadInClassTrait( Name simpleName ) throws ClassNotFoundException {\n        return (Class<? extends Raider > )super.loadInClassTrait( simpleName );\n    }\n\n    @Override\n    protected Class<? extends Raider > loadSingleByFullClassName( String szFullClassName ) {\n        try {\n            Class<?> clazz = this.mClassLoader.loadClass( szFullClassName );\n            if( this.filter( clazz ) ) {\n                return null;\n            }\n            if ( Raider.class.isAssignableFrom( clazz ) ) {\n                return clazz.asSubclass( Raider.class );\n            }\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n\n        return null;\n    }\n\n    @Override\n    public MultiRaiderLoader updateScope() {\n        return (MultiRaiderLoader)super.updateScope();\n    }\n\n    @Override\n    public void clearCache() {\n        this.mLoadedClassesPool.clear();\n        this.mVisitedClasses.clear();\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/LocalRaiderFactory.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.system.executum.TaskManager;\nimport com.pinecone.framework.util.lang.ClassScope;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.ulf.util.lang.ArchMultiScopeFactory;\n\nimport java.lang.reflect.InvocationTargetException;\nimport java.util.List;\n\npublic class LocalRaiderFactory extends ArchMultiScopeFactory implements RaiderFactory {\n    public LocalRaiderFactory( TaskManager taskManager, ClassLoader classLoader, MultiRaiderLoader raiderLoader, ClassScope classScope ) {\n        super( taskManager, classLoader, raiderLoader, classScope );\n    }\n\n    public LocalRaiderFactory( TaskManager taskManager ) {\n        this( taskManager, taskManager.getClassLoader(), null, null );\n\n        this.mClassScope        = new LocalRaiderScopeSet( this );\n        this.mTraitClassLoader  = new LocalMultiRaiderLoader( this );\n    }\n\n    public LocalRaiderFactory( TaskManager taskManager, ClassScope classScope ) {\n        this( taskManager, taskManager.getClassLoader(), null, classScope );\n\n        this.mTraitClassLoader = new LocalMultiRaiderLoader( this );\n    }\n\n    @Override\n    public MultiRaiderLoader getTraitClassLoader() {\n        return (MultiRaiderLoader) super.getTraitClassLoader();\n    }\n\n    @Override\n    public Raider newInstance ( Class<? > that, Class<?>[] stereotypes, Object[] args ) throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {\n        return (Raider) super.newInstance( that, stereotypes, args );\n    }\n\n    @Override\n    public Raider spawn ( Name name, Object... args ) throws InvocationTargetException {\n        return this.spawn( name, null, args );\n    }\n\n    @Override\n    public Raider spawn ( Name name, Class<?>[] stereotypes, Object... 
args ) throws InvocationTargetException {\n        return (Raider) super.spawn( name, stereotypes, args );\n    }\n\n    @Override\n    public List<Raider > popping ( Name name, Object... args ) {\n        return this.popping( name, null, args );\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    public List<Raider > popping ( Name name, Class<?>[] stereotypes, Object... args ) {\n        return (List<Raider >) super.popping( name, stereotypes, args );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/LocalRaiderScopeSet.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.unit.LinkedTreeSet;\nimport com.pinecone.framework.util.lang.ArchClassScopeSet;\nimport com.pinecone.framework.util.lang.ScopedPackage;\n\nimport java.util.Set;\n\npublic class LocalRaiderScopeSet extends ArchClassScopeSet {\n    public LocalRaiderScopeSet( Set<ScopedPackage > scope, ClassLoader classLoader ) {\n        super( scope, classLoader );\n    }\n\n    public LocalRaiderScopeSet( ClassLoader classLoader ) {\n        super( new LinkedTreeSet<>(), classLoader );\n    }\n\n    public LocalRaiderScopeSet( RaiderFactory factory ) {\n        super( new LinkedTreeSet<>(), factory.getClassLoader() );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/RaiderletAnnotationValueFilter.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.hydra.servgram.filters.AnnotationValueFilter;\n\nimport javassist.bytecode.annotation.Annotation;\n\npublic class RaiderletAnnotationValueFilter implements AnnotationValueFilter {\n    public boolean match( Annotation that, String destinationName ) {\n        if( that.getTypeName().equals( Raiderlet.class.getName() ) ) {\n            String szAN = that.getMemberValue( Raiderlet.ValueKey ).toString();\n            if( szAN.startsWith( \"\\\"\" ) ){\n                return !szAN.equals(\"\\\"\" + destinationName + \"\\\"\");\n            }\n            return !szAN.equals( destinationName );\n        }\n\n        return true;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/chronic/SedationInstructation.java",
    "content": "package com.sauron.heist.heistron.chronic;\n\nimport com.pinecone.framework.util.datetime.GenericMultiFormDateTimeAudit;\nimport com.pinecone.framework.util.datetime.StorageDateTime;\nimport com.pinecone.framework.util.datetime.UniformDateTimeAudit;\nimport com.pinecone.hydra.auto.ArchInstructation;\nimport com.sauron.heist.heistron.Heistum;\nimport com.sauron.heist.heistron.orchestration.Instructations;\n\nimport java.time.LocalDateTime;\nimport java.time.temporal.ChronoUnit;\nimport java.util.List;\n\n/**\n * Force slumber to prevent excessive actions within the same time period.\n */\npublic class SedationInstructation extends ArchInstructation {\n    private Heistum mHeistum;\n    private List<String >           mChronicPeriods;\n    private UniformDateTimeAudit    mDateTimeAudit;\n\n    public SedationInstructation( Heistum heistum, List<String > chronicPeriods ) {\n        this.mHeistum        = heistum;\n        this.mChronicPeriods = chronicPeriods;\n        this.mDateTimeAudit  = UniformDateTimeAudit.DefaultAudit;\n\n        Instructations.infoConformed( heistum, this );\n    }\n\n    public SedationInstructation( PeriodicHeistRehearsal kernel ) {\n        this( kernel.getParentHeist(), kernel.getRawChronicPeriods() );\n    }\n\n\n    // Increments the first non-negative component(wildcard) from the smallest to the largest unit\n    // (nano, second, minute, hour, day, month, year)\n    protected LocalDateTime firstJumpOutTime( String period, LocalDateTime now ) {\n        StorageDateTime dateTime        = GenericMultiFormDateTimeAudit.fromString( period );\n\n        if ( dateTime.getNano() != -1 ) {\n            dateTime.setNano( dateTime.getNano() + 1 );\n        }\n        else if ( dateTime.getSecond() != -1 ) {\n            dateTime.setSecond( dateTime.getSecond() + 1 );\n        }\n        else if ( dateTime.getMinute() != -1 ) {\n            dateTime.setMinute( dateTime.getMinute() + 1 );\n        }\n        else if ( 
dateTime.getHour() != -1 ) {\n            dateTime.setHour( dateTime.getHour() + 1 );\n        }\n        else if ( dateTime.getDayOfMonth() != -1 ) {\n            dateTime.setDay( dateTime.getDayOfMonth() + 1 );\n        }\n        else if ( dateTime.getMonthValue() != -1 ) {\n            dateTime.setMonth( dateTime.getMonthValue() + 1 );\n        }\n        else if ( dateTime.getYear() != -1 ) {\n            dateTime.setYear( dateTime.getYear() + 1 );\n        }\n\n        return GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, now );\n    }\n\n    @Override\n    public void execute() throws Exception {\n        LocalDateTime currentTime   = LocalDateTime.now();\n\n        for ( String period : this.mChronicPeriods ) {\n            if ( this.mDateTimeAudit.matches( period, currentTime ) ) {\n                StorageDateTime dateTime        = GenericMultiFormDateTimeAudit.fromString( period );\n                LocalDateTime previous          = GenericMultiFormDateTimeAudit.toLocalDateTime( dateTime, currentTime );\n                LocalDateTime next              = this.firstJumpOutTime( period, currentTime );\n                long differenceInMillis         = Math.abs( ChronoUnit.MILLIS.between( next, previous ) );\n                if( differenceInMillis != 0 ) {\n                    this.infoStarvation(\"Activates Sedative [Entrance] [ForceSlumber: \" + (double)differenceInMillis / 1000d + \"s]\", \"Activated\" );\n                    Thread.sleep( differenceInMillis );\n                    this.infoStarvation(\"Deactivates Sedative [Slumber] [CurrentTime: \" + LocalDateTime.now() + \"]\", \"Deactivated\" );\n                }\n                break;\n            }\n        }\n\n        this.infoStarvation(\"Aborts Sedative [Slumber]\", \"Abort\" );\n    }\n\n\n    protected SedationInstructation infoStarvation( String szWhat, String szStateOrExtra ) {\n        this.mHeistum.tracer().info( \"[Starvation] [{}] <{}>\", szWhat, szStateOrExtra );\n        
return this;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/mapreduce/HTTPResourceTaskScheme.java",
    "content": "package com.sauron.heist.heistron.mapreduce;\n\nimport java.net.URL;\n\npublic class HTTPResourceTaskScheme implements TaskScheme {\n    public URL    url;\n    public String method;\n    public String charset;\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/ExcludeHeistletFilters.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.framework.util.lang.TypeFilter;\nimport com.sauron.heist.heistron.Heistum;\nimport com.pinecone.ulf.util.lang.HierarchyClassInspector;\n\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.NotFoundException;\n\nimport java.io.IOException;\n\npublic class ExcludeHeistletFilters implements TypeFilter {\n    protected HierarchyClassInspector mClassInspector;\n\n    public ExcludeHeistletFilters( HierarchyClassInspector inspector ) {\n        this.mClassInspector = inspector;\n    }\n\n    @Override\n    public boolean match( String szClassName, Object pool ) throws IOException {\n        try{\n            CtClass clz = ( (ClassPool) pool ).get( szClassName );\n            if( clz.isInterface() ) {\n                return true;\n            }\n            if( this.mClassInspector.isImplemented( clz, Heistum.class ) ) {\n                return false;\n            }\n            return !this.mClassInspector.hasOwnAnnotation( clz, Heistlet.class ) ;\n        }\n        catch ( NotFoundException e ) {\n            return true;\n        }\n    }\n}"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/HeistTask.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.framework.system.executum.ArchProcessum;\nimport com.pinecone.framework.system.executum.Executum;\nimport com.sauron.heist.heistron.CascadeHeist;\nimport com.sauron.heist.heistron.Heistum;\n\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic abstract class HeistTask extends ArchProcessum implements Taskium {\n    private static final AtomicInteger rootAutoIncrementId     = new AtomicInteger( 0 );\n\n    private static long nextRootAutoIncrementId() {\n        return Executum.AutoIncrementId.getAndIncrement();\n    }\n\n    private static String name( String szName, Heistum heistum ) {\n        if( szName != null ) {\n            return szName;\n        }\n\n        if( heistum instanceof CascadeHeist) {\n            CascadeHeist cascadeHeist = (CascadeHeist) heistum;\n            return cascadeHeist.getInstanceFullName();\n        }\n\n        return heistum.heistName();\n    }\n\n    private long         mnTaskId;\n    protected Heistum    mParentHeist;\n\n    public HeistTask( String szName, Heistum heistum ) {\n        super( szName, heistum.getHeistgram() );\n\n        this.mParentHeist        = heistum;\n        //this.mTaskManager        = new GenericMasterTaskManager( this );\n        this.mTaskManager        = new LocalChildHeistOrchestrator( this, this.getParentHeist().getConfig() );\n\n\n        boolean bUsingRootId     = true ;\n        if( heistum instanceof CascadeHeist ) {\n            CascadeHeist cascadeHeist = (CascadeHeist) heistum;\n            if( cascadeHeist.parent() != null ){\n                this.mnTaskId    = cascadeHeist.parent().getHeistium().getHeistletOrchestrator().nextAutoIncrementTaskId();\n                bUsingRootId    = false;\n            }\n        }\n\n        if( bUsingRootId ) {\n            this.mnTaskId        = HeistTask.nextRootAutoIncrementId();\n        }\n        this.mszName            = szName + \"-task-\" + 
this.getTaskId();\n    }\n\n    public HeistTask( Heistum heistum ) {\n        this( HeistTask.name( null, heistum ), heistum );\n    }\n\n    @Override\n    public long getTaskId() {\n        return this.mnTaskId;\n    }\n\n    public Heistum getParentHeist() {\n        return this.mParentHeist;\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/HeistletAnnotationValueFilter.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.hydra.servgram.filters.AnnotationValueFilter;\nimport javassist.bytecode.annotation.Annotation;\n\npublic class HeistletAnnotationValueFilter implements AnnotationValueFilter {\n    public boolean match(Annotation that, String destinationName ) {\n        if( that.getTypeName().equals( Heistlet.class.getName() ) ) {\n            String szAN = that.getMemberValue( Heistlet.ValueKey ).toString();\n            if( szAN.startsWith( \"\\\"\" ) ){\n                return !szAN.equals(\"\\\"\" + destinationName + \"\\\"\");\n            }\n            return !szAN.equals( destinationName );\n        }\n\n        return true;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/LocalChildHeistOrchestrator.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.hydra.servgram.*;\nimport com.sauron.heist.heistron.CascadeHeist;\nimport com.sauron.heist.heistron.Heistgram;\nimport com.sauron.heist.heistron.Heistium;\nimport com.sauron.heist.heistron.Heistum;\n\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic class LocalChildHeistOrchestrator extends ArchServgramOrchestrator implements ChildHeistOrchestrator {\n    private final AtomicInteger mAutoIncrementTaskId  = new AtomicInteger( 0 ) ;\n    protected Heistium mHeistium                                                ;\n    protected JSONConfig        mChildren                                                ;\n\n    public LocalChildHeistOrchestrator( Processum parent, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) {\n        super( parent, sectionConfig, factory, transaction );\n    }\n\n    public LocalChildHeistOrchestrator( Processum parent, PatriarchalConfig sectionConfig ) {\n        this( parent, sectionConfig, null, null );\n\n        //this.prepareFactory( new LocalHeistletFactory( this ) );\n        this.setTransaction( new LocalGramTransaction( this, parent ) );\n\n        if( parent instanceof Heistium ) {\n            this.mHeistium = (Heistium)parent;\n            this.mChildren = this.getHeist().getConfig().getChild( Heistum.ConfigChildrenKey );\n        }\n    }\n\n    @Override\n    public int nextAutoIncrementTaskId() {\n        return this.mAutoIncrementTaskId.getAndIncrement();\n    }\n\n    @Override\n    public Heistium getHeistium() {\n        return this.mHeistium;\n    }\n\n    
@Override\n    public CascadeHeist getHeist() {\n        return (CascadeHeist) this.getHeistium().getParentHeist();\n    }\n\n    @Override\n    public Heistgram getHeistgram() {\n        return this.getHeist().getHeistgram();\n    }\n\n    @Override\n    protected List<Servgram> popping( String szName ) {\n        List<Servgram> list = new ArrayList<>();\n        if( this.mChildren.hasOwnProperty( szName ) ) {\n            try{\n                CascadeHeist heistum = this.getHeist().getClass().getConstructor( Heistgram.class, CascadeHeist.class, String.class ).newInstance(\n                        this.getHeistgram(), this.getHeist(), szName\n                );\n\n                ChildHeistInstanceModifier modifier = this.getHeist().getChildHeistInstanceModifier();\n                if ( modifier != null ) {\n                    heistum.applyChildHeistInstanceModifier( modifier );\n                    modifier.modify( heistum );\n                }\n\n                this.infoLifecycle(  \"Child contrived -> \" + heistum.getInstanceFullName() ) ;\n                list.add( heistum );\n            }\n            catch ( Exception e ) {\n                this.tracer().warn( String.format( \"[%s] Construct `%s` has been compromised.\", this.className(), szName ), e );\n            }\n        }\n        return list;\n    }\n\n    @Override\n    protected List<Servgram > popping( Name name ) {\n        return this.popping( name.getName() );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/LocalHeistletFactory.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\n\nimport com.pinecone.framework.system.executum.TaskManager;\nimport com.pinecone.hydra.servgram.ArchGramFactory;\nimport com.pinecone.hydra.servgram.GramScope;\nimport com.pinecone.hydra.servgram.LocalGramScopeSet;\nimport com.pinecone.hydra.servgram.MultiGramsLoader;\n\n\npublic class LocalHeistletFactory extends ArchGramFactory {\n    public LocalHeistletFactory( TaskManager taskManager, ClassLoader classLoader, MultiGramsLoader gramLoader, GramScope gramScope ) {\n        super( taskManager, classLoader, gramLoader, gramScope );\n    }\n\n    public LocalHeistletFactory( TaskManager taskManager ) {\n        this( taskManager, taskManager.getClassLoader(), null, null );\n\n        this.mClassScope        = new LocalGramScopeSet( this );\n        this.mTraitClassLoader  = new LocalHeistletLoader( this );\n    }\n\n    public LocalHeistletFactory( TaskManager taskManager, GramScope gramScope ) {\n        this( taskManager, taskManager.getClassLoader(), null, gramScope );\n\n        this.mTraitClassLoader = new LocalHeistletLoader( this );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/LocalHeistletLoader.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.hydra.servgram.ArchGramLoader;\nimport com.pinecone.hydra.servgram.GramFactory;\nimport com.pinecone.hydra.servgram.GramScope;\nimport com.pinecone.hydra.servgram.filters.AnnotationValueFilter;\nimport com.sauron.heist.heistron.Heistum;\n\nimport javassist.ClassPool;\nimport javassist.bytecode.annotation.Annotation;\n\npublic class LocalHeistletLoader extends ArchGramLoader {\n    protected AnnotationValueFilter mAnnoValueFilter    ;\n\n    public LocalHeistletLoader( GramScope classScope, ClassLoader classLoader, ClassPool classPool ) {\n        super( classScope, classLoader, classPool );\n\n        this.mClassScanner.addExcludeFilter( new ExcludeHeistletFilters( this.mClassInspector ) );\n        this.setAnnotationValueFilter( new HeistletAnnotationValueFilter() );\n    }\n\n    public LocalHeistletLoader( GramScope classScope, ClassLoader classLoader ) {\n        this( classScope, classLoader, ClassPool.getDefault() );\n    }\n\n    public LocalHeistletLoader( GramFactory factory ) {\n        this( factory.getClassScope(), factory.getClassLoader() );\n    }\n\n\n    public void setAnnotationValueFilter( AnnotationValueFilter filter ) {\n        this.mAnnoValueFilter = filter;\n    }\n\n    @Override\n    protected boolean isAnnotationQualified( Annotation that, String szName ) {\n        return !this.mAnnoValueFilter.match( that, szName );\n    }\n\n    @Override\n    protected Class<? 
extends Heistum> loadSingleByFullClassName(String szFullClassName ) {\n        try {\n            Class<?> clazz = this.mClassLoader.loadClass( szFullClassName );\n            if( this.filter( clazz ) ) {\n                return null;\n            }\n            if ( Heistum.class.isAssignableFrom( clazz ) ) {\n                return clazz.asSubclass( Heistum.class );\n            }\n        }\n        catch ( ClassNotFoundException e ) {\n            return null;\n        }\n\n        return null;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/orchestration/LocalHeistumOrchestrator.java",
    "content": "package com.sauron.heist.heistron.orchestration;\n\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.name.FixScopeName;\nimport com.pinecone.framework.util.name.Name;\nimport com.pinecone.hydra.servgram.ArchServgramOrchestrator;\nimport com.pinecone.hydra.servgram.GramFactory;\nimport com.pinecone.hydra.servgram.GramTransaction;\nimport com.pinecone.hydra.servgram.LocalGramTransaction;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.sauron.heist.heistron.Heistgram;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class LocalHeistumOrchestrator extends ArchServgramOrchestrator implements HeistletOrchestrator {\n    public final static String     ConfigDirectlyLoadKey = \"DirectlyLoad\";\n\n    protected Heistgram mHeistgram;\n    protected List            mPreloadPrefixes;\n    protected List            mPreloadSuffixes;\n\n    public LocalHeistumOrchestrator( Processum parent, PatriarchalConfig sectionConfig, @Nullable GramFactory factory, GramTransaction transaction ) {\n        super( parent, sectionConfig, factory, transaction );\n\n        this.initDirectlyLoad();\n    }\n\n    public LocalHeistumOrchestrator( Processum parent, PatriarchalConfig sectionConfig ) {\n        this( parent, sectionConfig, null, null );\n\n        this.prepareFactory( new LocalHeistletFactory( this ) );\n        this.setTransaction( new LocalGramTransaction( this, parent ) );\n\n        if( parent instanceof Heistgram ) {\n            this.mHeistgram = (Heistgram)parent;\n        }\n    }\n\n    @Override\n    public List getPreloadPrefixes() {\n        return this.mPreloadPrefixes;\n    }\n\n    @Override\n    public List getPreloadSuffixes() {\n        return this.mPreloadSuffixes;\n    }\n\n    @SuppressWarnings( \"unchecked\" )\n    protected void initDirectlyLoad() {\n        try{\n     
       PatriarchalConfig jDirectlyLoad = this.getOrchestrationConfig().getChild( LocalHeistumOrchestrator.ConfigDirectlyLoadKey );\n            if( jDirectlyLoad != null ) {\n                this.mPreloadPrefixes = (List) jDirectlyLoad.getOrDefault( \"Prefix\", new ArrayList<>() );\n                this.mPreloadSuffixes = (List) jDirectlyLoad.getOrDefault( \"Suffix\", new ArrayList<>() );\n            }\n            else {\n                this.mPreloadPrefixes = new ArrayList<>();\n                this.mPreloadSuffixes = new ArrayList<>();\n            }\n\n            if ( !this.mPreloadPrefixes.contains( \"\" ) ) {\n                this.mPreloadPrefixes.add( \"\" );\n            }\n            if ( !this.mPreloadSuffixes.contains( \"\" ) ) {\n                this.mPreloadSuffixes.add( \"\" );\n            }\n        }\n        catch ( Exception e ) {\n            e.printStackTrace( this.getHeistgram().parentSystem().console().getErr() );\n        }\n    }\n\n    @Override\n    public Heistgram getHeistgram() {\n        return this.mHeistgram;\n    }\n\n    @Override\n    @SuppressWarnings( \"unchecked\" )\n    protected List<Servgram > popping( String szName ) {\n        List<String > prefixes = new ArrayList<>( this.mPreloadPrefixes );\n        prefixes.add( szName + \".\" );\n\n        return ( (GramFactory)this.getClassFactory() ).popping(\n                new FixScopeName(szName, prefixes, (List<String >)this.mPreloadSuffixes), this.getHeistgram()\n        );\n    }\n\n    @Override\n    protected List<Servgram > popping( Name name ) {\n        return ( (GramFactory)this.getClassFactory() ).popping( name, this.getHeistgram() );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/ActiveTaskPageProducer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.pinecone.slime.chunk.scheduler.ActivePageScheduler64;\nimport com.pinecone.slime.chunk.scheduler.LocalMapChunkRegister;\nimport com.pinecone.slime.chunk.scheduler.PageDivider;\n\n\npublic abstract class ActiveTaskPageProducer extends ActivePageScheduler64 implements TaskPageProducer {\n    public ActiveTaskPageProducer(PageDivider divider, long autoIncrementId ) {\n        super( divider, autoIncrementId );\n\n        this.mChunkRegister = new LocalMapChunkRegister<>();\n    }\n\n    @Override\n    public boolean hasMoreProducts() {\n        return this.getDivider().remainAllocatable() > 0;\n    }\n\n    @Override\n    public TaskPage require() {\n        return (TaskPage) this.activate();\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalMultiActiveTaskPageProducer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.sauron.heist.heistron.Heistium;\nimport com.pinecone.slime.chunk.ContiguousPage;\nimport com.pinecone.slime.chunk.scheduler.PageDivider;\n\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.locks.ReadWriteLock;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\npublic class LocalMultiActiveTaskPageProducer extends ActiveTaskPageProducer {\n    protected Heistium mHeistium;\n    protected ReadWriteLock     mActivateLock;\n    protected CountDownLatch    mTaskCountDownLatch;\n\n    public LocalMultiActiveTaskPageProducer( Heistium heistium, PageDivider divider, long autoIncrementId ) {\n        super( divider, autoIncrementId );\n\n        this.mHeistium           = heistium;\n        this.mActivateLock       = new ReentrantReadWriteLock();\n\n        long nProductsSum        = this.getProductsSum();\n        if( nProductsSum > Integer.MAX_VALUE ) {\n            throw new IllegalArgumentException( \"Number of local tasks should not above INT32_MAX\" );\n        }\n        this.mTaskCountDownLatch = new CountDownLatch( (int)nProductsSum );\n    }\n\n    @Override\n    public boolean hasTerminateSignal() {\n        return this.mHeistium.queryTerminationSignal().get();\n    }\n\n    @Override\n    public boolean hasMoreProducts() {\n        this.mActivateLock.readLock().lock();\n        try {\n            return super.hasMoreProducts();\n        }\n        finally {\n            this.mActivateLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    public TaskPage require() {\n        return this.activate();\n    }\n\n    @Override\n    public TaskPage activate() {\n        this.mActivateLock.writeLock().lock();\n        try {\n            return (TaskPage) super.activate();\n        }\n        finally {\n            this.mActivateLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public void 
activate( ContiguousPage that ) {\n        this.mActivateLock.writeLock().lock();\n        try {\n            super.activate( that );\n        }\n        finally {\n            this.mActivateLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public void deactivate( ContiguousPage that ) {\n        this.mActivateLock.writeLock().lock();\n        try {\n            super.deactivate( that );\n        }\n        finally {\n            this.mActivateLock.writeLock().unlock();\n        }\n\n        this.mTaskCountDownLatch.countDown();\n    }\n\n    @Override\n    public void deactivate( ContiguousPage[] those ) {\n        this.mActivateLock.writeLock().lock();\n        try {\n            for ( ContiguousPage p : those ) {\n                super.deactivate( p );\n                this.mTaskCountDownLatch.countDown();\n            }\n        }\n        finally {\n            this.mActivateLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public long getActivatedSize() {\n        this.mActivateLock.readLock().lock();\n        try {\n            return super.getActivatedSize();\n        }\n        finally {\n            this.mActivateLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    public ContiguousPage getPageById(long id ) {\n        this.mActivateLock.readLock().lock();\n        try {\n            return super.getPageById( id );\n        }\n        finally {\n            this.mActivateLock.readLock().unlock();\n        }\n    }\n\n    @Override\n    public long getProductsSum() {\n        return this.getDivider().getMaxAllocations();\n    }\n\n    @Override\n    public void awaitProducerFinished() throws InterruptedException {\n        this.mTaskCountDownLatch.await();\n    }\n\n    @Override\n    public void awaitProducerFinished( long timeout, TimeUnit unit ) throws InterruptedException {\n        this.mTaskCountDownLatch.await( timeout, unit );\n    }\n\n    @Override\n    public boolean isFinished() {\n        return 
this.mTaskCountDownLatch.getCount() <= 0;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalPreemptiveSingleFrame64Consumer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\npublic class LocalPreemptiveSingleFrame64Consumer implements TaskFrame64Consumer {\n    protected TaskFrame64Producer            mFrameProducer;\n    protected PageFrame64ConsumerAdapter     mFrame64ConsumerAdapter;\n\n    public LocalPreemptiveSingleFrame64Consumer( TaskFrame64Producer pageProducer, PageFrame64ConsumerAdapter frame64ConsumerAdapter ) {\n        this.mFrameProducer           = pageProducer;\n        this.mFrame64ConsumerAdapter  = frame64ConsumerAdapter;\n    }\n\n    @Override\n    public void consume () {\n        Long id = this.mFrameProducer.require();\n\n        while ( id != null ) {\n            if( this.mFrameProducer.hasTerminateSignal() || Thread.currentThread().isInterrupted() ) {\n                break;\n            }\n\n            try{\n                this.mFrame64ConsumerAdapter.consumeById( id );\n\n                id = this.mFrameProducer.require();\n            }\n            finally {\n                this.mFrameProducer.deactivate( id );\n            }\n        }\n    }\n\n    @Override\n    public TaskFrame64Producer getTaskPageProducer() {\n        return this.mFrameProducer;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalPreemptiveTaskFrame64Producer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.sauron.heist.heistron.Heistium;\nimport com.pinecone.slime.unitization.MinMaxRange;\nimport com.pinecone.slime.chunk.RangedPage;\n\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.atomic.AtomicLong;\nimport java.util.concurrent.locks.ReadWriteLock;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\n\npublic class LocalPreemptiveTaskFrame64Producer implements TaskFrame64Producer {\n    protected Heistium mHeistium;\n    protected RangedPage       mMasterPage;\n    protected ReadWriteLock    mActivateLock;\n    protected CountDownLatch   mTaskCountDownLatch;\n\n    protected long             mnProductStartOffset;\n    protected AtomicLong       mFinishedTasks;\n    protected long             mnProductsSum;\n    protected long             mnFinishedProductsSum;\n\n    public LocalPreemptiveTaskFrame64Producer( Heistium heistium, RangedPage masterPage ) {\n        this.mHeistium              = heistium;\n        this.mMasterPage            = masterPage;\n        this.mActivateLock          = new ReentrantReadWriteLock();\n\n        MinMaxRange range = (MinMaxRange)this.mMasterPage.getRange();\n        this.mnProductStartOffset   = range.getMin().longValue();\n        this.mnProductsSum          = range.span().longValue();\n        this.mnFinishedProductsSum  = this.mnProductsSum + this.mnProductStartOffset;\n\n        if( this.mnProductsSum > Integer.MAX_VALUE ) {\n            throw new IllegalArgumentException( \"Number of local tasks should not above INT32_MAX\" );\n        }\n\n        this.mFinishedTasks         = new AtomicLong     ( this.mnProductStartOffset    );\n        this.mTaskCountDownLatch    = new CountDownLatch( (int)this.mnProductsSum );\n    }\n\n    @Override\n    public boolean hasTerminateSignal() {\n        return this.mHeistium.queryTerminationSignal().get();\n    }\n\n    @Override\n    public long 
getProductsSum() {\n        return this.mnProductsSum;\n    }\n\n    @Override\n    public boolean hasMoreProducts() {\n        return this.mFinishedTasks.get() < this.mnFinishedProductsSum;\n    }\n\n    @Override\n    public Long require() {\n        long index = this.mFinishedTasks.getAndIncrement();\n        if( index < this.mnFinishedProductsSum ){\n            return index;\n        }\n        return null;\n    }\n\n    @Override\n    public void deactivate( Long that ){\n        this.mTaskCountDownLatch.countDown();\n    }\n\n    @Override\n    public boolean isFinished() {\n        return this.mTaskCountDownLatch.getCount() <= 0;\n    }\n\n    @Override\n    public void awaitProducerFinished() throws InterruptedException {\n        this.mTaskCountDownLatch.await();\n    }\n\n    @Override\n    public void awaitProducerFinished( long timeout, TimeUnit unit ) throws InterruptedException {\n        this.mTaskCountDownLatch.await( timeout, unit );\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalSingleTaskPageConsumer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\npublic class LocalSingleTaskPageConsumer extends SingleTaskPageConsumer {\n    public LocalSingleTaskPageConsumer( TaskPageProducer pageProducer, PageFrame64ConsumerAdapter frame64ConsumerAdapter ) {\n        super( pageProducer, frame64ConsumerAdapter );\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/LocalTaskSchedulerStrategy.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.sauron.heist.heistron.Heistium;\nimport com.pinecone.slime.chunk.RangedPage;\nimport com.pinecone.slime.chunk.Splitunk;\nimport com.pinecone.slime.chunk.scheduler.*;\n\npublic class LocalTaskSchedulerStrategy implements TaskSchedulerStrategy {\n    protected Heistium mParentHeistium;\n    protected PagePool mHeistTaskPagePool;\n    protected PageDivider mPageDivider;\n\n    protected PageRecycleStrategy mPageRecycleStrategy;\n    protected TaskProducer         mTaskProducer;\n\n    public LocalTaskSchedulerStrategy( Heistium heistium, PagePool pagePool, PageDivider divider, PageRecycleStrategy recycleStrategy ) {\n        this.mParentHeistium        = heistium;\n        this.mHeistTaskPagePool     = pagePool;\n        this.mPageDivider           = divider;\n        this.mPageRecycleStrategy   = recycleStrategy;\n    }\n\n    public LocalTaskSchedulerStrategy( Heistium heistium, long each ) {\n        this( heistium, new DirectPagePool( RangedTaskPage.class ), null, null );\n        this.mPageDivider = new FixedPageDivider64( (Splitunk) this.getMasterPage(), this.getHeistTaskPagePool(), each );\n    }\n\n    @Override\n    public Heistium getParentHeistium() {\n        return this.mParentHeistium;\n    }\n\n    @Override\n    public RangedPage getMasterPage() {\n        return this.getParentHeistium().getParentHeist().getMasterTaskPage();\n    }\n\n    @Override\n    public PagePool getHeistTaskPagePool() {\n        return this.mHeistTaskPagePool;\n    }\n\n    @Override\n    public PageDivider getPageDivider() {\n        return this.mPageDivider;\n    }\n\n    @Override\n    public PageRecycleStrategy getPageRecycleStrategy() {\n        return this.mPageRecycleStrategy;\n    }\n\n\n\n    @Override\n    public TaskSchedulerStrategy setHeistTaskPagePool( PagePool pagePool ) {\n        this.mHeistTaskPagePool = pagePool;\n        return this;\n    }\n\n    @Override\n    public 
TaskSchedulerStrategy setPageDivider( PageDivider divider ) {\n        this.mPageDivider = divider;\n        return this;\n    }\n\n    @Override\n    public TaskSchedulerStrategy setPageRecycleStrategy( PageRecycleStrategy strategy ) {\n        this.mPageRecycleStrategy = strategy;\n        return this;\n    }\n\n\n\n    @Override\n    public TaskProducer formulateProducer() {\n        this.mTaskProducer = new LocalMultiActiveTaskPageProducer(\n                this.mParentHeistium, this.getPageDivider(), this.getMasterPage().getId() + 1\n        );\n\n        return this.mTaskProducer;\n    }\n\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/RangedTaskPage.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.pinecone.slime.chunk.RangedChunk64;\nimport com.pinecone.slime.chunk.RangedPage64;\n\npublic class RangedTaskPage extends RangedPage64 implements TaskPage {\n    public RangedTaskPage(){\n        super();\n    }\n\n    public RangedTaskPage( long nStart, long nEnd, long id, RangedChunk64 parent ) {\n        super( nStart, nEnd, id, parent );\n    }\n\n    public RangedTaskPage( long nStart, long nEnd, long id ) {\n        super( nStart, nEnd, id );\n    }\n\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/heistron/scheduler/SingleTaskPageConsumer.java",
    "content": "package com.sauron.heist.heistron.scheduler;\n\nimport com.pinecone.slime.unitization.MinMaxRange64;\n\npublic abstract class SingleTaskPageConsumer implements TaskPageConsumer {\n    protected TaskPageProducer               mPageProducer;\n    protected PageFrame64ConsumerAdapter     mFrame64ConsumerAdapter;\n\n    protected SingleTaskPageConsumer( TaskPageProducer pageProducer, PageFrame64ConsumerAdapter frame64ConsumerAdapter ) {\n        this.mPageProducer           = pageProducer;\n        this.mFrame64ConsumerAdapter = frame64ConsumerAdapter;\n    }\n\n    @Override\n    public void consume () {\n        while ( this.mPageProducer.hasMoreProducts() ) {\n            if( this.mPageProducer.hasTerminateSignal() || Thread.currentThread().isInterrupted() ) {\n                break;\n            }\n\n            TaskPage page = this.mPageProducer.require();\n\n            try{\n                this.consumeSinglePage( page );\n            }\n            finally {\n                this.mPageProducer.deactivate( page );\n            }\n        }\n    }\n\n    protected void consumeSinglePage( TaskPage page ) {\n        long min = ( (MinMaxRange64)page.getRange()).getMin();\n        long max = ( (MinMaxRange64)page.getRange()).getMax();\n\n        for ( long i = min; i < max; ++i ) {\n            this.mFrame64ConsumerAdapter.consumeById( i );\n        }\n    }\n\n    @Override\n    public TaskPageProducer getTaskPageProducer() {\n        return this.mPageProducer;\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/AbstractDownloader.java",
    "content": "package com.sauron.heist.http;\n\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\nimport us.codecraft.webmagic.Site;\nimport us.codecraft.webmagic.Task;\nimport us.codecraft.webmagic.selector.Html;\n\npublic abstract class AbstractDownloader implements PageDownloader {\n    public AbstractDownloader() {\n    }\n\n    public Html download( String url) {\n        return this.download(url, (String)null);\n    }\n\n    public Html download( String url, String charset) {\n        Page page = this.download(new Request(url), Site.me().setCharset(charset).toTask());\n        return page.getHtml();\n    }\n\n    /** @deprecated */\n    @Deprecated\n    protected void onSuccess(Request request) {\n    }\n\n    protected void onSuccess( Request request, Task task ) {\n        this.onSuccess(request);\n    }\n\n    /** @deprecated */\n    @Deprecated\n    protected void onError(Request request) {\n    }\n\n    protected void onError(Request request, Task task, Throwable e) {\n        this.onError(request);\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/GenericHttpClientGenerator.java",
    "content": "package com.sauron.heist.http;\n\nimport java.io.IOException;\nimport java.security.KeyManagementException;\nimport java.security.NoSuchAlgorithmException;\nimport java.security.SecureRandom;\nimport java.security.cert.CertificateException;\nimport java.security.cert.X509Certificate;\nimport java.util.Iterator;\nimport java.util.Map;\nimport java.util.concurrent.locks.ReentrantReadWriteLock;\nimport javax.net.ssl.KeyManager;\nimport javax.net.ssl.SSLContext;\nimport javax.net.ssl.TrustManager;\nimport javax.net.ssl.X509TrustManager;\nimport org.apache.commons.lang3.JavaVersion;\nimport org.apache.commons.lang3.SystemUtils;\nimport org.apache.http.HttpException;\nimport org.apache.http.HttpRequest;\nimport org.apache.http.HttpRequestInterceptor;\nimport org.apache.http.client.CookieStore;\nimport org.apache.http.config.Registry;\nimport org.apache.http.config.RegistryBuilder;\nimport org.apache.http.config.SocketConfig;\nimport org.apache.http.conn.HttpClientConnectionManager;\nimport org.apache.http.conn.socket.ConnectionSocketFactory;\nimport org.apache.http.conn.socket.PlainConnectionSocketFactory;\nimport org.apache.http.conn.ssl.DefaultHostnameVerifier;\nimport org.apache.http.conn.ssl.SSLConnectionSocketFactory;\nimport org.apache.http.impl.client.BasicCookieStore;\nimport org.apache.http.impl.client.CloseableHttpClient;\nimport org.apache.http.impl.client.DefaultHttpRequestRetryHandler;\nimport org.apache.http.impl.client.HttpClientBuilder;\nimport org.apache.http.impl.client.HttpClients;\nimport org.apache.http.impl.conn.PoolingHttpClientConnectionManager;\nimport org.apache.http.impl.cookie.BasicClientCookie;\nimport org.apache.http.protocol.HttpContext;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport us.codecraft.webmagic.Site;\nimport us.codecraft.webmagic.downloader.CustomRedirectStrategy;\n\n\npublic class GenericHttpClientGenerator implements HttpClientGenerator {\n    private transient Logger                          
 logger = LoggerFactory.getLogger( this.getClass() );\n    private PoolingHttpClientConnectionManager         connectionManager;\n    private final ReentrantReadWriteLock               generatorLock = new ReentrantReadWriteLock();\n    private Registry<ConnectionSocketFactory >         registry;\n\n    public GenericHttpClientGenerator() {\n        this.initConnectionManager();\n    }\n\n    protected void initConnectionManager(){\n        this.registry = RegistryBuilder.<ConnectionSocketFactory >create()\n                .register(\"http\", PlainConnectionSocketFactory.INSTANCE)\n                .register(\"https\", this.buildSSLConnectionSocketFactory()).build();\n        this.connectionManager = new PoolingHttpClientConnectionManager( this.registry );\n        this.connectionManager.setDefaultMaxPerRoute(1000);\n        this.connectionManager.setValidateAfterInactivity(10000);\n    }\n\n    @Override\n    public HttpClientConnectionManager getConnectionManager() {\n        return this.connectionManager;\n    }\n\n    private SSLConnectionSocketFactory buildSSLConnectionSocketFactory() {\n        try {\n            SSLContext sslContext = this.createIgnoreVerifySSL();\n            String[] supportedProtocols;\n            if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_11)) {\n                supportedProtocols = new String[]{\"SSLv3\", \"TLSv1\", \"TLSv1.1\", \"TLSv1.2\", \"TLSv1.3\"};\n            } else {\n                supportedProtocols = new String[]{\"SSLv3\", \"TLSv1\", \"TLSv1.1\", \"TLSv1.2\"};\n            }\n\n            this.logger.debug(\"supportedProtocols: {}\", String.join(\", \", supportedProtocols));\n            return new SSLConnectionSocketFactory(sslContext, supportedProtocols, (String[])null, new DefaultHostnameVerifier());\n        }\n        catch (KeyManagementException e) {\n            this.logger.error(\"ssl connection fail\", e);\n        }\n        catch (NoSuchAlgorithmException e2) {\n            this.logger.error(\"ssl 
connection fail\", e2);\n        }\n\n        return SSLConnectionSocketFactory.getSocketFactory();\n    }\n\n    private SSLContext createIgnoreVerifySSL() throws NoSuchAlgorithmException, KeyManagementException {\n        X509TrustManager trustManager = new X509TrustManager() {\n            public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {\n            }\n\n            public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {\n            }\n\n            public X509Certificate[] getAcceptedIssuers() {\n                return null;\n            }\n        };\n        SSLContext sc = SSLContext.getInstance(\"TLS\");\n        sc.init((KeyManager[])null, new TrustManager[]{trustManager}, (SecureRandom)null);\n        return sc;\n    }\n\n    @Override\n    public GenericHttpClientGenerator setPoolSize( int poolSize ) {\n        this.connectionManager.setMaxTotal(poolSize);\n        return this;\n    }\n\n    @Override\n    public int getPoolSize(){\n        return this.connectionManager.getMaxTotal();\n    }\n\n    @Override\n    public CloseableHttpClient getClient( Site site ) {\n        return this.generateClient( site, true );\n    }\n\n    @Override\n    public CloseableHttpClient getClient( Site site, boolean bPooled ) {\n        return this.generateClient( site, bPooled );\n    }\n\n    protected CloseableHttpClient generateClient( Site site, boolean bPooled ) {\n        this.generatorLock.readLock().lock();\n        try{\n            HttpClientBuilder httpClientBuilder = HttpClients.custom();\n            if( bPooled ) {\n                httpClientBuilder.setConnectionManager( this.connectionManager );\n            }\n            if ( site.getUserAgent() != null ) {\n                httpClientBuilder.setUserAgent(site.getUserAgent());\n            }\n            else {\n                httpClientBuilder.setUserAgent(\"\");\n            }\n\n            if 
(site.isUseGzip()) {\n                httpClientBuilder.addInterceptorFirst(new HttpRequestInterceptor() {\n                    public void process(HttpRequest request, HttpContext context) throws HttpException, IOException {\n                        if (!request.containsHeader(\"Accept-Encoding\")) {\n                            request.addHeader(\"Accept-Encoding\", \"gzip\");\n                        }\n\n                    }\n                });\n            }\n\n            httpClientBuilder.setRedirectStrategy(new CustomRedirectStrategy());\n            SocketConfig.Builder socketConfigBuilder = SocketConfig.custom();\n            socketConfigBuilder.setSoKeepAlive(true).setTcpNoDelay(true);\n            socketConfigBuilder.setSoTimeout(site.getTimeOut());\n\n            if( bPooled ) {\n                SocketConfig socketConfig = socketConfigBuilder.build();\n                httpClientBuilder.setDefaultSocketConfig(socketConfig);\n                this.connectionManager.setDefaultSocketConfig( socketConfig );\n            }\n\n            httpClientBuilder.setRetryHandler(new DefaultHttpRequestRetryHandler(site.getRetryTimes(), true));\n            this.generateCookie(httpClientBuilder, site);\n            return httpClientBuilder.build();\n        }\n        finally {\n            this.generatorLock.readLock().unlock();\n        }\n    }\n\n    private void generateCookie( HttpClientBuilder httpClientBuilder, Site site ) {\n        if ( site.isDisableCookieManagement() ) {\n            httpClientBuilder.disableCookieManagement();\n        }\n        else {\n            CookieStore cookieStore = new BasicCookieStore();\n            Iterator iterator = site.getCookies().entrySet().iterator();\n\n            Map.Entry domainEntry;\n            while( iterator.hasNext() ) {\n                domainEntry = (Map.Entry)iterator.next();\n                BasicClientCookie cookie = new BasicClientCookie((String)domainEntry.getKey(), (String)domainEntry.getValue());\n  
              cookie.setDomain(site.getDomain());\n                cookieStore.addCookie(cookie);\n            }\n\n            iterator = site.getAllCookies().entrySet().iterator();\n\n            while( iterator.hasNext() ) {\n                domainEntry = (Map.Entry)iterator.next();\n                Iterator it = ((Map)domainEntry.getValue()).entrySet().iterator();\n\n                while( it.hasNext() ) {\n                    Map.Entry<String, String> cookieEntry = (Map.Entry)it.next();\n                    BasicClientCookie cookie = new BasicClientCookie((String)cookieEntry.getKey(), (String)cookieEntry.getValue());\n                    cookie.setDomain((String)domainEntry.getKey());\n                    cookieStore.addCookie(cookie);\n                }\n            }\n\n            httpClientBuilder.setDefaultCookieStore(cookieStore);\n        }\n    }\n\n    @Override\n    public void close() {\n        this.generatorLock.writeLock().lock();\n        try{\n            this.connectionManager.close();\n        }\n        finally {\n            this.generatorLock.writeLock().unlock();\n        }\n    }\n\n    @Override\n    public void clearPool() {\n        if( this.connectionManager != null ) {\n            this.generatorLock.writeLock().lock();\n            try{\n                this.connectionManager.close();\n                this.initConnectionManager();\n            }\n            finally {\n                this.generatorLock.writeLock().unlock();\n            }\n        }\n    }\n}"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/HttpBrowserDownloader.java",
    "content": "package com.sauron.heist.http;\n\nimport com.pinecone.framework.system.ProxyProvokeHandleException;\n\nimport org.apache.commons.io.IOUtils;\nimport org.apache.http.HttpResponse;\nimport org.apache.http.client.methods.CloseableHttpResponse;\nimport org.apache.http.impl.client.CloseableHttpClient;\nimport org.apache.http.util.EntityUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\nimport us.codecraft.webmagic.Site;\nimport us.codecraft.webmagic.Task;\n\nimport us.codecraft.webmagic.downloader.HttpClientRequestContext;\nimport us.codecraft.webmagic.downloader.HttpUriRequestConverter;\nimport us.codecraft.webmagic.proxy.Proxy;\nimport us.codecraft.webmagic.proxy.ProxyProvider;\nimport us.codecraft.webmagic.selector.PlainText;\nimport us.codecraft.webmagic.utils.CharsetUtils;\nimport us.codecraft.webmagic.utils.HttpClientUtils;\n\nimport javax.net.ssl.SSLException;\nimport java.io.IOException;\nimport java.nio.charset.Charset;\nimport java.util.HashMap;\nimport java.util.Map;\nimport java.util.concurrent.TimeUnit;\n\npublic class HttpBrowserDownloader extends AbstractDownloader {\n    protected Logger logger = LoggerFactory.getLogger(this.getClass());\n    protected final Map<String, CloseableHttpClient> httpClients = new HashMap<>();\n    protected HttpClientGenerator httpClientGenerator = new GenericHttpClientGenerator();\n    protected HttpUriRequestConverter httpUriRequestConverter = new HttpUriRequestConverter();\n    protected ProxyProvider proxyProvider;\n    protected boolean responseHeader = true;\n\n    protected Task parentTask = null;\n\n    public HttpBrowserDownloader() {\n    }\n\n    public HttpBrowserDownloader( Task task ) {\n        this.parentTask = task;\n    }\n\n    public void setHttpUriRequestConverter( HttpUriRequestConverter httpUriRequestConverter) {\n        this.httpUriRequestConverter = httpUriRequestConverter;\n    }\n\n    public 
void setProxyProvider( ProxyProvider proxyProvider ) {\n        this.proxyProvider = proxyProvider;\n    }\n\n    protected CloseableHttpClient getHttpClient( Site site, boolean bPooled ) {\n        if( !bPooled ) {\n            // Explicit using false.\n            return this.httpClientGenerator.getClient( site, false );\n        }\n\n        if ( site == null ) {\n            return this.httpClientGenerator.getClient((Site)null);\n        }\n        else {\n            String domain = site.getDomain();\n            CloseableHttpClient httpClient = (CloseableHttpClient)this.httpClients.get(domain);\n            if ( httpClient == null ) {\n                synchronized(this) {\n                    httpClient = (CloseableHttpClient)this.httpClients.get(domain);\n                    if (httpClient == null) {\n                        httpClient = this.httpClientGenerator.getClient(site);\n                        this.httpClients.put(domain, httpClient);\n                    }\n                }\n            }\n            return httpClient;\n        }\n    }\n\n    @Override\n    public Page download( Request request, Task task ) {\n        return this.download( request, task, true );\n    }\n\n    protected Page download0( Request request, Task task, boolean bPooled ) throws IOException {\n        CloseableHttpResponse httpResponse = null;\n        CloseableHttpClient httpClient = this.getHttpClient( task.getSite(), bPooled );\n        Proxy proxy = this.proxyProvider != null ? this.proxyProvider.getProxy(task) : null;\n        HttpClientRequestContext requestContext = this.httpUriRequestConverter.convert(request, task.getSite(), proxy);\n        Page page = Page.fail();\n\n        try {\n            httpResponse = httpClient.execute(requestContext.getHttpUriRequest(), requestContext.getHttpClientContext());\n            page = this.handleResponse(request, request.getCharset() != null ? 
request.getCharset() : task.getSite().getCharset(), httpResponse, task);\n            this.onSuccess( request, task );\n            this.logger.info( \"downloading page success {}\", request.getUrl() );\n\n            if( !bPooled ) {\n                try{\n                    httpClient.close();\n                }\n                catch ( IOException e ) {\n                    throw new ProxyProvokeHandleException( e );\n                }\n            }\n            return page;\n        }\n        finally {\n            if (httpResponse != null) {\n                EntityUtils.consumeQuietly(httpResponse.getEntity());\n            }\n\n            if ( this.proxyProvider != null && proxy != null ) {\n                this.proxyProvider.returnProxy(proxy, page, task);\n            }\n        }\n    }\n\n    @Override\n    public Page download( Request request, Task task, boolean bPooled ) {\n        if ( task != null && task.getSite() != null ) {\n            try{\n                return this.download0( request, task, bPooled );\n            }\n            catch ( IOException firstIOE ) { // First try\n                this.httpClientGenerator.getConnectionManager().closeExpiredConnections();\n                this.httpClientGenerator.getConnectionManager().closeIdleConnections( 0, TimeUnit.SECONDS );\n                this.logger.info( \"First connection {} in error, retrying.\", request.getUrl() );\n\n                try{\n                    return this.download0( request, task, bPooled );\n                }\n                catch ( SSLException sse ) {\n                    throw new ProxyProvokeHandleException( sse );\n                }\n                catch ( IOException e ) {\n                    this.onError( request, task, e );\n                    this.logger.info( \"download page {} error\", request.getUrl(), e );\n                }\n                return Page.fail();\n            }\n        }\n        else {\n            throw new 
NullPointerException(\"task or site can not be null\");\n        }\n    }\n\n    @Override\n    public void setThread( int threads ) {\n        this.httpClientGenerator.setPoolSize( threads );\n    }\n\n    protected Page handleResponse(Request request, String charset, HttpResponse httpResponse, Task task) throws IOException {\n        byte[] bytes = IOUtils.toByteArray(httpResponse.getEntity().getContent());\n        String contentType = httpResponse.getEntity().getContentType() == null ? \"\" : httpResponse.getEntity().getContentType().getValue();\n        Page page = new Page();\n        page.setBytes(bytes);\n        if (!request.isBinaryContent()) {\n            if (charset == null) {\n                charset = this.getHtmlCharset(contentType, bytes);\n            }\n\n            page.setCharset(charset);\n            page.setRawText(new String(bytes, charset));\n        }\n\n        page.setUrl(new PlainText(request.getUrl()));\n        page.setRequest(request);\n        page.setStatusCode(httpResponse.getStatusLine().getStatusCode());\n        page.setDownloadSuccess(true);\n        if (this.responseHeader) {\n            page.setHeaders(HttpClientUtils.convertHeaders(httpResponse.getAllHeaders()));\n        }\n\n        return page;\n    }\n\n    private String getHtmlCharset(String contentType, byte[] contentBytes) throws IOException {\n        String charset = CharsetUtils.detectCharset(contentType, contentBytes);\n        if (charset == null) {\n            charset = Charset.defaultCharset().name();\n            this.logger.warn(\"Charset autodetect failed, use {} as charset. Please specify charset in Site.setCharset()\", Charset.defaultCharset());\n        }\n\n        return charset;\n    }\n\n    public Logger getLogger() {\n        return this.logger;\n    }\n\n    @Override\n    public void reset() {\n        this.httpClientGenerator.clearPool();\n        this.httpClients.clear();\n    }\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/HttpClientGenerator.java",
    "content": "package com.sauron.heist.http;\n\nimport com.sauron.system.Saunut;\n\nimport org.apache.http.conn.HttpClientConnectionManager;\nimport org.apache.http.impl.client.*;\n\nimport us.codecraft.webmagic.Site;\n\npublic interface HttpClientGenerator extends Saunut {\n    GenericHttpClientGenerator setPoolSize( int poolSize ) ;\n\n    int getPoolSize();\n\n    CloseableHttpClient getClient( Site site );\n\n    CloseableHttpClient getClient( Site site, boolean bPooled );\n\n    void close();\n\n    void clearPool();\n\n    HttpClientConnectionManager getConnectionManager();\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/java/com/sauron/heist/http/PageDownloader.java",
    "content": "package com.sauron.heist.http;\n\nimport com.sauron.system.Saunut;\nimport us.codecraft.webmagic.Page;\nimport us.codecraft.webmagic.Request;\nimport us.codecraft.webmagic.Task;\nimport us.codecraft.webmagic.downloader.Downloader;\n\npublic interface PageDownloader extends Downloader, Saunut {\n    @Override\n    Page download( Request request, Task task );\n\n    Page download( Request request, Task task, boolean bPooled );\n\n    @Override\n    void setThread( int threads );\n\n    void reset();\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/main/resources/application.yaml",
    "content": "# Any private port must start with '75', eg.7580, 7577, etc.\n\nserver:\n  port: 7580\n\nspring:\n  datasource:\n    url: jdbc:mysql://node1.nutgit.com:13393/predator?useSSL=false&serverTimezone=UTC\n    username: root\n    password: root\n    driver-class-name: com.mysql.jdbc.Driver\n  redis:\n    host: localhost\n    port: 6379\n    password: Genius123\n    lettuce:\n      pool:\n        max-active: 8\n        max-idle: 8\n        min-idle: 0\n        max-wait: 100\n        time-between-eviction-runs: 10s\n  rabbitmq:\n    host: node1.nutgit.com # 主机名\n    port: 1234 # 端口\n    virtual-host: /wolf # 虚拟主机\n    username: undefined # 用户名\n    password: 1234 # 密码\n\n\nnonjron:\n  system:\n    config:\n      serviceId: Nonaron-Kingpin-Prime\n      minionName: Nonaron\n      tracer:\n        consoleTrace: true\n        infoTracer: \"./system/logs/%s_SysTrace.log\"\n        errTracer: \"./system/logs/%s_SysError.log\"\n    components:\n      heist:\n        failure-retry-times: 3\n        heist-num: 5\n        rob-task-name: douban\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/test/java/com/Test.java",
    "content": "package com;\n\npublic class Test {\n}\n"
  },
  {
    "path": "Saurons/heist-system-schedule/src/test/java/com/others/TestServgramTritium.java",
    "content": "package com.others;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.NonNull;\nimport com.pinecone.framework.system.Nullable;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.sauron.heist.heistron.CascadeHeist;\nimport com.sauron.heist.heistron.Crew;\nimport com.sauron.heist.heistron.HTTPIndexHeist;\nimport com.sauron.heist.heistron.Heist;\nimport com.sauron.heist.heistron.Heistgram;\nimport com.sauron.heist.heistron.Heistotron;\nimport com.pinecone.ulf.util.lang.GenericPreloadClassInspector;\n\nimport javassist.ClassPool;\nimport javassist.CtClass;\nimport javassist.bytecode.annotation.Annotation;\n\nclass FakeHeist extends HTTPIndexHeist {\n    public FakeHeist( Heistgram heistron ){\n        super( heistron );\n    }\n\n    public FakeHeist( Heistgram heistron, JSONConfig joConfig ){\n        super( heistron, joConfig );\n    }\n\n    public FakeHeist(Heistgram heistron, @Nullable CascadeHeist parent, @NonNull String szChildName ) {\n        super( heistron, parent, szChildName );\n    }\n\n    @Override\n    public Crew newCrew(int nCrewId ) {\n        return null;\n    }\n\n    @Override\n    public void toRavage(){\n        super.toRavage();\n    }\n\n    @Override\n    public void toStalk(){\n\n    }\n}\n\npublic class TestServgramTritium {\n    public static void testJavassist() throws Exception{\n        GenericPreloadClassInspector inspector = new GenericPreloadClassInspector( ClassPool.getDefault() );\n\n\n        String className = \"com.sauron.heist.heistron.Heistotron\";\n        Debug.trace( inspector.isImplementedDirectly( className, Heistgram.class ) );\n        Debug.trace( inspector.isImplemented( className, com.pinecone.hydra.servgram.Servgram.class ) );\n\n        Debug.trace( inspector.isExtendedDirectly( Heistgram.class.getName(), com.pinecone.hydra.servgram.Servgram.class ) );\n        Debug.trace( inspector.isExtended( 
Heistgram.class.getName(), com.pinecone.framework.system.executum.Processum.class ) );\n\n        Debug.trace( inspector.hasOwnAnnotations( Heistotron.class.getName(), new Class[]{ com.pinecone.hydra.servgram.Gram.class } ) );\n\n        Debug.trace( inspector.isImplemented( com.others.FakeHeist.class.getName(), Heist.class /*com.pinecone.framework.system.prototype.Pinenut.class*/ ) );\n\n        Debug.trace( inspector.isExtended( com.others.FakeHeist.class.getName(), Heist.class ) );\n\n        ClassPool pool = ClassPool.getDefault();\n        CtClass ctClass = pool.get(className);\n        Debug.trace( ctClass.isFrozen() ) ;\n\n        Annotation[] annotations = inspector.queryVisibleAnnotations( ctClass );\n        Debug.echo( annotations[0].getMemberValue( \"value\" ) );\n\n    }\n\n    public static void main(String[] args) throws Exception {\n        // String szJson = FileUtils.readAll(\"J:/120KWordsPhonetics.json5\");\n        Pinecone.init((Object... cfg) -> {\n\n            // TestServgramTritium.testClassReader();\n            TestServgramTritium.testJavassist();\n\n\n            return 0;\n        }, (Object[]) args);\n    }\n}\n"
  },
  {
    "path": "Saurons/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.saurons</groupId>\n    <artifactId>saurons</artifactId>\n    <packaging>pom</packaging>\n    <version>1.2.7</version>\n\n    <modules>\n        <module>heist-framework-architecture</module>\n        <module>heist-system-schedule</module>\n        <module>heist-http-client-okhttp-suit</module>\n        <module>sauron-core</module>\n        <module>saurye</module>\n        <module>shadow</module>\n    </modules>\n\n    <dependencies>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Saurons/sauron-core/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>saurons</artifactId>\n        <groupId>com.saurons</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.sauron.core</groupId>\n    <artifactId>sauron-core</artifactId>\n    <version>1.2.7</version>\n    <packaging>jar</packaging>\n\n    <modules>\n\n    </modules>\n\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n          
  <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Saurons/sauron-core/src/main/java/com/sauron/Sauron.java",
    "content": "package com.sauron;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.tritium.Tritium;\nimport com.sauron.system.SauronKingdom;\n\npublic class Sauron extends Tritium implements SauronKingdom {\n    public Sauron( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Sauron( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n\n    @Override\n    protected void traceSubsystemWelcomeInfo() {\n        this.pout().print( \"------------------------Sauron Framework-----------------------\\n\" );\n        this.pout().print( \"\\u001B[31m\\uD83D\\uDE08 Bean Sauron Engine, Project.`Manhattan, the Grand Design` \\uD83D\\uDE08 \\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[32mCthulhu Data-Platform of Bean Nuts Digital IDC \\u001B[0m\\n\" );\n        this.pout().print( \"\\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\\u001B[0m\\n\" );\n        this.pout().print( \"---------------------------------------------------------------\\n\" );\n        this.pout().print( \"Greeting! My name is Sauron, I insight omniscience.\\n\" );\n        this.pout().print( \"Salve! 
Nomen Sauron est, omnia perspicio.\\n\" );\n        this.pout().print( \"----------------------Kernel Information-----------------------\\n\" );\n        this.pout().print( \"PineconeVer  : Bean Nuts Pinecone Ursus \" + Pinecone.VERSION + \"\\n\" );\n        this.pout().print( \"PineconeAlly : Bean Nuts Acorn Hydra\\n\" );\n        this.pout().print( \"PineconeAlly : Bean Nuts Walnut Ulfhedinn (Pinecone Framework Edition)\\n\"   );\n        this.pout().print( \"PineconeAlly : Bean Nuts Walnut Sparta\\n\" );\n        this.pout().print( \"PineconeAlly : Bean Nuts Acorn Summer (Pinecone Framework Edition)\\n\" );\n        this.pout().print( \"PineconeAlly : Bean Nuts Hazelnut Slime\\n\"   );\n        this.pout().print( \"PineconeAlly : Bean Nuts Hazelnut Hydra Tritium\\n\"   );\n        this.pout().print( \"PineconeAlly : Bean Nuts Acorn Summer Springron (org.springframework.boot 2.4.1)\\n\"   );\n    }\n}\n"
  },
  {
    "path": "Saurons/sauron-core/src/main/java/com/sauron/system/Saunut.java",
    "content": "package com.sauron.system;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.system.prototype.Prototype;\n\npublic interface Saunut extends Pinenut {\n    default String toJSONString() {\n        return \"\\\"[object \" + this.className() +  \"]\\\"\";\n    }\n\n    default String prototypeName() {\n        return this.className();\n    }\n\n    default String className() {\n        return Prototype.prototypeName( this );\n    }\n}\n"
  },
  {
    "path": "Saurons/sauron-core/src/main/java/com/sauron/system/SauronKingdom.java",
    "content": "package com.sauron.system;\n\nimport com.pinecone.tritium.system.TritiumSystem;\n\npublic interface SauronKingdom extends TritiumSystem {\n}\n"
  },
  {
    "path": "Skynet/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.skynet</groupId>\n    <artifactId>skynet</artifactId>\n    <packaging>pom</packaging>\n    <version>2.5.1</version>\n\n    <modules>\n\n        <module>skynet-architecture</module>\n        <module>skynet-system</module>\n        <module>skynet-cloud-deploy</module>\n    </modules>\n</project>"
  },
  {
    "path": "Skynet/skynet-architecture/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>skynet</artifactId>\n        <groupId>com.acorn.skynet</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.skynet.kernel</groupId>\n    <artifactId>skynet-architecture</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    
</dependencies>\n</project>"
  },
  {
    "path": "Skynet/skynet-cloud-deploy/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>skynet</artifactId>\n        <groupId>com.acorn.skynet</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.skynet.deploy</groupId>\n    <artifactId>skynet-cloud-deploy</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        
<dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-service-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-device</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Skynet/skynet-cloud-deploy/src/main/java/com/acorn/skynet/deploy/CloudDeploy.java",
    "content": "package com.acorn.skynet.deploy;\n\npublic interface CloudDeploy {\n}\n"
  },
  {
    "path": "Skynet/skynet-cloud-deploy/src/main/java/com/acorn/skynet/deploy/service/CollectiveServiceDeployRegiment.java",
    "content": "package com.acorn.skynet.deploy.service;\n\nimport com.pinecone.framework.system.regime.Regiment;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\n\npublic interface CollectiveServiceDeployRegiment extends Regiment, Slf4jTraceable {\n}\n"
  },
  {
    "path": "Skynet/skynet-cloud-deploy/src/main/java/com/acorn/skynet/deploy/service/SkyCollectiveServiceDeployRegiment.java",
    "content": "package com.acorn.skynet.deploy.service;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.deploy.kom.DeployInstrument;\nimport com.pinecone.hydra.service.registry.server.ServiceManager;\nimport com.pinecone.hydra.service.registry.event.ServiceRegisterEvent;\nimport com.pinecone.hydra.service.registry.event.ServiceRegisterEventHandler;\nimport org.slf4j.Logger;\n\npublic class SkyCollectiveServiceDeployRegiment implements CollectiveServiceDeployRegiment {\n\n    protected Logger                mLogger;\n\n    protected ServiceManager        mServiceManager;\n\n    protected DeployInstrument      mDeployInstrument;\n\n\n    public SkyCollectiveServiceDeployRegiment( ServiceManager serviceManager, DeployInstrument deployInstrument ) {\n        this.mServiceManager        = serviceManager;\n        this.mDeployInstrument      = deployInstrument;\n\n        this.initServiceEvent();\n    }\n\n    protected void initServiceEvent() {\n        this.mServiceManager.addRegisterEventHandler(new ServiceRegisterEventHandler() {\n            @Override\n            public void fired( long clientId, GUID insId, GUID serviceId, ServiceRegisterEvent event, Object caused ) {\n\n            }\n        });\n    }\n\n    @Override\n    public Logger getLogger() {\n        return this.mLogger;\n    }\n}\n"
  },
  {
    "path": "Skynet/skynet-system/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>skynet</artifactId>\n        <groupId>com.acorn.skynet</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.acorn.skynet.kernel</groupId>\n    <artifactId>skynet-system</artifactId>\n    <version>2.1.0</version>\n    <packaging>jar</packaging>\n    <modules>\n\n    </modules>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    
</dependencies>\n</project>"
  },
  {
    "path": "Skynet/skynet-system/src/main/java/com/acorn/skynet/Skynet.java",
    "content": "package com.acorn.skynet;\n\nimport com.acorn.skynet.system.SkynetSubsystem;\nimport com.pinecone.framework.util.config.PatriarchalConfig;\nimport com.pinecone.framework.util.io.Tracer;\nimport com.pinecone.hydra.system.ArchModularizedSubsystem;\nimport com.pinecone.hydra.system.Hydrogen;\nimport com.pinecone.hydra.system.component.LogStatuses;\n\npublic class Skynet extends ArchModularizedSubsystem implements SkynetSubsystem {\n\n    public Skynet( Hydrogen primarySystem, String name, PatriarchalConfig config ) {\n        super( primarySystem, name, config );\n    }\n\n    @Override\n    protected void traceWelcomeInfo() {\n        Tracer console = this.mPrimarySystem.console();\n        console.getOut().print( \"---------------------------------------------------------------\\n\" );\n        console.getOut().print( \"\\u001B[31mBean Nuts Acorn Skynet\\u001B[0m\\n\" );\n        console.getOut().print( \"\\u001B[31mSkynet cloud computing infrastructure \\u001B[0m\\n\" );\n        console.getOut().print( \"\\u001B[32mCopyright(C) 2008-2028 Bean Nuts Foundation. All rights reserved.\\u001B[0m\\n\" );\n        console.getOut().print( \"---------------------------------------------------------------\\n\" );\n    }\n\n    protected void init() {\n        this.getLogger().info( \"<Skynet> >>> System Booting...\" );\n\n        this.infoLifecycle( \"<Skynet> Domain Subsystem Initialization\", LogStatuses.StatusStart );\n        this.traceWelcomeInfo();\n        this.prepare_system_skeleton();\n\n        this.infoLifecycle( \"<Skynet> Welcome to the Skynet cloud computing!\", LogStatuses.StatusReady );\n        this.infoLifecycle( \"<Skynet> Domain Subsystem Initialization\", LogStatuses.StatusReady );\n    }\n\n    protected void prepare_system_skeleton() {\n\n    }\n\n    @Override\n    public void vitalize() {\n        this.init();\n    }\n\n    @Override\n    public void terminate() {\n\n    }\n}\n"
  },
  {
    "path": "Skynet/skynet-system/src/main/java/com/acorn/skynet/system/SkynetSubsystem.java",
    "content": "package com.acorn.skynet.system;\n\nimport com.pinecone.framework.system.ModularizedSubsystem;\nimport com.pinecone.framework.system.SynergicSystem;\nimport com.pinecone.framework.system.regime.arch.Lord;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\n\npublic interface SkynetSubsystem extends SynergicSystem, ModularizedSubsystem, Lord, Slf4jTraceable {\n}\n"
  },
  {
    "path": "Sparta/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnuts.sparta</groupId>\n    <artifactId>sparta</artifactId>\n    <packaging>pom</packaging>\n    <version>2.5.1</version>\n\n    <modules>\n        <module>sparta-core-console</module>\n        <module>sparta-uofs-service</module>\n        <module>sparta-uofs-console</module>\n        <module>sparta-uac-console</module>\n        <module>sparta-api-uac</module>\n        <module>sparta-ucdn-service</module>\n        <module>sparta-ucdn-console</module>\n        <module>sparta-api-uofs</module>\n    </modules>\n\n\n</project>"
  },
  {
    "path": "Sparta/sparta-api-uac/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sparta</artifactId>\n        <groupId>com.walnuts.sparta</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.sparta.api.uac</groupId>\n    <artifactId>sparta-api-uac</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>junit</groupId>\n            <artifactId>junit</artifactId>\n            <version>3.8.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n       
     <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.sdk.thrift</groupId>\n            <artifactId>hydra-lib-thrift-sdk</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.testng</groupId>\n            <artifactId>testng</artifactId>\n            <version>RELEASE</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.thrift</groupId>\n            <artifactId>libthrift</artifactId>\n            <version>0.18.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Sparta/sparta-api-uac/src/main/java/com/walnut/sparta/account/rpc/thrift/AccountIface.java",
    "content": "package com.walnut.sparta.account.rpc.thrift;\n/**\n * Autogenerated by Thrift Compiler (0.18.0)\n *\n * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n *  @generated\n */\n@javax.annotation.Generated(value = \"Autogenerated by Thrift Compiler (0.18.0)\", date = \"2025-01-25\")\n@SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\npublic class AccountIface {\n\n  public interface Iface {\n\n    public java.lang.String queryNodeByPath(java.lang.String path) throws org.apache.thrift.TException;\n\n  }\n\n  public interface AsyncIface {\n\n    public void queryNodeByPath(java.lang.String path, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException;\n\n  }\n\n  public static class Client extends org.apache.thrift.TServiceClient implements Iface {\n    public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {\n      public Factory() {}\n      @Override\n      public Client getClient(org.apache.thrift.protocol.TProtocol prot) {\n        return new Client(prot);\n      }\n      @Override\n      public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {\n        return new Client(iprot, oprot);\n      }\n    }\n\n    public Client(org.apache.thrift.protocol.TProtocol prot)\n    {\n      super(prot, prot);\n    }\n\n    public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {\n      super(iprot, oprot);\n    }\n\n    @Override\n    public java.lang.String queryNodeByPath(java.lang.String path) throws org.apache.thrift.TException\n    {\n      send_queryNodeByPath(path);\n      return recv_queryNodeByPath();\n    }\n\n    public void send_queryNodeByPath(java.lang.String path) throws org.apache.thrift.TException\n    {\n      queryNodeByPath_args args = new queryNodeByPath_args();\n      args.setPath(path);\n      
sendBase(\"queryNodeByPath\", args);\n    }\n\n    public java.lang.String recv_queryNodeByPath() throws org.apache.thrift.TException\n    {\n      queryNodeByPath_result result = new queryNodeByPath_result();\n      receiveBase(result, \"queryNodeByPath\");\n      if (result.isSetSuccess()) {\n        return result.success;\n      }\n      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, \"queryNodeByPath failed: unknown result\");\n    }\n\n  }\n  public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {\n    public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {\n      private org.apache.thrift.async.TAsyncClientManager clientManager;\n      private org.apache.thrift.protocol.TProtocolFactory protocolFactory;\n      public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {\n        this.clientManager = clientManager;\n        this.protocolFactory = protocolFactory;\n      }\n    @Override\n      public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {\n        return new AsyncClient(protocolFactory, clientManager, transport);\n      }\n    }\n\n    public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {\n      super(protocolFactory, clientManager, transport);\n    }\n\n    @Override\n    public void queryNodeByPath(java.lang.String path, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {\n      checkReady();\n      queryNodeByPath_call method_call = new queryNodeByPath_call(path, resultHandler, this, ___protocolFactory, ___transport);\n      this.___currentMethod = method_call;\n      
___manager.call(method_call);\n    }\n\n    public static class queryNodeByPath_call extends org.apache.thrift.async.TAsyncMethodCall<java.lang.String> {\n      private java.lang.String path;\n      public queryNodeByPath_call(java.lang.String path, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {\n        super(client, protocolFactory, transport, resultHandler, false);\n        this.path = path;\n      }\n\n      @Override\n      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {\n        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage(\"queryNodeByPath\", org.apache.thrift.protocol.TMessageType.CALL, 0));\n        queryNodeByPath_args args = new queryNodeByPath_args();\n        args.setPath(path);\n        args.write(prot);\n        prot.writeMessageEnd();\n      }\n\n      @Override\n      public java.lang.String getResult() throws org.apache.thrift.TException {\n        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {\n          throw new java.lang.IllegalStateException(\"Method call not finished!\");\n        }\n        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());\n        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);\n        return (new Client(prot)).recv_queryNodeByPath();\n      }\n    }\n\n  }\n\n  public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {\n    private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(Processor.class.getName());\n    public Processor(I 
iface) {\n      super(iface, getProcessMap(new java.util.HashMap<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));\n    }\n\n    protected Processor(I iface, java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {\n      super(iface, getProcessMap(processMap));\n    }\n\n    private static <I extends Iface> java.util.Map<java.lang.String,  org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {\n      processMap.put(\"queryNodeByPath\", new queryNodeByPath());\n      return processMap;\n    }\n\n    public static class queryNodeByPath<I extends Iface> extends org.apache.thrift.ProcessFunction<I, queryNodeByPath_args> {\n      public queryNodeByPath() {\n        super(\"queryNodeByPath\");\n      }\n\n      @Override\n      public queryNodeByPath_args getEmptyArgsInstance() {\n        return new queryNodeByPath_args();\n      }\n\n      @Override\n      protected boolean isOneway() {\n        return false;\n      }\n\n      @Override\n      protected boolean rethrowUnhandledExceptions() {\n        return false;\n      }\n\n      @Override\n      public queryNodeByPath_result getResult(I iface, queryNodeByPath_args args) throws org.apache.thrift.TException {\n        queryNodeByPath_result result = new queryNodeByPath_result();\n        result.success = iface.queryNodeByPath(args.path);\n        return result;\n      }\n    }\n\n  }\n\n  public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {\n    private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(AsyncProcessor.class.getName());\n    public AsyncProcessor(I iface) {\n      super(iface, getProcessMap(new java.util.HashMap<java.lang.String, org.apache.thrift.AsyncProcessFunction<I, 
? extends org.apache.thrift.TBase, ?>>()));\n    }\n\n    protected AsyncProcessor(I iface, java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase, ?>> processMap) {\n      super(iface, getProcessMap(processMap));\n    }\n\n    private static <I extends AsyncIface> java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase,?>> getProcessMap(java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase, ?>> processMap) {\n      processMap.put(\"queryNodeByPath\", new queryNodeByPath());\n      return processMap;\n    }\n\n    public static class queryNodeByPath<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, queryNodeByPath_args, java.lang.String> {\n      public queryNodeByPath() {\n        super(\"queryNodeByPath\");\n      }\n\n      @Override\n      public queryNodeByPath_args getEmptyArgsInstance() {\n        return new queryNodeByPath_args();\n      }\n\n      @Override\n      public org.apache.thrift.async.AsyncMethodCallback<java.lang.String> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {\n        final org.apache.thrift.AsyncProcessFunction fcall = this;\n        return new org.apache.thrift.async.AsyncMethodCallback<java.lang.String>() { \n          @Override\n          public void onComplete(java.lang.String o) {\n            queryNodeByPath_result result = new queryNodeByPath_result();\n            result.success = o;\n            try {\n              fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);\n            } catch (org.apache.thrift.transport.TTransportException e) {\n              _LOGGER.error(\"TTransportException writing to internal frame buffer\", e);\n              fb.close();\n            } catch (java.lang.Exception e) {\n              _LOGGER.error(\"Exception 
writing to internal frame buffer\", e);\n              onError(e);\n            }\n          }\n          @Override\n          public void onError(java.lang.Exception e) {\n            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;\n            org.apache.thrift.TSerializable msg;\n            queryNodeByPath_result result = new queryNodeByPath_result();\n            if (e instanceof org.apache.thrift.transport.TTransportException) {\n              _LOGGER.error(\"TTransportException inside handler\", e);\n              fb.close();\n              return;\n            } else if (e instanceof org.apache.thrift.TApplicationException) {\n              _LOGGER.error(\"TApplicationException inside handler\", e);\n              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;\n              msg = (org.apache.thrift.TApplicationException)e;\n            } else {\n              _LOGGER.error(\"Exception inside handler\", e);\n              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;\n              msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());\n            }\n            try {\n              fcall.sendResponse(fb,msg,msgType,seqid);\n            } catch (java.lang.Exception ex) {\n              _LOGGER.error(\"Exception writing to internal frame buffer\", ex);\n              fb.close();\n            }\n          }\n        };\n      }\n\n      @Override\n      protected boolean isOneway() {\n        return false;\n      }\n\n      @Override\n      public void start(I iface, queryNodeByPath_args args, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {\n        iface.queryNodeByPath(args.path,resultHandler);\n      }\n    }\n\n  }\n\n  @SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\n  public static class queryNodeByPath_args implements 
org.apache.thrift.TBase<queryNodeByPath_args, queryNodeByPath_args._Fields>, java.io.Serializable, Cloneable, Comparable<queryNodeByPath_args>   {\n    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(\"queryNodeByPath_args\");\n\n    private static final org.apache.thrift.protocol.TField PATH_FIELD_DESC = new org.apache.thrift.protocol.TField(\"path\", org.apache.thrift.protocol.TType.STRING, (short)1);\n\n    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new queryNodeByPath_argsStandardSchemeFactory();\n    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new queryNodeByPath_argsTupleSchemeFactory();\n\n    public @org.apache.thrift.annotation.Nullable java.lang.String path; // required\n\n    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */\n    public enum _Fields implements org.apache.thrift.TFieldIdEnum {\n      PATH((short)1, \"path\");\n\n      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();\n\n      static {\n        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {\n          byName.put(field.getFieldName(), field);\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByThriftId(int fieldId) {\n        switch(fieldId) {\n          case 1: // PATH\n            return PATH;\n          default:\n            return null;\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, throwing an exception\n       * if it is not found.\n       */\n      public static _Fields findByThriftIdOrThrow(int fieldId) {\n        _Fields fields = findByThriftId(fieldId);\n        if (fields == null) throw new 
java.lang.IllegalArgumentException(\"Field \" + fieldId + \" doesn't exist!\");\n        return fields;\n      }\n\n      /**\n       * Find the _Fields constant that matches name, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByName(java.lang.String name) {\n        return byName.get(name);\n      }\n\n      private final short _thriftId;\n      private final java.lang.String _fieldName;\n\n      _Fields(short thriftId, java.lang.String fieldName) {\n        _thriftId = thriftId;\n        _fieldName = fieldName;\n      }\n\n      @Override\n      public short getThriftFieldId() {\n        return _thriftId;\n      }\n\n      @Override\n      public java.lang.String getFieldName() {\n        return _fieldName;\n      }\n    }\n\n    // isset id assignments\n    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;\n    static {\n      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);\n      tmpMap.put(_Fields.PATH, new org.apache.thrift.meta_data.FieldMetaData(\"path\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);\n      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(queryNodeByPath_args.class, metaDataMap);\n    }\n\n    public queryNodeByPath_args() {\n    }\n\n    public queryNodeByPath_args(\n      java.lang.String path)\n    {\n      this();\n      this.path = path;\n    }\n\n    /**\n     * Performs a deep copy on <i>other</i>.\n     */\n    public queryNodeByPath_args(queryNodeByPath_args other) {\n      if (other.isSetPath()) {\n        this.path = other.path;\n      }\n    }\n\n    @Override\n    public queryNodeByPath_args deepCopy() {\n   
   return new queryNodeByPath_args(this);\n    }\n\n    @Override\n    public void clear() {\n      this.path = null;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    public java.lang.String getPath() {\n      return this.path;\n    }\n\n    public queryNodeByPath_args setPath(@org.apache.thrift.annotation.Nullable java.lang.String path) {\n      this.path = path;\n      return this;\n    }\n\n    public void unsetPath() {\n      this.path = null;\n    }\n\n    /** Returns true if field path is set (has been assigned a value) and false otherwise */\n    public boolean isSetPath() {\n      return this.path != null;\n    }\n\n    public void setPathIsSet(boolean value) {\n      if (!value) {\n        this.path = null;\n      }\n    }\n\n    @Override\n    public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {\n      switch (field) {\n      case PATH:\n        if (value == null) {\n          unsetPath();\n        } else {\n          setPath((java.lang.String)value);\n        }\n        break;\n\n      }\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public java.lang.Object getFieldValue(_Fields field) {\n      switch (field) {\n      case PATH:\n        return getPath();\n\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */\n    @Override\n    public boolean isSet(_Fields field) {\n      if (field == null) {\n        throw new java.lang.IllegalArgumentException();\n      }\n\n      switch (field) {\n      case PATH:\n        return isSetPath();\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    @Override\n    public boolean equals(java.lang.Object that) {\n      if (that instanceof queryNodeByPath_args)\n        return this.equals((queryNodeByPath_args)that);\n      return false;\n    }\n\n    public boolean 
equals(queryNodeByPath_args that) {\n      if (that == null)\n        return false;\n      if (this == that)\n        return true;\n\n      boolean this_present_path = true && this.isSetPath();\n      boolean that_present_path = true && that.isSetPath();\n      if (this_present_path || that_present_path) {\n        if (!(this_present_path && that_present_path))\n          return false;\n        if (!this.path.equals(that.path))\n          return false;\n      }\n\n      return true;\n    }\n\n    @Override\n    public int hashCode() {\n      int hashCode = 1;\n\n      hashCode = hashCode * 8191 + ((isSetPath()) ? 131071 : 524287);\n      if (isSetPath())\n        hashCode = hashCode * 8191 + path.hashCode();\n\n      return hashCode;\n    }\n\n    @Override\n    public int compareTo(queryNodeByPath_args other) {\n      if (!getClass().equals(other.getClass())) {\n        return getClass().getName().compareTo(other.getClass().getName());\n      }\n\n      int lastComparison = 0;\n\n      lastComparison = java.lang.Boolean.compare(isSetPath(), other.isSetPath());\n      if (lastComparison != 0) {\n        return lastComparison;\n      }\n      if (isSetPath()) {\n        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.path, other.path);\n        if (lastComparison != 0) {\n          return lastComparison;\n        }\n      }\n      return 0;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public _Fields fieldForId(int fieldId) {\n      return _Fields.findByThriftId(fieldId);\n    }\n\n    @Override\n    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {\n      scheme(iprot).read(iprot, this);\n    }\n\n    @Override\n    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {\n      scheme(oprot).write(oprot, this);\n    }\n\n    @Override\n    public java.lang.String toString() {\n      java.lang.StringBuilder sb = new 
java.lang.StringBuilder(\"queryNodeByPath_args(\");\n      boolean first = true;\n\n      sb.append(\"path:\");\n      if (this.path == null) {\n        sb.append(\"null\");\n      } else {\n        sb.append(this.path);\n      }\n      first = false;\n      sb.append(\")\");\n      return sb.toString();\n    }\n\n    public void validate() throws org.apache.thrift.TException {\n      // check for required fields\n      // check for sub-struct validity\n    }\n\n    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {\n      try {\n        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {\n      try {\n        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private static class queryNodeByPath_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public queryNodeByPath_argsStandardScheme getScheme() {\n        return new queryNodeByPath_argsStandardScheme();\n      }\n    }\n\n    private static class queryNodeByPath_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<queryNodeByPath_args> {\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol iprot, queryNodeByPath_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TField schemeField;\n        iprot.readStructBegin();\n        while (true)\n        {\n          schemeField = iprot.readFieldBegin();\n          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { \n            
break;\n          }\n          switch (schemeField.id) {\n            case 1: // PATH\n              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n                struct.path = iprot.readString();\n                struct.setPathIsSet(true);\n              } else { \n                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n              }\n              break;\n            default:\n              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n          }\n          iprot.readFieldEnd();\n        }\n        iprot.readStructEnd();\n\n        // check for required fields of primitive type, which can't be checked in the validate method\n        struct.validate();\n      }\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol oprot, queryNodeByPath_args struct) throws org.apache.thrift.TException {\n        struct.validate();\n\n        oprot.writeStructBegin(STRUCT_DESC);\n        if (struct.path != null) {\n          oprot.writeFieldBegin(PATH_FIELD_DESC);\n          oprot.writeString(struct.path);\n          oprot.writeFieldEnd();\n        }\n        oprot.writeFieldStop();\n        oprot.writeStructEnd();\n      }\n\n    }\n\n    private static class queryNodeByPath_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public queryNodeByPath_argsTupleScheme getScheme() {\n        return new queryNodeByPath_argsTupleScheme();\n      }\n    }\n\n    private static class queryNodeByPath_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<queryNodeByPath_args> {\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol prot, queryNodeByPath_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet optionals = new java.util.BitSet();\n        if (struct.isSetPath()) 
{\n          optionals.set(0);\n        }\n        oprot.writeBitSet(optionals, 1);\n        if (struct.isSetPath()) {\n          oprot.writeString(struct.path);\n        }\n      }\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol prot, queryNodeByPath_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet incoming = iprot.readBitSet(1);\n        if (incoming.get(0)) {\n          struct.path = iprot.readString();\n          struct.setPathIsSet(true);\n        }\n      }\n    }\n\n    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {\n      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();\n    }\n  }\n\n  @SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\n  public static class queryNodeByPath_result implements org.apache.thrift.TBase<queryNodeByPath_result, queryNodeByPath_result._Fields>, java.io.Serializable, Cloneable, Comparable<queryNodeByPath_result>   {\n    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(\"queryNodeByPath_result\");\n\n    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField(\"success\", org.apache.thrift.protocol.TType.STRING, (short)0);\n\n    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new queryNodeByPath_resultStandardSchemeFactory();\n    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new queryNodeByPath_resultTupleSchemeFactory();\n\n    public @org.apache.thrift.annotation.Nullable java.lang.String success; // required\n\n    /** The set of fields this struct contains, along with convenience 
methods for finding and manipulating them. */\n    public enum _Fields implements org.apache.thrift.TFieldIdEnum {\n      SUCCESS((short)0, \"success\");\n\n      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();\n\n      static {\n        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {\n          byName.put(field.getFieldName(), field);\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByThriftId(int fieldId) {\n        switch(fieldId) {\n          case 0: // SUCCESS\n            return SUCCESS;\n          default:\n            return null;\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, throwing an exception\n       * if it is not found.\n       */\n      public static _Fields findByThriftIdOrThrow(int fieldId) {\n        _Fields fields = findByThriftId(fieldId);\n        if (fields == null) throw new java.lang.IllegalArgumentException(\"Field \" + fieldId + \" doesn't exist!\");\n        return fields;\n      }\n\n      /**\n       * Find the _Fields constant that matches name, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByName(java.lang.String name) {\n        return byName.get(name);\n      }\n\n      private final short _thriftId;\n      private final java.lang.String _fieldName;\n\n      _Fields(short thriftId, java.lang.String fieldName) {\n        _thriftId = thriftId;\n        _fieldName = fieldName;\n      }\n\n      @Override\n      public short getThriftFieldId() {\n        return _thriftId;\n      }\n\n      @Override\n      public java.lang.String getFieldName() {\n        return _fieldName;\n      }\n    }\n\n    // isset id assignments\n    public static final java.util.Map<_Fields, 
org.apache.thrift.meta_data.FieldMetaData> metaDataMap;\n    static {\n      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);\n      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData(\"success\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);\n      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(queryNodeByPath_result.class, metaDataMap);\n    }\n\n    public queryNodeByPath_result() {\n    }\n\n    public queryNodeByPath_result(\n      java.lang.String success)\n    {\n      this();\n      this.success = success;\n    }\n\n    /**\n     * Performs a deep copy on <i>other</i>.\n     */\n    public queryNodeByPath_result(queryNodeByPath_result other) {\n      if (other.isSetSuccess()) {\n        this.success = other.success;\n      }\n    }\n\n    @Override\n    public queryNodeByPath_result deepCopy() {\n      return new queryNodeByPath_result(this);\n    }\n\n    @Override\n    public void clear() {\n      this.success = null;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    public java.lang.String getSuccess() {\n      return this.success;\n    }\n\n    public queryNodeByPath_result setSuccess(@org.apache.thrift.annotation.Nullable java.lang.String success) {\n      this.success = success;\n      return this;\n    }\n\n    public void unsetSuccess() {\n      this.success = null;\n    }\n\n    /** Returns true if field success is set (has been assigned a value) and false otherwise */\n    public boolean isSetSuccess() {\n      return this.success != null;\n    }\n\n    public void setSuccessIsSet(boolean value) {\n      if (!value) {\n        this.success = null;\n      }\n    }\n\n    @Override\n    public void 
setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {\n      switch (field) {\n      case SUCCESS:\n        if (value == null) {\n          unsetSuccess();\n        } else {\n          setSuccess((java.lang.String)value);\n        }\n        break;\n\n      }\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public java.lang.Object getFieldValue(_Fields field) {\n      switch (field) {\n      case SUCCESS:\n        return getSuccess();\n\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */\n    @Override\n    public boolean isSet(_Fields field) {\n      if (field == null) {\n        throw new java.lang.IllegalArgumentException();\n      }\n\n      switch (field) {\n      case SUCCESS:\n        return isSetSuccess();\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    @Override\n    public boolean equals(java.lang.Object that) {\n      if (that instanceof queryNodeByPath_result)\n        return this.equals((queryNodeByPath_result)that);\n      return false;\n    }\n\n    public boolean equals(queryNodeByPath_result that) {\n      if (that == null)\n        return false;\n      if (this == that)\n        return true;\n\n      boolean this_present_success = true && this.isSetSuccess();\n      boolean that_present_success = true && that.isSetSuccess();\n      if (this_present_success || that_present_success) {\n        if (!(this_present_success && that_present_success))\n          return false;\n        if (!this.success.equals(that.success))\n          return false;\n      }\n\n      return true;\n    }\n\n    @Override\n    public int hashCode() {\n      int hashCode = 1;\n\n      hashCode = hashCode * 8191 + ((isSetSuccess()) ? 
131071 : 524287);\n      if (isSetSuccess())\n        hashCode = hashCode * 8191 + success.hashCode();\n\n      return hashCode;\n    }\n\n    @Override\n    public int compareTo(queryNodeByPath_result other) {\n      if (!getClass().equals(other.getClass())) {\n        return getClass().getName().compareTo(other.getClass().getName());\n      }\n\n      int lastComparison = 0;\n\n      lastComparison = java.lang.Boolean.compare(isSetSuccess(), other.isSetSuccess());\n      if (lastComparison != 0) {\n        return lastComparison;\n      }\n      if (isSetSuccess()) {\n        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);\n        if (lastComparison != 0) {\n          return lastComparison;\n        }\n      }\n      return 0;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public _Fields fieldForId(int fieldId) {\n      return _Fields.findByThriftId(fieldId);\n    }\n\n    @Override\n    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {\n      scheme(iprot).read(iprot, this);\n    }\n\n    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {\n      scheme(oprot).write(oprot, this);\n      }\n\n    @Override\n    public java.lang.String toString() {\n      java.lang.StringBuilder sb = new java.lang.StringBuilder(\"queryNodeByPath_result(\");\n      boolean first = true;\n\n      sb.append(\"success:\");\n      if (this.success == null) {\n        sb.append(\"null\");\n      } else {\n        sb.append(this.success);\n      }\n      first = false;\n      sb.append(\")\");\n      return sb.toString();\n    }\n\n    public void validate() throws org.apache.thrift.TException {\n      // check for required fields\n      // check for sub-struct validity\n    }\n\n    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {\n      try {\n        write(new 
org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {\n      try {\n        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private static class queryNodeByPath_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public queryNodeByPath_resultStandardScheme getScheme() {\n        return new queryNodeByPath_resultStandardScheme();\n      }\n    }\n\n    private static class queryNodeByPath_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<queryNodeByPath_result> {\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol iprot, queryNodeByPath_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TField schemeField;\n        iprot.readStructBegin();\n        while (true)\n        {\n          schemeField = iprot.readFieldBegin();\n          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { \n            break;\n          }\n          switch (schemeField.id) {\n            case 0: // SUCCESS\n              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n                struct.success = iprot.readString();\n                struct.setSuccessIsSet(true);\n              } else { \n                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n              }\n              break;\n            default:\n              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n          }\n          
iprot.readFieldEnd();\n        }\n        iprot.readStructEnd();\n\n        // check for required fields of primitive type, which can't be checked in the validate method\n        struct.validate();\n      }\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol oprot, queryNodeByPath_result struct) throws org.apache.thrift.TException {\n        struct.validate();\n\n        oprot.writeStructBegin(STRUCT_DESC);\n        if (struct.success != null) {\n          oprot.writeFieldBegin(SUCCESS_FIELD_DESC);\n          oprot.writeString(struct.success);\n          oprot.writeFieldEnd();\n        }\n        oprot.writeFieldStop();\n        oprot.writeStructEnd();\n      }\n\n    }\n\n    private static class queryNodeByPath_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public queryNodeByPath_resultTupleScheme getScheme() {\n        return new queryNodeByPath_resultTupleScheme();\n      }\n    }\n\n    private static class queryNodeByPath_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<queryNodeByPath_result> {\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol prot, queryNodeByPath_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet optionals = new java.util.BitSet();\n        if (struct.isSetSuccess()) {\n          optionals.set(0);\n        }\n        oprot.writeBitSet(optionals, 1);\n        if (struct.isSetSuccess()) {\n          oprot.writeString(struct.success);\n        }\n      }\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol prot, queryNodeByPath_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet incoming = iprot.readBitSet(1);\n        if 
(incoming.get(0)) {\n          struct.success = iprot.readString();\n          struct.setSuccessIsSet(true);\n        }\n      }\n    }\n\n    private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {\n      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();\n    }\n  }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-api-uac/src/main/java/com/walnut/sparta/account/rpc/wolfmc/xx.java",
    "content": "package com.walnut.sparta.account.rpc.wolfmc;\n\npublic class xx {\n}\n"
  },
  {
    "path": "Sparta/sparta-api-uac/src/main/java/com/walnut/sparta/account/xxx.java",
    "content": "package com.walnut.sparta.account;\n\npublic class xxx {\n}\n"
  },
  {
    "path": "Sparta/sparta-api-uac/src/test/java/org/example/AppTest.java",
    "content": "package org.example;\n\nimport junit.framework.Test;\nimport junit.framework.TestCase;\nimport junit.framework.TestSuite;\n\n/**\n * Unit test for simple App.\n */\npublic class AppTest \n    extends TestCase\n{\n    /**\n     * Create the test case\n     *\n     * @param testName name of the test case\n     */\n    public AppTest( String testName )\n    {\n        super( testName );\n    }\n\n    /**\n     * @return the suite of tests being tested\n     */\n    public static Test suite()\n    {\n        return new TestSuite( AppTest.class );\n    }\n\n    /**\n     * Rigourous Test :-)\n     */\n    public void testApp()\n    {\n        assertTrue( true );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-api-uofs/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sparta</artifactId>\n        <groupId>com.walnuts.sparta</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.sparta.api.uofs</groupId>\n    <artifactId>sparta-api-uofs</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>junit</groupId>\n            <artifactId>junit</artifactId>\n            <version>3.8.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n     
       <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.sdk.thrift</groupId>\n            <artifactId>hydra-lib-thrift-sdk</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.testng</groupId>\n            <artifactId>testng</artifactId>\n            <version>RELEASE</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.thrift</groupId>\n            <artifactId>libthrift</artifactId>\n            <version>0.18.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Sparta/sparta-api-uofs/src/main/java/com/walnut/sparta/uofs/thrift/UOFSIface.java",
    "content": "package com.walnut.sparta.uofs.thrift;\n\n/**\n * Autogenerated by Thrift Compiler (0.18.0)\n *\n * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING\n *  @generated\n */\n@javax.annotation.Generated(value = \"Autogenerated by Thrift Compiler (0.18.0)\", date = \"2025-03-01\")\n@SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\npublic class UOFSIface {\n\n  public interface Iface {\n\n    public java.lang.String test(java.lang.String msg) throws org.apache.thrift.TException;\n\n  }\n\n  public interface AsyncIface {\n\n    public void test(java.lang.String msg, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException;\n\n  }\n\n  public static class Client extends org.apache.thrift.TServiceClient implements Iface {\n    public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {\n      public Factory() {}\n      @Override\n      public Client getClient(org.apache.thrift.protocol.TProtocol prot) {\n        return new Client(prot);\n      }\n      @Override\n      public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {\n        return new Client(iprot, oprot);\n      }\n    }\n\n    public Client(org.apache.thrift.protocol.TProtocol prot)\n    {\n      super(prot, prot);\n    }\n\n    public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {\n      super(iprot, oprot);\n    }\n\n    @Override\n    public java.lang.String test(java.lang.String msg) throws org.apache.thrift.TException\n    {\n      send_test(msg);\n      return recv_test();\n    }\n\n    public void send_test(java.lang.String msg) throws org.apache.thrift.TException\n    {\n      test_args args = new test_args();\n      args.setMsg(msg);\n      sendBase(\"test\", args);\n    }\n\n    public java.lang.String recv_test() throws 
org.apache.thrift.TException\n    {\n      test_result result = new test_result();\n      receiveBase(result, \"test\");\n      if (result.isSetSuccess()) {\n        return result.success;\n      }\n      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, \"test failed: unknown result\");\n    }\n\n  }\n  public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {\n    public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {\n      private org.apache.thrift.async.TAsyncClientManager clientManager;\n      private org.apache.thrift.protocol.TProtocolFactory protocolFactory;\n      public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {\n        this.clientManager = clientManager;\n        this.protocolFactory = protocolFactory;\n      }\n    @Override\n      public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {\n        return new AsyncClient(protocolFactory, clientManager, transport);\n      }\n    }\n\n    public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {\n      super(protocolFactory, clientManager, transport);\n    }\n\n    @Override\n    public void test(java.lang.String msg, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {\n      checkReady();\n      test_call method_call = new test_call(msg, resultHandler, this, ___protocolFactory, ___transport);\n      this.___currentMethod = method_call;\n      ___manager.call(method_call);\n    }\n\n    public static class test_call extends org.apache.thrift.async.TAsyncMethodCall<java.lang.String> {\n      private java.lang.String msg;\n      public 
test_call(java.lang.String msg, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {\n        super(client, protocolFactory, transport, resultHandler, false);\n        this.msg = msg;\n      }\n\n      @Override\n      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {\n        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage(\"test\", org.apache.thrift.protocol.TMessageType.CALL, 0));\n        test_args args = new test_args();\n        args.setMsg(msg);\n        args.write(prot);\n        prot.writeMessageEnd();\n      }\n\n      @Override\n      public java.lang.String getResult() throws org.apache.thrift.TException {\n        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {\n          throw new java.lang.IllegalStateException(\"Method call not finished!\");\n        }\n        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());\n        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);\n        return (new Client(prot)).recv_test();\n      }\n    }\n\n  }\n\n  public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {\n    private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(Processor.class.getName());\n    public Processor(I iface) {\n      super(iface, getProcessMap(new java.util.HashMap<java.lang.String, org.apache.thrift.ProcessFunction<I, ? 
extends org.apache.thrift.TBase>>()));\n    }\n\n    protected Processor(I iface, java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {\n      super(iface, getProcessMap(processMap));\n    }\n\n    private static <I extends Iface> java.util.Map<java.lang.String,  org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(java.util.Map<java.lang.String, org.apache.thrift.ProcessFunction<I, ? extends  org.apache.thrift.TBase>> processMap) {\n      processMap.put(\"test\", new test());\n      return processMap;\n    }\n\n    public static class test<I extends Iface> extends org.apache.thrift.ProcessFunction<I, test_args> {\n      public test() {\n        super(\"test\");\n      }\n\n      @Override\n      public test_args getEmptyArgsInstance() {\n        return new test_args();\n      }\n\n      @Override\n      protected boolean isOneway() {\n        return false;\n      }\n\n      @Override\n      protected boolean rethrowUnhandledExceptions() {\n        return false;\n      }\n\n      @Override\n      public test_result getResult(I iface, test_args args) throws org.apache.thrift.TException {\n        test_result result = new test_result();\n        result.success = iface.test(args.msg);\n        return result;\n      }\n    }\n\n  }\n\n  public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {\n    private static final org.slf4j.Logger _LOGGER = org.slf4j.LoggerFactory.getLogger(AsyncProcessor.class.getName());\n    public AsyncProcessor(I iface) {\n      super(iface, getProcessMap(new java.util.HashMap<java.lang.String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));\n    }\n\n    protected AsyncProcessor(I iface, java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? 
extends  org.apache.thrift.TBase, ?>> processMap) {\n      super(iface, getProcessMap(processMap));\n    }\n\n    private static <I extends AsyncIface> java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase,?>> getProcessMap(java.util.Map<java.lang.String,  org.apache.thrift.AsyncProcessFunction<I, ? extends  org.apache.thrift.TBase, ?>> processMap) {\n      processMap.put(\"test\", new test());\n      return processMap;\n    }\n\n    public static class test<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, test_args, java.lang.String> {\n      public test() {\n        super(\"test\");\n      }\n\n      @Override\n      public test_args getEmptyArgsInstance() {\n        return new test_args();\n      }\n\n      @Override\n      public org.apache.thrift.async.AsyncMethodCallback<java.lang.String> getResultHandler(final org.apache.thrift.server.AbstractNonblockingServer.AsyncFrameBuffer fb, final int seqid) {\n        final org.apache.thrift.AsyncProcessFunction fcall = this;\n        return new org.apache.thrift.async.AsyncMethodCallback<java.lang.String>() { \n          @Override\n          public void onComplete(java.lang.String o) {\n            test_result result = new test_result();\n            result.success = o;\n            try {\n              fcall.sendResponse(fb, result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);\n            } catch (org.apache.thrift.transport.TTransportException e) {\n              _LOGGER.error(\"TTransportException writing to internal frame buffer\", e);\n              fb.close();\n            } catch (java.lang.Exception e) {\n              _LOGGER.error(\"Exception writing to internal frame buffer\", e);\n              onError(e);\n            }\n          }\n          @Override\n          public void onError(java.lang.Exception e) {\n            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;\n            
org.apache.thrift.TSerializable msg;\n            test_result result = new test_result();\n            if (e instanceof org.apache.thrift.transport.TTransportException) {\n              _LOGGER.error(\"TTransportException inside handler\", e);\n              fb.close();\n              return;\n            } else if (e instanceof org.apache.thrift.TApplicationException) {\n              _LOGGER.error(\"TApplicationException inside handler\", e);\n              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;\n              msg = (org.apache.thrift.TApplicationException)e;\n            } else {\n              _LOGGER.error(\"Exception inside handler\", e);\n              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;\n              msg = new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());\n            }\n            try {\n              fcall.sendResponse(fb,msg,msgType,seqid);\n            } catch (java.lang.Exception ex) {\n              _LOGGER.error(\"Exception writing to internal frame buffer\", ex);\n              fb.close();\n            }\n          }\n        };\n      }\n\n      @Override\n      protected boolean isOneway() {\n        return false;\n      }\n\n      @Override\n      public void start(I iface, test_args args, org.apache.thrift.async.AsyncMethodCallback<java.lang.String> resultHandler) throws org.apache.thrift.TException {\n        iface.test(args.msg,resultHandler);\n      }\n    }\n\n  }\n\n  @SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\n  public static class test_args implements org.apache.thrift.TBase<test_args, test_args._Fields>, java.io.Serializable, Cloneable, Comparable<test_args>   {\n    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(\"test_args\");\n\n    private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new 
org.apache.thrift.protocol.TField(\"msg\", org.apache.thrift.protocol.TType.STRING, (short)1);\n\n    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new test_argsStandardSchemeFactory();\n    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new test_argsTupleSchemeFactory();\n\n    public @org.apache.thrift.annotation.Nullable java.lang.String msg; // required\n\n    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */\n    public enum _Fields implements org.apache.thrift.TFieldIdEnum {\n      MSG((short)1, \"msg\");\n\n      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();\n\n      static {\n        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {\n          byName.put(field.getFieldName(), field);\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByThriftId(int fieldId) {\n        switch(fieldId) {\n          case 1: // MSG\n            return MSG;\n          default:\n            return null;\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, throwing an exception\n       * if it is not found.\n       */\n      public static _Fields findByThriftIdOrThrow(int fieldId) {\n        _Fields fields = findByThriftId(fieldId);\n        if (fields == null) throw new java.lang.IllegalArgumentException(\"Field \" + fieldId + \" doesn't exist!\");\n        return fields;\n      }\n\n      /**\n       * Find the _Fields constant that matches name, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByName(java.lang.String name) {\n        return byName.get(name);\n      }\n\n      private final short 
_thriftId;\n      private final java.lang.String _fieldName;\n\n      _Fields(short thriftId, java.lang.String fieldName) {\n        _thriftId = thriftId;\n        _fieldName = fieldName;\n      }\n\n      @Override\n      public short getThriftFieldId() {\n        return _thriftId;\n      }\n\n      @Override\n      public java.lang.String getFieldName() {\n        return _fieldName;\n      }\n    }\n\n    // isset id assignments\n    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;\n    static {\n      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);\n      tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData(\"msg\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);\n      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(test_args.class, metaDataMap);\n    }\n\n    public test_args() {\n    }\n\n    public test_args(\n      java.lang.String msg)\n    {\n      this();\n      this.msg = msg;\n    }\n\n    /**\n     * Performs a deep copy on <i>other</i>.\n     */\n    public test_args(test_args other) {\n      if (other.isSetMsg()) {\n        this.msg = other.msg;\n      }\n    }\n\n    @Override\n    public test_args deepCopy() {\n      return new test_args(this);\n    }\n\n    @Override\n    public void clear() {\n      this.msg = null;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    public java.lang.String getMsg() {\n      return this.msg;\n    }\n\n    public test_args setMsg(@org.apache.thrift.annotation.Nullable java.lang.String msg) {\n      this.msg = msg;\n      return this;\n    }\n\n    public void unsetMsg() {\n      this.msg = null;\n    }\n\n    /** Returns true if field msg 
is set (has been assigned a value) and false otherwise */\n    public boolean isSetMsg() {\n      return this.msg != null;\n    }\n\n    public void setMsgIsSet(boolean value) {\n      if (!value) {\n        this.msg = null;\n      }\n    }\n\n    @Override\n    public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {\n      switch (field) {\n      case MSG:\n        if (value == null) {\n          unsetMsg();\n        } else {\n          setMsg((java.lang.String)value);\n        }\n        break;\n\n      }\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public java.lang.Object getFieldValue(_Fields field) {\n      switch (field) {\n      case MSG:\n        return getMsg();\n\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */\n    @Override\n    public boolean isSet(_Fields field) {\n      if (field == null) {\n        throw new java.lang.IllegalArgumentException();\n      }\n\n      switch (field) {\n      case MSG:\n        return isSetMsg();\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    @Override\n    public boolean equals(java.lang.Object that) {\n      if (that instanceof test_args)\n        return this.equals((test_args)that);\n      return false;\n    }\n\n    public boolean equals(test_args that) {\n      if (that == null)\n        return false;\n      if (this == that)\n        return true;\n\n      boolean this_present_msg = true && this.isSetMsg();\n      boolean that_present_msg = true && that.isSetMsg();\n      if (this_present_msg || that_present_msg) {\n        if (!(this_present_msg && that_present_msg))\n          return false;\n        if (!this.msg.equals(that.msg))\n          return false;\n      }\n\n      return true;\n    }\n\n    @Override\n    public int hashCode() {\n      int hashCode = 1;\n\n      
hashCode = hashCode * 8191 + ((isSetMsg()) ? 131071 : 524287);\n      if (isSetMsg())\n        hashCode = hashCode * 8191 + msg.hashCode();\n\n      return hashCode;\n    }\n\n    @Override\n    public int compareTo(test_args other) {\n      if (!getClass().equals(other.getClass())) {\n        return getClass().getName().compareTo(other.getClass().getName());\n      }\n\n      int lastComparison = 0;\n\n      lastComparison = java.lang.Boolean.compare(isSetMsg(), other.isSetMsg());\n      if (lastComparison != 0) {\n        return lastComparison;\n      }\n      if (isSetMsg()) {\n        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);\n        if (lastComparison != 0) {\n          return lastComparison;\n        }\n      }\n      return 0;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public _Fields fieldForId(int fieldId) {\n      return _Fields.findByThriftId(fieldId);\n    }\n\n    @Override\n    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {\n      scheme(iprot).read(iprot, this);\n    }\n\n    @Override\n    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {\n      scheme(oprot).write(oprot, this);\n    }\n\n    @Override\n    public java.lang.String toString() {\n      java.lang.StringBuilder sb = new java.lang.StringBuilder(\"test_args(\");\n      boolean first = true;\n\n      sb.append(\"msg:\");\n      if (this.msg == null) {\n        sb.append(\"null\");\n      } else {\n        sb.append(this.msg);\n      }\n      first = false;\n      sb.append(\")\");\n      return sb.toString();\n    }\n\n    public void validate() throws org.apache.thrift.TException {\n      // check for required fields\n      // check for sub-struct validity\n    }\n\n    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {\n      try {\n        write(new 
org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {\n      try {\n        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private static class test_argsStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public test_argsStandardScheme getScheme() {\n        return new test_argsStandardScheme();\n      }\n    }\n\n    private static class test_argsStandardScheme extends org.apache.thrift.scheme.StandardScheme<test_args> {\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol iprot, test_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TField schemeField;\n        iprot.readStructBegin();\n        while (true)\n        {\n          schemeField = iprot.readFieldBegin();\n          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { \n            break;\n          }\n          switch (schemeField.id) {\n            case 1: // MSG\n              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n                struct.msg = iprot.readString();\n                struct.setMsgIsSet(true);\n              } else { \n                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n              }\n              break;\n            default:\n              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n          }\n          iprot.readFieldEnd();\n        }\n        iprot.readStructEnd();\n\n        // check for required fields of 
primitive type, which can't be checked in the validate method\n        struct.validate();\n      }\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol oprot, test_args struct) throws org.apache.thrift.TException {\n        struct.validate();\n\n        oprot.writeStructBegin(STRUCT_DESC);\n        if (struct.msg != null) {\n          oprot.writeFieldBegin(MSG_FIELD_DESC);\n          oprot.writeString(struct.msg);\n          oprot.writeFieldEnd();\n        }\n        oprot.writeFieldStop();\n        oprot.writeStructEnd();\n      }\n\n    }\n\n    private static class test_argsTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public test_argsTupleScheme getScheme() {\n        return new test_argsTupleScheme();\n      }\n    }\n\n    private static class test_argsTupleScheme extends org.apache.thrift.scheme.TupleScheme<test_args> {\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol prot, test_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet optionals = new java.util.BitSet();\n        if (struct.isSetMsg()) {\n          optionals.set(0);\n        }\n        oprot.writeBitSet(optionals, 1);\n        if (struct.isSetMsg()) {\n          oprot.writeString(struct.msg);\n        }\n      }\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol prot, test_args struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet incoming = iprot.readBitSet(1);\n        if (incoming.get(0)) {\n          struct.msg = iprot.readString();\n          struct.setMsgIsSet(true);\n        }\n      }\n    }\n\n    private static <S extends org.apache.thrift.scheme.IScheme> S 
scheme(org.apache.thrift.protocol.TProtocol proto) {\n      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();\n    }\n  }\n\n  @SuppressWarnings({\"cast\", \"rawtypes\", \"serial\", \"unchecked\", \"unused\"})\n  public static class test_result implements org.apache.thrift.TBase<test_result, test_result._Fields>, java.io.Serializable, Cloneable, Comparable<test_result>   {\n    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(\"test_result\");\n\n    private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField(\"success\", org.apache.thrift.protocol.TType.STRING, (short)0);\n\n    private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new test_resultStandardSchemeFactory();\n    private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new test_resultTupleSchemeFactory();\n\n    public @org.apache.thrift.annotation.Nullable java.lang.String success; // required\n\n    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. 
*/\n    public enum _Fields implements org.apache.thrift.TFieldIdEnum {\n      SUCCESS((short)0, \"success\");\n\n      private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();\n\n      static {\n        for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {\n          byName.put(field.getFieldName(), field);\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByThriftId(int fieldId) {\n        switch(fieldId) {\n          case 0: // SUCCESS\n            return SUCCESS;\n          default:\n            return null;\n        }\n      }\n\n      /**\n       * Find the _Fields constant that matches fieldId, throwing an exception\n       * if it is not found.\n       */\n      public static _Fields findByThriftIdOrThrow(int fieldId) {\n        _Fields fields = findByThriftId(fieldId);\n        if (fields == null) throw new java.lang.IllegalArgumentException(\"Field \" + fieldId + \" doesn't exist!\");\n        return fields;\n      }\n\n      /**\n       * Find the _Fields constant that matches name, or null if its not found.\n       */\n      @org.apache.thrift.annotation.Nullable\n      public static _Fields findByName(java.lang.String name) {\n        return byName.get(name);\n      }\n\n      private final short _thriftId;\n      private final java.lang.String _fieldName;\n\n      _Fields(short thriftId, java.lang.String fieldName) {\n        _thriftId = thriftId;\n        _fieldName = fieldName;\n      }\n\n      @Override\n      public short getThriftFieldId() {\n        return _thriftId;\n      }\n\n      @Override\n      public java.lang.String getFieldName() {\n        return _fieldName;\n      }\n    }\n\n    // isset id assignments\n    public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> 
metaDataMap;\n    static {\n      java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);\n      tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData(\"success\", org.apache.thrift.TFieldRequirementType.DEFAULT, \n          new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));\n      metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);\n      org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(test_result.class, metaDataMap);\n    }\n\n    public test_result() {\n    }\n\n    public test_result(\n      java.lang.String success)\n    {\n      this();\n      this.success = success;\n    }\n\n    /**\n     * Performs a deep copy on <i>other</i>.\n     */\n    public test_result(test_result other) {\n      if (other.isSetSuccess()) {\n        this.success = other.success;\n      }\n    }\n\n    @Override\n    public test_result deepCopy() {\n      return new test_result(this);\n    }\n\n    @Override\n    public void clear() {\n      this.success = null;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    public java.lang.String getSuccess() {\n      return this.success;\n    }\n\n    public test_result setSuccess(@org.apache.thrift.annotation.Nullable java.lang.String success) {\n      this.success = success;\n      return this;\n    }\n\n    public void unsetSuccess() {\n      this.success = null;\n    }\n\n    /** Returns true if field success is set (has been assigned a value) and false otherwise */\n    public boolean isSetSuccess() {\n      return this.success != null;\n    }\n\n    public void setSuccessIsSet(boolean value) {\n      if (!value) {\n        this.success = null;\n      }\n    }\n\n    @Override\n    public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {\n      switch (field) {\n      case SUCCESS:\n        
if (value == null) {\n          unsetSuccess();\n        } else {\n          setSuccess((java.lang.String)value);\n        }\n        break;\n\n      }\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public java.lang.Object getFieldValue(_Fields field) {\n      switch (field) {\n      case SUCCESS:\n        return getSuccess();\n\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */\n    @Override\n    public boolean isSet(_Fields field) {\n      if (field == null) {\n        throw new java.lang.IllegalArgumentException();\n      }\n\n      switch (field) {\n      case SUCCESS:\n        return isSetSuccess();\n      }\n      throw new java.lang.IllegalStateException();\n    }\n\n    @Override\n    public boolean equals(java.lang.Object that) {\n      if (that instanceof test_result)\n        return this.equals((test_result)that);\n      return false;\n    }\n\n    public boolean equals(test_result that) {\n      if (that == null)\n        return false;\n      if (this == that)\n        return true;\n\n      boolean this_present_success = true && this.isSetSuccess();\n      boolean that_present_success = true && that.isSetSuccess();\n      if (this_present_success || that_present_success) {\n        if (!(this_present_success && that_present_success))\n          return false;\n        if (!this.success.equals(that.success))\n          return false;\n      }\n\n      return true;\n    }\n\n    @Override\n    public int hashCode() {\n      int hashCode = 1;\n\n      hashCode = hashCode * 8191 + ((isSetSuccess()) ? 
131071 : 524287);\n      if (isSetSuccess())\n        hashCode = hashCode * 8191 + success.hashCode();\n\n      return hashCode;\n    }\n\n    @Override\n    public int compareTo(test_result other) {\n      if (!getClass().equals(other.getClass())) {\n        return getClass().getName().compareTo(other.getClass().getName());\n      }\n\n      int lastComparison = 0;\n\n      lastComparison = java.lang.Boolean.compare(isSetSuccess(), other.isSetSuccess());\n      if (lastComparison != 0) {\n        return lastComparison;\n      }\n      if (isSetSuccess()) {\n        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);\n        if (lastComparison != 0) {\n          return lastComparison;\n        }\n      }\n      return 0;\n    }\n\n    @org.apache.thrift.annotation.Nullable\n    @Override\n    public _Fields fieldForId(int fieldId) {\n      return _Fields.findByThriftId(fieldId);\n    }\n\n    @Override\n    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {\n      scheme(iprot).read(iprot, this);\n    }\n\n    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {\n      scheme(oprot).write(oprot, this);\n      }\n\n    @Override\n    public java.lang.String toString() {\n      java.lang.StringBuilder sb = new java.lang.StringBuilder(\"test_result(\");\n      boolean first = true;\n\n      sb.append(\"success:\");\n      if (this.success == null) {\n        sb.append(\"null\");\n      } else {\n        sb.append(this.success);\n      }\n      first = false;\n      sb.append(\")\");\n      return sb.toString();\n    }\n\n    public void validate() throws org.apache.thrift.TException {\n      // check for required fields\n      // check for sub-struct validity\n    }\n\n    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {\n      try {\n        write(new org.apache.thrift.protocol.TCompactProtocol(new 
org.apache.thrift.transport.TIOStreamTransport(out)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {\n      try {\n        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));\n      } catch (org.apache.thrift.TException te) {\n        throw new java.io.IOException(te);\n      }\n    }\n\n    private static class test_resultStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public test_resultStandardScheme getScheme() {\n        return new test_resultStandardScheme();\n      }\n    }\n\n    private static class test_resultStandardScheme extends org.apache.thrift.scheme.StandardScheme<test_result> {\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol iprot, test_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TField schemeField;\n        iprot.readStructBegin();\n        while (true)\n        {\n          schemeField = iprot.readFieldBegin();\n          if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { \n            break;\n          }\n          switch (schemeField.id) {\n            case 0: // SUCCESS\n              if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {\n                struct.success = iprot.readString();\n                struct.setSuccessIsSet(true);\n              } else { \n                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n              }\n              break;\n            default:\n              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);\n          }\n          iprot.readFieldEnd();\n        }\n        iprot.readStructEnd();\n\n        // check for required fields of primitive type, which 
can't be checked in the validate method\n        struct.validate();\n      }\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol oprot, test_result struct) throws org.apache.thrift.TException {\n        struct.validate();\n\n        oprot.writeStructBegin(STRUCT_DESC);\n        if (struct.success != null) {\n          oprot.writeFieldBegin(SUCCESS_FIELD_DESC);\n          oprot.writeString(struct.success);\n          oprot.writeFieldEnd();\n        }\n        oprot.writeFieldStop();\n        oprot.writeStructEnd();\n      }\n\n    }\n\n    private static class test_resultTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {\n      @Override\n      public test_resultTupleScheme getScheme() {\n        return new test_resultTupleScheme();\n      }\n    }\n\n    private static class test_resultTupleScheme extends org.apache.thrift.scheme.TupleScheme<test_result> {\n\n      @Override\n      public void write(org.apache.thrift.protocol.TProtocol prot, test_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet optionals = new java.util.BitSet();\n        if (struct.isSetSuccess()) {\n          optionals.set(0);\n        }\n        oprot.writeBitSet(optionals, 1);\n        if (struct.isSetSuccess()) {\n          oprot.writeString(struct.success);\n        }\n      }\n\n      @Override\n      public void read(org.apache.thrift.protocol.TProtocol prot, test_result struct) throws org.apache.thrift.TException {\n        org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;\n        java.util.BitSet incoming = iprot.readBitSet(1);\n        if (incoming.get(0)) {\n          struct.success = iprot.readString();\n          struct.setSuccessIsSet(true);\n        }\n      }\n    }\n\n    private static <S extends org.apache.thrift.scheme.IScheme> S 
scheme(org.apache.thrift.protocol.TProtocol proto) {\n      return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();\n    }\n  }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-api-uofs/src/main/java/com/walnut/sparta/uofs/xxx.java",
    "content": "package com.walnut.sparta.uofs;\n\npublic class xxx {\n}\n"
  },
  {
    "path": "Sparta/sparta-api-uofs/src/test/java/cn/ken/AppTest.java",
    "content": "package cn.ken;\n\nimport junit.framework.Test;\nimport junit.framework.TestCase;\nimport junit.framework.TestSuite;\n\n/**\n * Unit test for simple App.\n */\npublic class AppTest \n    extends TestCase\n{\n    /**\n     * Create the test case\n     *\n     * @param testName name of the test case\n     */\n    public AppTest( String testName )\n    {\n        super( testName );\n    }\n\n    /**\n     * @return the suite of tests being tested\n     */\n    public static Test suite()\n    {\n        return new TestSuite( AppTest.class );\n    }\n\n    /**\n     * Rigourous Test :-)\n     */\n    public void testApp()\n    {\n        assertTrue( true );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sparta</artifactId>\n        <groupId>com.walnuts.sparta</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.sparta</groupId>\n    <artifactId>sparta-core-console</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-architecture</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            
<groupId>com.pinecone.slime</groupId>\n            <artifactId>slime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.slime.jelly</groupId>\n            <artifactId>jelly</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.ulf</groupId>\n            <artifactId>ulfhedinn</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer</groupId>\n            <artifactId>summer</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.sailor.sdd</groupId>\n            <artifactId>sailor-stream-distribute-sdk</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <!--        导入redis依赖-->\n<!--        <dependency>-->\n<!--            <groupId>org.springframework.data</groupId>-->\n<!--            <artifactId>spring-data-redis</artifactId>-->\n<!--            <version>2.6.9</version>-->\n<!--        </dependency>-->\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            
<groupId>org.mybatis.spring.boot</groupId>\n            <artifactId>mybatis-spring-boot-starter</artifactId>\n            <version>2.2.2</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.storage.uofs.cache</groupId>\n            <artifactId>hydra-lib-uofs-cache</artifactId>\n            <version>1.2.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-architecture</artifactId>\n            <version>2.5.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-framework-runtime</artifactId>\n            <version>2.5.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-framework-conduct</artifactId>\n            <version>2.5.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-mapper-driver</artifactId>\n            <version>2.5.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.ender.system</groupId>\n            <artifactId>ender-system-hydra</artifactId>\n            <version>2.5.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-service-control</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n\n\n        <dependency>\n            <groupId>org.apache.commons</groupId>\n            <artifactId>commons-csv</artifactId>\n            <version>1.10.0</version>\n        </dependency>\n        <dependency>\n            
<groupId>com.acorn.redqueen.kernel</groupId>\n            <artifactId>redqueen-framework-service</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.sdk.grpc</groupId>\n            <artifactId>hydra-lib-grpc-service-sdk</artifactId>\n            <version>1.2.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.odin</groupId>\n            <artifactId>odin-system</artifactId>\n            <version>2.5.1</version>\n            <scope>test</scope>\n        </dependency>\n    </dependencies>\n</project>"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/Sparta.java",
    "content": "package com.walnut.sparta;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.summer.spring.Springron;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\n\npublic class Sparta extends Springron implements Slf4jTraceable {\n    public Sparta( String szName, Processum parent, String[] springbootArgs ) {\n        super( szName, parent, springbootArgs );\n        this.mSpringKernel.setPrimarySources( SpartaBoot.class );\n    }\n\n    public Sparta( String szName, Processum parent ) {\n        this( szName, parent, new String[0] );\n    }\n\n    @Override\n    protected void loadConfig() {\n        this.mServgramList     = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey );\n        Object dyServgramConf  = this.mServgramList.get( this.gramName() );\n        if( dyServgramConf instanceof String ) {\n            try{\n                this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) );\n            }\n            catch ( IOException ignore ) {\n                this.getLogger().info( \"[Notice] Spring will use the default config `application.yaml`.\" );\n            }\n        }\n        else {\n            this.mServgramConf = this.mServgramList.getChild( this.gramName() );\n        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/SpartaBoot.java",
    "content": "package com.walnut.sparta;\n\nimport org.springframework.boot.autoconfigure.SpringBootApplication;\nimport org.springframework.context.annotation.ComponentScan;\n\n// Dummy boot for spring to found the main classpath.\n@SpringBootApplication\npublic class SpartaBoot {\n    //    @PostConstruct\n//    public void init() {\n//        try {\n//            ShadowBoot.shadow.vitalize();\n//        }\n//        catch ( Exception e ) {\n//            ShadowBoot.shadow.console().error( e.getMessage() );\n//        }\n//    }\n//    public static void main(String[] args) {\n//        SpringApplication.run(SpartaBoot.class, args);\n//    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/config/SpringGlobalJSONConfig.java",
    "content": "package com.walnut.sparta.config;\n\nimport java.io.IOException;\n\nimport com.pinecone.framework.util.uoi.UOI;\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.http.converter.json.Jackson2ObjectMapperBuilder;\n\nimport com.fasterxml.jackson.core.JsonGenerator;\nimport com.fasterxml.jackson.databind.JsonSerializer;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport com.fasterxml.jackson.databind.SerializerProvider;\nimport com.fasterxml.jackson.databind.module.SimpleModule;\nimport com.pinecone.framework.util.id.GUID;\n\n@Configuration\npublic class SpringGlobalJSONConfig {\n    @Bean\n    public ObjectMapper objectMapper( Jackson2ObjectMapperBuilder builder ) {\n        ObjectMapper mapper = builder.createXmlMapper( false ).build();\n        SimpleModule module = new SimpleModule();\n        // 为 GUID 类添加序列化器\n        module.addSerializer(GUID.class, new JsonSerializer<GUID>() {\n            @Override\n            public void serialize(GUID value, JsonGenerator gen, SerializerProvider serializers) throws IOException {\n                gen.writeString(value.toString());\n            }\n        });\n\n        // 为 UOI 类添加序列化器\n        module.addSerializer(UOI.class, new JsonSerializer<UOI>() {\n            @Override\n            public void serialize(UOI value, JsonGenerator gen, SerializerProvider serializers) throws IOException {\n                gen.writeString(value.toString()); // 假设 UOI 类也有 toString 方法\n            }\n        });\n\n        mapper.registerModule(module);\n        return mapper;\n    }\n}"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/config/ibatis/IBatisConfig.java",
    "content": "package com.walnut.sparta.config.ibatis;\n\nimport org.apache.ibatis.session.SqlSessionFactory;\nimport org.mybatis.spring.SqlSessionFactoryBean;\nimport org.mybatis.spring.annotation.MapperScan;\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.core.io.support.PathMatchingResourcePatternResolver;\n\nimport javax.sql.DataSource;\n\n@Configuration\n@MapperScan( \"com.walnut.sparta.services.mapper\" )\n@MapperScan( {\n        \"com.pinecone.hydra.registry.ibatis\", \"com.pinecone.hydra.service.ibatis\", \"com.pinecone.hydra.task.ibatis\",\n        \"com.pinecone.hydra.scenario.ibatis\", \"com.pinecone.hydra.deploy.ibatis\"\n} )\npublic class IBatisConfig {\n\n    @Bean\n    public SqlSessionFactory sqlSessionFactory(DataSource dataSource) throws Exception {\n        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();\n        factoryBean.setDataSource(dataSource);\n        factoryBean.setTypeHandlersPackage( \"com.pinecone.hydra.entity.ibatis\" );\n        factoryBean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources( \"classpath*:mapper/*.xml\" )); // 指定Mapper XML的位置\n        return factoryBean.getObject();\n    }\n//    @Bean\n//    public SqlSessionTemplate sqlSessionTemplate(SqlSessionFactory sqlSessionFactory) {\n//        return new SqlSessionTemplate(sqlSessionFactory);\n//    }\n//\n//    @Bean\n//    public PlatformTransactionManager transactionManager(DataSource dataSource) {\n//        return new DataSourceTransactionManager(dataSource);\n//    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/Dummy.java",
    "content": "package com.walnut.sparta.services.controller;\n\npublic class Dummy {\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/FileSystemController.java",
    "content": "package com.walnut.sparta.services.controller;\n\nimport javax.annotation.Resource;\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.walnut.sparta.system.BasicResultResponse;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\n@RestController\n@RequestMapping( \"/api/v2/service/uofs/\" )\npublic class FileSystemController {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @GetMapping(\"/bucket\")\n    public BasicResultResponse<String> createBucket(@RequestParam String bucketName){\n        return BasicResultResponse.success(bucketName);\n    }\n\n\n    @GetMapping(\"/miao\")\n    public BasicResultResponse<String> miao(@RequestParam String arg){\n        return BasicResultResponse.success(arg );\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/UOFSController.java",
    "content": "package com.walnut.sparta.services.controller;\n\nimport com.walnut.sparta.services.dto.updateObjectDto;\nimport com.walnut.sparta.system.BasicResultResponse;\nimport org.springframework.web.bind.annotation.RequestBody;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RestController;\n\n@RestController\n@RequestMapping( \"/api/v2/UOFSController\" )\npublic class UOFSController {\n\n    public BasicResultResponse<String> updateObject(@RequestBody updateObjectDto dto){\n        return null;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/RegistryMetaController.java",
    "content": "package com.walnut.sparta.services.controller.v2;\n\n//import com.walnut.sparta.services.drivers.RegistryMasterManipulatorImpl;\nimport org.springframework.web.bind.annotation.RequestMapping;\n        import org.springframework.web.bind.annotation.RestController;\n\n@RestController\n@RequestMapping( \"/api/v2/registryMeta\" )\npublic class RegistryMetaController {\n    /*@Resource\n    private RegistryMasterManipulator               registryMasterManipulator;\n\n    private KOMRegistry KOMRegistry;\n\n    @PostConstruct\n    public void init() {\n        this.KOMRegistry = new GenericKOMRegistry(null, this.registryMasterManipulator );\n    }\n\n    *//**\n     * 添加一条namespace节点\n     * @param namespaceNode 节点信息\n     * @return 返回操作情况\n     *//*\n    @PostMapping(\"/putNamespaceNode\")\n    public BasicResultResponse<String> putNamespaceNode(@RequestBody GenericNamespace namespaceNode){\n        this.KOMRegistry.put(namespaceNode);\n        return BasicResultResponse.success();\n    }\n\n    *//**\n     * 添加一个configNode节点\n     * @param configNode 节点信息\n     * @return 返回操作情况\n     *//*\n    @PostMapping(\"/putConfigNode\")\n    public BasicResultResponse<String> putConfigNode( @RequestBody ArchConfigNode configNode ){\n        this.KOMRegistry.put(configNode);\n        return BasicResultResponse.success();\n    }\n\n    *//**\n     * 获取节点路径信息\n     * @param guid 节点guid\n     * @return 返回节点路径\n     *//*\n    @GetMapping(\"/getPath\")\n    public BasicResultResponse<String> getPath( @RequestParam(\"guid\") String guid ){\n        String path = this.KOMRegistry.getPath( GUIDs.GUID72( guid ) );\n        return BasicResultResponse.success(path);\n    }\n\n    *//**\n     * 获取节点信息\n     * @param guid 节点guid\n     * @return 返回节点信息\n     *//*\n    @GetMapping(\"/getNode\")\n    public BasicResultResponse<TreeNode> getNode( @RequestParam(\"guid\") String guid ){\n        TreeNode node = this.KOMRegistry.get( GUIDs.GUID72( guid ) );\n        return 
BasicResultResponse.success(node);\n    }\n\n    *//**\n     * 给节点添加配置信息\n     * @param key 键\n     * @param guid 所属节点的guid\n     * @param value 值\n     * @param type 值的类型\n     * @return 返回状态码\n     *//*\n    @PostMapping(\"/insertProperties\")\n    public BasicResultResponse<String> insertProperties(@RequestParam(\"key\")String key, @RequestParam(\"Guid\") String guid,\n                                                        @RequestParam(\"value\") String value, @RequestParam(\"type\") String type){\n        Property genericProperties = Property.newDummy();\n        genericProperties.setCreateTime(LocalDateTime.now());\n        genericProperties.setUpdateTime(LocalDateTime.now());\n        genericProperties.setKey(key);\n        genericProperties.setValue(value);\n        genericProperties.setType(type);\n\n        this.KOMRegistry.putProperty( genericProperties, GUIDs.GUID72( guid ) );\n        return BasicResultResponse.success();\n    }\n\n    *//**\n     * 移除节点\n     * @param guid 节点guid\n     * @return 返回状态码\n     *//*\n    @DeleteMapping(\"/remove\")\n    public BasicResultResponse<String> remove(@RequestParam(\"Guid\") GUID guid){\n        this.KOMRegistry.remove( guid );\n        return BasicResultResponse.success();\n    }\n\n    *//**\n     * 解析路径\n     * @param path 路径信息\n     * @return 返回解析后的节点信息\n     *//*\n    @GetMapping(\"/queryElement\")\n    public BasicResultResponse<TreeNode> getNodeByPath( @RequestParam(\"path\") String path ){\n        TreeNode treeNode = this.KOMRegistry.queryElement(path);\n        return BasicResultResponse.success( treeNode );\n    }\n\n    *//**\n     * 给节点添加text信息\n     * @param guid 节点guid\n     * @param text text信息\n     * @param type text类型\n     * @return 返回状态码\n     *//*\n    @PostMapping(\"/putTextValue\")\n    public BasicResultResponse<String> insertTextValue(@RequestParam(\"guid\")String guid,\n                                                       @RequestParam(\"text\") String text,\n                        
                               @RequestParam(\"type\") String type){\n        this.KOMRegistry.putTextValue( GUIDs.GUID72( guid ) ,text,type);\n        return BasicResultResponse.success();\n    }\n\n    *//**\n     * 获取节点信息不含继承\n     * @param guid 节点guid\n     * @return 返回节点信息\n     *//*\n    @GetMapping(\"/getSelf\")\n    public BasicResultResponse<TreeNode > getAsRootDepth(@RequestParam(\"guid\") String guid){\n        return BasicResultResponse.success(\n                this.KOMRegistry.getAsRootDepth( GUIDs.GUID72( guid ) )\n        );\n    }*/\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/RegistryTreeController.java",
    "content": "package com.walnut.sparta.services.controller.v2;\n\npublic class RegistryTreeController {\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/ScenarioMetaController.java",
    "content": "package com.walnut.sparta.services.controller.v2;\n\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RestController;\n\n@RestController\n@RequestMapping( \"/api/v2/ScenarioMeta\" )\npublic class ScenarioMetaController {\n//    @Resource\n//    ScenarioTreeManipulatorSharerImpl       scenarioTreeManipulatorSharer;\n//    @Resource\n//    ScenarioMasterManipulatorImpl scenarioMetaManipulatorSharer;\n//\n//    private DistributedScenarioMetaTree     distributedScenarioMetaTree;\n//\n//    @PostConstruct\n//    public void init() {\n//        this.distributedScenarioMetaTree = new GenericDistributedScenarioMetaTree(null,this.scenarioMetaManipulatorSharer);\n//    }\n//\n//    /**\n//     * 插入一个节点\n//     * @param genericNamespaceNode 节点信息\n//     * @return\n//     */\n//    @PostMapping(\"/insert\")\n//    public BasicResultResponse<String> insert(@RequestBody GenericNamespaceNode genericNamespaceNode){\n//        this.distributedScenarioMetaTree.insert(genericNamespaceNode);\n//        return BasicResultResponse.success();\n//    }\n//\n//    /**\n//     * 获取路径信息\n//     * @param guid 节点guid\n//     * @return 返回路径信息\n//     */\n//    @GetMapping(\"/getPath\")\n//    public BasicResultResponse<String> getPath(@RequestParam(\"guid\") String guid){\n//        GUID72 guid72 = new GUID72(guid);\n//        String path = this.distributedScenarioMetaTree.getPath(guid72);\n//        return BasicResultResponse.success(path);\n//    }\n//\n//    /**\n//     * 获取命名空间信息\n//     * @param guid 节点guid\n//     * @return 返回节点信息\n//     */\n//    @GetMapping(\"/getNode\")\n//    public BasicResultResponse<TreeNode> getNode(@RequestParam(\"guid\") String guid){\n//        GUID72 guid72 = new GUID72(guid);\n//        TreeNode treeNode = this.distributedScenarioMetaTree.get(guid72);\n//        return BasicResultResponse.success(treeNode);\n//    }\n//\n//    /**\n//     * 解析路径信息\n//     * @param path 路径\n//     
* @return 返回解析出来的节点信息\n//     */\n//    @GetMapping(\"/parsePath\")\n//    public BasicResultResponse<TreeNode> parsePath(@RequestParam(\"path\") String path){\n//        TreeNode treeNode = this.distributedScenarioMetaTree.parsePath(path);\n//        return BasicResultResponse.success(treeNode);\n//    }\n//\n//    /**\n//     * 删除节点\n//     * @param guid 节点guid\n//     * @return 返回操作结果\n//     */\n//    @DeleteMapping(\"/remove\")\n//    public BasicResultResponse<String> remove(@RequestParam(\"guid\") String guid){\n//        GUID72 guid72 = new GUID72(guid);\n//        this.distributedScenarioMetaTree.remove(guid72);\n//        return BasicResultResponse.success();\n//    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/ServiceMetaController.java",
    "content": "//package com.walnut.sparta.services.controller.v2;\n//\n//import com.pinecone.framework.util.id.GUID;\n//import com.pinecone.hydra.service.kom.ServiceInstrument;\n//import com.pinecone.hydra.service.kom.nodes.GenericApplicationNode;\n//import com.pinecone.hydra.service.kom.nodes.GenericNamespace;\n//import com.pinecone.hydra.service.kom.nodes.GenericServiceNode;\n//import com.pinecone.hydra.service.kom.nodes.ServiceTreeNode;\n//import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\n//import com.pinecone.hydra.service.kom.source.ServiceFamilyTreeManipulator;\n//import com.pinecone.hydra.service.kom.entity.GenericMetaNodeInstanceFactory;\n//import com.pinecone.hydra.service.kom.entity.MetaNodeWideEntity;\n//import com.pinecone.hydra.service.kom.entity.MetaNodeInstanceFactory;\n//import com.pinecone.ulf.util.id.GUID72;\n//import com.pinecone.hydra.service.kom.CentralServicesInstrument;\n//import com.walnut.sparta.services.drivers.ServiceMasterTreeManipulatorImpl;\n//import com.walnut.sparta.system.BasicResultResponse;\n//import org.springframework.web.bind.annotation.DeleteMapping;\n//import org.springframework.web.bind.annotation.GetMapping;\n//import org.springframework.web.bind.annotation.PathVariable;\n//import org.springframework.web.bind.annotation.PostMapping;\n//import org.springframework.web.bind.annotation.RequestBody;\n//import org.springframework.web.bind.annotation.RequestMapping;\n//import org.springframework.web.bind.annotation.RequestParam;\n//import org.springframework.web.bind.annotation.RestController;\n//\n//import javax.annotation.PostConstruct;\n//import javax.annotation.Resource;\n//\n//@RestController\n//@RequestMapping( \"/api/v2/serviceMeta\" )\n//public class ServiceMetaController {\n//    @Resource\n//    private ServiceMasterManipulator serviceMasterManipulator;\n//\n//    @Resource\n//    private ServiceMasterTreeManipulatorImpl treeManipulatorSharer;\n//\n//    private ServiceInstrument 
servicesTree;\n//\n//    MetaNodeInstanceFactory metaNodeInstanceFactory;\n//\n//    @PostConstruct\n//    public void init() {\n//        this.servicesTree = new CentralServicesInstrument( null,serviceMasterManipulator);\n//        this.metaNodeInstanceFactory = new GenericMetaNodeInstanceFactory(this.serviceMasterManipulator,treeManipulatorSharer);\n//    }\n//\n//    /**\n//     * 渲染单节点信息\n//     * @param guid 节点UUID\n//     * @return 返回节点信息\n//     */\n//    @GetMapping(\"/queryNodeInfoByGUID/{guid}\")\n//    public BasicResultResponse<ServiceTreeNode> queryNodeInfoByGUID(@PathVariable(\"guid\") String guid ){\n//        GUID72 guid72 = new GUID72( guid );\n//        return BasicResultResponse.success(this.servicesTree.getNode( guid72 ));\n//    }\n//\n//    /**\n//     * 用于将路径反解析为节点信息\n//     * @param path 节点路径\n//     * @return 返回节点信息\n//     */\n//    @GetMapping(\"/queryNodeInfoByPath\")\n//    public BasicResultResponse<ServiceTreeNode> queryNodeInfoByPath( @RequestParam(\"path\") String path ){\n//        ServiceTreeNode node = this.servicesTree.parsePath( path );\n//        if( node == null ) {\n//            return BasicResultResponse.error( \"No such node\" );\n//        }\n//        return BasicResultResponse.success( this.servicesTree.parsePath(path) );\n//    }\n//\n//    /**\n//     * 创建一个服务节点\n//     * @param serviceNode 服务节点信息\n//     * @return 创建的节点的GUID\n//     */\n//    @PostMapping(\"/putServiceNode\")\n//    public BasicResultResponse<String> putServiceNode( @RequestBody GenericServiceNode serviceNode ){\n//        return BasicResultResponse.success(this.servicesTree.addNode( serviceNode ).toString());\n//    }\n//\n//    /**\n//     * 创建一个应用节点\n//     * @param applicationNode 应用节点信息\n//     * @return  创建的节点的GUID\n//     */\n//    @PostMapping(\"/putApplicationNode\")\n//    public BasicResultResponse<String> putApplicationNode( @RequestBody GenericApplicationNode applicationNode ){\n//        return 
BasicResultResponse.success(this.servicesTree.addNode(applicationNode).toString());\n//    }\n//\n//    /**\n//     * 创建一个分类节点\n//     * @param classificationNode 分类节点信息\n//     * @return 创建的节点的GUID\n//     */\n//    @PostMapping(\"/putClassificationNode\")\n//    public BasicResultResponse<String> putClassificationNode( @RequestBody GenericNamespace classificationNode ){\n//        return BasicResultResponse.success(this.servicesTree.addNode(classificationNode).toString());\n//    }\n//\n//    /**\n//     * 删除节点\n//     * @param guid 节点的guid\n//     * @return 返回删除情况\n//     */\n//    @DeleteMapping(\"/removeSingleNode\")\n//    public BasicResultResponse<String> removeSingleNode(@RequestParam(\"guid\") String guid){\n//        this.servicesTree.removeNode( new GUID72( guid ) );\n//        return BasicResultResponse.success();\n//    }\n//\n//    /**\n//     * 渲染单节点所有信息（含继承）\n//     * @param guid 节点UUID\n//     * @return 返回节点信息\n//     */\n//    @GetMapping(\"/queryNodeWideInfo/{guid}\")\n//    public BasicResultResponse<MetaNodeWideEntity> queryNodeWideInfo(@PathVariable(\"guid\") String guid ){\n//        GUID72 guid72 = new GUID72( guid );\n//        return BasicResultResponse.success(this.servicesTree.getWideMeta(guid72));\n//    }\n//\n//    /**\n//     * 删除节点（完全移除）\n//     * @param guid 节点的guid\n//     * @return 返回移除结果\n//     */\n//    @GetMapping(\"/remove\")\n//    public BasicResultResponse<String> remove(@RequestParam(\"guid\") String guid){\n//        GUID72 guid72 = new GUID72( guid );\n//        this.servicesTree.remove(guid72);\n//        return BasicResultResponse.success();\n//    }\n//\n//    /**\n//     * 用于添加继承关系\n//     * @param childNode 子节点GUID\n//     * @param parentNode 父节点GUID\n//     * @return 返回继承信息\n//     */\n//    @PostMapping(\"/inherit\")\n//    public BasicResultResponse<String> inherit(@RequestParam(\"childNode\") GUID childNode,@RequestParam(\"parentNode\") GUID parentNode){\n//        ServiceFamilyTreeManipulator 
serviceFamilyTreeManipulator = this.serviceMasterManipulator.getServiceFamilyTreeManipulator();\n//        serviceFamilyTreeManipulator.insert(childNode,parentNode);\n//        return BasicResultResponse.success();\n//    }\n//\n//    /**\n//     * 用于渲染路径信息\n//     * @param guid 节点UUID\n//     * @return 返回路径信息\n//     */\n//    @GetMapping(\"/getPath/{GUID}\")\n//    public BasicResultResponse<String> getPath(@PathVariable(\"GUID\") String guid){\n//        return BasicResultResponse.success( this.servicesTree.getPath( new GUID72(guid) ) );\n//    }\n//}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/ServiceTreeController.java",
    "content": "package com.walnut.sparta.services.controller.v2;\n\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RestController;\n\n@RestController\n@RequestMapping( \"/api/v2/serviceTree\" )\npublic class ServiceTreeController {\n//    @Resource\n//    private ServiceMasterTreeManipulatorImpl treeManipulatorSharer;\n//\n//    private DistributedTrieTree distributedTrieTree;\n//\n//    @PostConstruct\n//    public void init() {\n//        this.distributedTrieTree = new GenericDistributedTrieTree( this.treeManipulatorSharer);\n//    }\n//\n//    /**\n//     * 用于渲染路径信息\n//     * @param guid 节点UUID\n//     * @return 返回路径信息\n//     */\n//    @GetMapping(\"/getPath/{GUID}\")\n//    public BasicResultResponse<String> getPath(@PathVariable(\"GUID\") String guid){\n//        return BasicResultResponse.success( this.distributedTrieTree.getCachePath( new GUID72(guid) ) );\n//    }\n//\n//    /**\n//     * 向指定父节点添加子节点\n//     * @param nodeGUID 子节点GUID\n//     * @param parentGUID 父节点GUID\n//     * @return 返回添加情况\n//     */\n//    @PostMapping(\"/addNodeToParent\")\n//    public BasicResultResponse<String> addNodeToParent(@RequestParam(\"nodeGUID\") String nodeGUID, @RequestParam(\"parentGUID\") String parentGUID ){\n//        GUID72 nodeGUID72 = new GUID72(nodeGUID);\n//        GUID72 parentGUID72 = new GUID72(parentGUID);\n//        this.distributedTrieTree.affirmOwnedNode(nodeGUID72,parentGUID72);\n//        return BasicResultResponse.success();\n//    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/controller/v2/TaskMetaController.java",
    "content": "package com.walnut.sparta.services.controller.v2;\n\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RestController;\n\n@RestController\n@RequestMapping( \"/api/v2/TaskMeta\" )\npublic class TaskMetaController {\n//    @Resource\n//    private TaskMasterManipulator taskMasterManipulator;\n//\n//    @Resource\n//    private TaskTreeManipulatorSharerImpl   treeManipulatorSharer;\n//\n//    private DistributedTaskMetaTree distributedTaskMetaTree;\n//\n//    @PostConstruct\n//    public void init() {\n//        this.distributedTaskMetaTree = new GenericDistributedTaskMetaTree(null,this.taskMasterManipulator);\n//    }\n//\n//    /**\n//     * 新增一个节点\n//     * @param genericTaskNode 节点信息\n//     * @return 返回节点guid\n//     */\n//    @PostMapping(\"/insert\")\n//    public BasicResultResponse<String> insert(@RequestBody GenericTaskNode genericTaskNode){\n//        GUID insert = this.distributedTaskMetaTree.insert(genericTaskNode);\n//        return BasicResultResponse.success(insert.toString());\n//    }\n//\n//    /**\n//     * 获取节点路径信息\n//     * @param guid 节点guid\n//     * @return 返回路径信息\n//     */\n//    @GetMapping(\"/getPath\")\n//    public BasicResultResponse<String> getPath(@RequestParam(\"guid\") String guid){\n//        GUID72 guid72 = new GUID72(guid);\n//        String path = this.distributedTaskMetaTree.getPath(guid72);\n//        return BasicResultResponse.success(path);\n//    }\n//\n//    /**\n//     * 获取节点信息\n//     * @param guid 节点guid\n//     * @return 返回节点信息\n//     */\n//    @GetMapping(\"/getNode\")\n//    public BasicResultResponse<TreeNode> getNode(@RequestParam(\"guid\") String guid){\n//        GUID72 guid72 = new GUID72(guid);\n//        TreeNode treeNode = this.distributedTaskMetaTree.get(guid72);\n//        return BasicResultResponse.success(treeNode);\n//    }\n//\n//    /**\n//     * 解析路径信息\n//     * @param path 路径\n//     * @return 返回节点信息\n//     */\n//    
@GetMapping(\"/parsePath\")\n//    public BasicResultResponse<TreeNode> parsePath(@RequestParam(\"path\") String path){\n//        TreeNode treeNode = this.distributedTaskMetaTree.parsePath(path);\n//        return BasicResultResponse.success(treeNode);\n//    }\n//\n//    /**\n//     * 移除节点\n//     * @param guid 节点guid\n//     * @return 返回操作信息\n//     */\n//    @DeleteMapping(\"/remove\")\n//    public BasicResultResponse<String> remove(@RequestParam(\"guid\") String guid){\n//        GUID72 guid72 = new GUID72(guid);\n//        this.distributedTaskMetaTree.remove(guid72);\n//        return BasicResultResponse.success();\n//    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/RegistryMasterManipulatorImpl.java",
    "content": "package com.walnut.sparta.services.drivers;\n\nimport com.pinecone.hydra.registry.source.RegistryMasterManipulator;\nimport com.pinecone.hydra.registry.source.RegistryConfigNodeManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNodeMetaManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeManipulator;\nimport com.pinecone.hydra.registry.source.RegistryNSNodeMetaManipulator;\nimport com.pinecone.hydra.registry.source.RegistryAttributesManipulator;\nimport com.pinecone.hydra.registry.source.RegistryPropertiesManipulator;\nimport com.pinecone.hydra.registry.source.RegistryTextFileManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\n\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\n\n@Component\npublic class RegistryMasterManipulatorImpl implements RegistryMasterManipulator {\n    @Resource\n    RegistryConfigNodeManipulator configNodeManipulator;\n\n    @Resource\n    RegistryNSNodeManipulator namespaceNodeManipulator;\n\n    @Resource\n    RegistryPropertiesManipulator registryPropertiesManipulator;\n\n    @Resource\n    RegistryTextFileManipulator registryTextFileManipulator;\n\n    @Resource\n    RegistryNodeMetaManipulator configNodeMetaManipulator;\n\n    @Resource\n    RegistryNSNodeMetaManipulator namespaceNodeMetaManipulator;\n\n    @Resource\n    RegistryAttributesManipulator registryAttributesManipulator;\n\n    @Resource( type = RegistryMasterTreeManipulatorImpl.class )\n    KOISkeletonMasterManipulator    skeletonMasterManipulator;\n\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public RegistryConfigNodeManipulator getConfigNodeManipulator() {\n        return this.configNodeManipulator;\n    }\n\n    @Override\n    public RegistryNSNodeManipulator getNSNodeManipulator() {\n        return this.namespaceNodeManipulator;\n 
   }\n\n    @Override\n    public RegistryPropertiesManipulator getPropertiesManipulator() {\n        return this.registryPropertiesManipulator;\n    }\n\n    @Override\n    public RegistryTextFileManipulator getTextFileManipulator() {\n        return this.registryTextFileManipulator;\n    }\n\n    @Override\n    public RegistryNodeMetaManipulator getNodeMetaManipulator() {\n        return this.configNodeMetaManipulator;\n    }\n\n    @Override\n    public RegistryNSNodeMetaManipulator getNSNodeMetaManipulator() {\n        return this.namespaceNodeMetaManipulator;\n    }\n\n    @Override\n    public RegistryAttributesManipulator getAttributesManipulator() {\n        return this.registryAttributesManipulator;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/RegistryMasterTreeManipulatorImpl.java",
    "content": "package com.walnut.sparta.services.drivers;\n\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.registry.ibatis.RegistryNodeOwnerMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryNodePathCacheMapper;\nimport com.pinecone.hydra.registry.ibatis.RegistryTreeMapper;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\n\n@Component\npublic class RegistryMasterTreeManipulatorImpl implements TreeMasterManipulator {\n\n    @Resource\n    RegistryNodePathCacheMapper configNodePathMapper;\n\n    @Resource\n    RegistryNodeOwnerMapper configNodeOwnerManipulator;\n\n    @Resource\n    RegistryTreeMapper trieTreeManipulator;\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.configNodeOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.configNodePathMapper;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/ScenarioMasterManipulatorImpl.java",
    "content": "package com.walnut.sparta.services.drivers;\n\nimport com.pinecone.hydra.scenario.source.NamespaceNodeManipulator;\nimport com.pinecone.hydra.scenario.source.NamespaceNodeMetaManipulator;\nimport com.pinecone.hydra.scenario.source.ScenarioCommonDataManipulator;\nimport com.pinecone.hydra.scenario.source.ScenarioMasterManipulator;\n\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport org.springframework.stereotype.Component;\n\n@Component\npublic class ScenarioMasterManipulatorImpl implements ScenarioMasterManipulator {\n    //@Resource\n    NamespaceNodeManipulator        namespaceNodeManipulator;\n    //@Resource\n    NamespaceNodeMetaManipulator    namespaceNodeMetaManipulator;\n    //@Resource\n    ScenarioCommonDataManipulator   scenarioCommonDataManipulator;\n    //@Resource\n    KOISkeletonMasterManipulator    koiSkeletonMasterManipulator;\n    //@Override\n    public NamespaceNodeManipulator getNamespaceNodeManipulator() {\n        return this.namespaceNodeManipulator;\n    }\n\n    @Override\n    public NamespaceNodeMetaManipulator getNSNodeMetaManipulator() {\n        return this.namespaceNodeMetaManipulator;\n    }\n\n    @Override\n    public ScenarioCommonDataManipulator getScenarioCommonDataManipulator() {\n        return this.scenarioCommonDataManipulator;\n    }\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.koiSkeletonMasterManipulator;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/ScenarioTreeManipulatorSharerImpl.java",
    "content": "package com.walnut.sparta.services.drivers;\n\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioNodeOwnerMapper;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioNodePathCacheMapper;\nimport com.pinecone.hydra.scenario.ibatis.ScenarioTreeMapper;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\n@Component\npublic class ScenarioTreeManipulatorSharerImpl implements TreeMasterManipulator {\n\n    @Resource\n    ScenarioTreeMapper              scenarioTreeMapper;\n    @Resource\n    ScenarioNodeOwnerMapper         scenarioNodeOwnerMapper;\n    @Resource\n    ScenarioNodePathCacheMapper scenarioNodePathMapper;\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.scenarioNodeOwnerMapper;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.scenarioTreeMapper;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.scenarioNodePathMapper;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/ServiceMasterManipulatorImpl.java",
    "content": "package com.walnut.sparta.services.drivers;\n\nimport javax.annotation.Resource;\n\nimport com.pinecone.framework.system.construction.Structure;\nimport com.pinecone.hydra.service.ibatis.AppNodeMetaMapper;\nimport com.pinecone.hydra.service.ibatis.ApplicationNodeMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceInstanceMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNamespaceMapper;\nimport com.pinecone.hydra.service.ibatis.NamespaceRulesMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNodeMetaMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceMetaMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNodeMapper;\nimport com.pinecone.hydra.service.kom.source.ApplicationMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ApplicationNodeManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceInstanceManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceNamespaceManipulator;\nimport com.pinecone.hydra.service.kom.source.NamespaceRulesManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\nimport com.pinecone.hydra.service.kom.source.NodeMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceMetaManipulator;\nimport com.pinecone.hydra.service.kom.source.ServiceNodeManipulator;\nimport com.pinecone.hydra.system.ko.driver.KOISkeletonMasterManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.service.ibatis.ServiceTreeMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper;\nimport com.pinecone.hydra.service.ibatis.ServicePathCacheMapper;\nimport org.springframework.stereotype.Component;\n\n\n@Component\npublic class ServiceMasterManipulatorImpl implements ServiceMasterManipulator {\n    @Resource\n    @Structure(type = ServiceTreeMapper.class)\n    private ServiceTreeMapper trieTreeManipulator;\n    
@Resource\n    @Structure(type = ServiceNodeMetaMapper.class)\n    private NodeMetaManipulator nodeMetaManipulator;\n\n    @Resource\n    @Structure(type = ApplicationNodeMapper.class)\n    private ApplicationNodeManipulator     applicationNodeManipulator;\n    @Resource\n    @Structure(type = AppNodeMetaMapper.class)\n    private ApplicationMetaManipulator     applicationMetaManipulator;\n\n    @Resource\n    @Structure(type = ServiceNodeMapper.class)\n    private ServiceNodeManipulator         serviceNodeManipulator;\n    @Resource\n    @Structure(type = ServiceMetaMapper.class)\n    private ServiceMetaManipulator         serviceMetaManipulator;\n\n    @Resource\n    @Structure(type = ServiceNamespaceMapper.class)\n    private ServiceNamespaceManipulator serviceNamespaceManipulator;\n\n    @Resource\n    @Structure(type = ServiceInstanceMapper.class)\n    private ServiceInstanceManipulator serviceInstanceManipulator;\n\n    @Resource\n    @Structure(type = NamespaceRulesMapper.class)\n    private NamespaceRulesManipulator namespaceRulesManipulator;\n\n    @Resource\n    @Structure(type = ServiceNodeOwnerMapper.class)\n    private ServiceNodeOwnerMapper          scopeOwnerManipulator;\n\n    @Resource\n    @Structure(type = ServicePathCacheMapper.class)\n    private ServicePathCacheMapper scopePathManipulator;\n\n    @Resource( type = ServiceMasterTreeManipulatorImpl.class )\n    KOISkeletonMasterManipulator    skeletonMasterManipulator;\n\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public NodeMetaManipulator getNodeMetaManipulator() {\n        return this.nodeMetaManipulator;\n    }\n\n    @Override\n    public ApplicationNodeManipulator getApplicationNodeManipulator() {\n        return this.applicationNodeManipulator;\n    }\n\n    @Override\n    public ApplicationMetaManipulator getApplicationElementManipulator() {\n        return 
this.applicationMetaManipulator;\n    }\n\n    @Override\n    public ServiceNodeManipulator getServiceNodeManipulator() {\n        return this.serviceNodeManipulator;\n    }\n\n    @Override\n    public ServiceMetaManipulator getServiceMetaManipulator() {\n        return this.serviceMetaManipulator;\n    }\n\n    @Override\n    public ServiceNamespaceManipulator getNamespaceManipulator() {\n        return this.serviceNamespaceManipulator;\n    }\n\n    @Override\n    public NamespaceRulesManipulator getNamespaceRulesManipulator() {\n        return this.namespaceRulesManipulator;\n    }\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.scopeOwnerManipulator;\n    }\n\n\n    @Override\n    public KOISkeletonMasterManipulator getSkeletonMasterManipulator() {\n        return this.skeletonMasterManipulator;\n    }\n\n    @Override\n    public ServiceInstanceManipulator getServiceInstanceManipulator() {\n        return this.serviceInstanceManipulator;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/drivers/ServiceMasterTreeManipulatorImpl.java",
    "content": "package com.walnut.sparta.services.drivers;\n\nimport com.pinecone.hydra.unit.imperium.source.TireOwnerManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TriePathCacheManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TrieTreeManipulator;\nimport com.pinecone.hydra.unit.imperium.source.TreeMasterManipulator;\nimport com.pinecone.hydra.service.ibatis.ServiceTreeMapper;\nimport com.pinecone.hydra.service.ibatis.ServiceNodeOwnerMapper;\nimport com.pinecone.hydra.service.ibatis.ServicePathCacheMapper;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\n\n\n@Component\npublic class ServiceMasterTreeManipulatorImpl implements TreeMasterManipulator {\n\n    @Resource\n    ServicePathCacheMapper scopePathManipulator;\n\n    @Resource\n    ServiceTreeMapper trieTreeManipulator;\n\n    @Resource\n    ServiceNodeOwnerMapper      scopeOwnerManipulator;\n\n    @Override\n    public TireOwnerManipulator getTireOwnerManipulator() {\n        return this.scopeOwnerManipulator;\n    }\n\n    @Override\n    public TrieTreeManipulator getTrieTreeManipulator() {\n        return this.trieTreeManipulator;\n    }\n\n    @Override\n    public TriePathCacheManipulator getTriePathCacheManipulator() {\n        return this.scopePathManipulator;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/dto/updateObjectDto.java",
    "content": "package com.walnut.sparta.services.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.summer.multiparts.MultipartFile;\n\npublic class updateObjectDto implements Pinenut {\n    private MultipartFile       object;\n    private String              path;\n    private String              volumeGuid;\n\n\n    public updateObjectDto() {\n    }\n\n    public updateObjectDto(MultipartFile object, String path, String volumeGuid) {\n        this.object = object;\n        this.path = path;\n        this.volumeGuid = volumeGuid;\n    }\n\n    public MultipartFile getObject() {\n        return object;\n    }\n\n\n    public void setObject(MultipartFile object) {\n        this.object = object;\n    }\n\n\n    public String getPath() {\n        return path;\n    }\n\n\n    public void setPath(String path) {\n        this.path = path;\n    }\n\n\n    public String getVolumeGuid() {\n        return volumeGuid;\n    }\n\n\n    public void setVolumeGuid(String volumeGuid) {\n        this.volumeGuid = volumeGuid;\n    }\n\n    public String toString() {\n        return \"updateObjectDto{object = \" + object + \", path = \" + path + \", volumeGuid = \" + volumeGuid + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/mapper/FakeNews.java",
    "content": "package com.walnut.sparta.services.mapper;\n\npublic class FakeNews {\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/pojo/Dummy.java",
    "content": "package com.walnut.sparta.services.pojo;\n\npublic class Dummy {\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/service/ServiceNodeService.java",
    "content": "package com.walnut.sparta.services.service;\n\npublic interface ServiceNodeService {\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/service/ServiceTreeService.java",
    "content": "package com.walnut.sparta.services.service;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface ServiceTreeService {\n    void addNodeToParent(GUID nodeGUID,GUID parentGUID);\n    void removeNode(GUID nodeGUID);\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/service/serviceImpl/ServiceNodeServiceImpl.java",
    "content": "package com.walnut.sparta.services.service.serviceImpl;\n\nimport com.walnut.sparta.services.service.ServiceNodeService;\n\npublic class ServiceNodeServiceImpl implements ServiceNodeService {\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/services/service/serviceImpl/ServiceTreeServiceImpl.java",
    "content": "//package com.walnut.sparta.services.service.serviceImpl;\n//\n//import com.pinecone.framework.util.Debug;\n//import com.pinecone.framework.util.id.GUID;\n//import com.pinecone.framework.util.uoi.UOI;\n//import com.pinecone.hydra.service.kom.nodes.ServiceTreeNode;\n//import com.pinecone.hydra.service.kom.source.ServiceMasterManipulator;\n//import com.pinecone.hydra.unit.udtt.GUIDDistributedTrieNode;\n//import com.pinecone.hydra.service.ibatis.ServiceTrieTreeMapper;\n//import com.walnut.sparta.services.service.ServiceTreeService;\n//import org.springframework.stereotype.Service;\n//\n//import javax.annotation.PostConstruct;\n//import javax.annotation.Resource;\n//import java.util.List;\n//\n//@Service\n//\n//public class ServiceTreeServiceImpl implements ServiceTreeService {\n//    @Resource\n//    private ServiceTrieTreeMapper trieTreeManipulator;\n//    @Resource\n//    private ServiceMasterManipulator serviceMasterManipulator;\n//\n//    private MetaNodeOperatorProxy               metaNodeOperatorProxy;\n//\n//    @PostConstruct\n//    public void init() {\n//        this.metaNodeOperatorProxy = new MetaNodeOperatorProxy(this.serviceMasterManipulator);\n//    }\n//\n//\n//    @Override\n//    public void addNodeToParent(GUID nodeGUID, GUID parentGUID) {\n//        //将节点加入指定位置\n//        this.trieTreeManipulator.insertOwnedNode(nodeGUID,parentGUID);\n//        //添加后要更新节点路径\n//        //递归查询所有要更新的节点\n//        upDateAllPath(nodeGUID);\n//    }\n//\n//    @Override\n//    public void removeNode(GUID nodeGUID) {\n//        //像文件夹一样删除父文件会连带一起输出子文件\n//        removeAllNode(nodeGUID);\n//    }\n//\n//    private void removeAllNode(GUID nodeGUID){\n//        List<GUIDDistributedTrieNode> childNodes = this.trieTreeManipulator.getChild(nodeGUID);\n//        this.trieTreeManipulator.purge(nodeGUID);\n//        this.trieTreeManipulator.removePath(nodeGUID);\n//        if (childNodes==null) return;\n//        for (GUIDDistributedTrieNode 
guidDistributedTrieNode :childNodes){\n//            removeNode(guidDistributedTrieNode.getGuid());\n//        }\n//    }\n//\n//    private void upDateAllPath(GUID guid){\n//        updatePath(guid);\n//        List<GUIDDistributedTrieNode> childNodes = this.trieTreeManipulator.getChild(guid);\n//        Debug.trace(\"节点\"+guid+\"的子节点有\"+childNodes.toString());\n//        for(GUIDDistributedTrieNode guidDistributedTrieNode :childNodes){\n//            if (guidDistributedTrieNode !=null){\n//                upDateAllPath(guidDistributedTrieNode.getGuid());\n//            }\n//        }\n//    }\n//    private void updatePath(GUID guid){\n//        GUIDDistributedTrieNode node = this.trieTreeManipulator.getNode(guid);\n//        String nodeName = getNodeName(node);\n//        String pathString=\"\";\n//        pathString=pathString+nodeName;\n//        while (node.getParentGUIDs() != null){\n//            for (GUID parentGUID : node.getParentGUIDs()){\n//                node = this.trieTreeManipulator.getNode(parentGUID);\n//                nodeName = getNodeName(node);\n//                pathString = nodeName + \".\" + pathString;\n//            }\n//        }\n//        this.trieTreeManipulator.updatePath(guid,pathString);\n//    }\n//    private String getNodeName(GUIDDistributedTrieNode node){\n//        UOI type = node.getType();\n//        ServiceTreeNode newInstance = (ServiceTreeNode)type.newInstance();\n//        MetaNodeOperator operator = metaNodeOperatorProxy.getOperator(newInstance.getMetaType());\n//        ServiceTreeNode serviceTreeNode = operator.get(node.getGuid());\n//        Debug.trace(\"获取到了节点\"+serviceTreeNode);\n//        return serviceTreeNode.getName();\n//    }\n//}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/system/BasicResultResponse.java",
    "content": "package com.walnut.sparta.system;\n\nimport java.io.Serializable;\n\nimport org.springframework.http.HttpStatus;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic class BasicResultResponse<T > implements Pinenut, Serializable {\n    private Integer    code = HttpStatus.OK.value();\n    private String     msg; //错误信息\n    private T          data; //数据\n\n    public static <T> BasicResultResponse<T > success() {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > successMsg( String msg  ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > success( T object ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.data = object;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > error( String msg ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.INTERNAL_SERVER_ERROR.value();\n        return result;\n    }\n\n\n    /**\n     * 获取\n     * @return code\n     */\n    public Integer getCode() {\n        return this.code;\n    }\n\n    /**\n     * 设置\n     * @param code\n     */\n    public void setCode(Integer code) {\n        this.code = code;\n    }\n\n    /**\n     * 获取\n     * @return msg\n     */\n    public String getMsg() {\n        return this.msg;\n    }\n\n    /**\n     * 设置\n     * @param msg\n     */\n    public void setMsg(String msg) {\n        this.msg = msg;\n    }\n\n    /**\n     * 获取\n     * @return data\n     */\n    
public T getData() {\n        return this.data;\n    }\n\n    /**\n     * 设置\n     * @param data\n     */\n    public void setData(T data) {\n        this.data = data;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"code\" , this.code ),\n                new KeyValue<>( \"msg\"  , this.msg ),\n                new KeyValue<>( \"data\" , this.data )\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/java/com/walnut/sparta/system/SystemController.java",
    "content": "package com.walnut.sparta.system;\n\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RestController;\n\n\n@RestController\n@RequestMapping( \"/system\" )\npublic class SystemController {\n    //    @GetMapping( \"/undefined\" )\n//    public String undefined() {\n//        return \"Hello, hi, good afternoon! This is undefined specking!\";\n//    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/main/resources/uid/default-uid-spring.xml",
    "content": "﻿<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<beans xmlns=\"http://www.springframework.org/schema/beans\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n       xsi:schemaLocation=\"\n\t\thttp://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-3.1.xsd\">\n\n    <!-- UID generator -->\n    <bean id=\"disposableWorkerIdAssigner\" class=\"com.pinecone.ulf.util.guid.i64.worker.GenericDisposableWorkerIdAssigner\"/>\n\n    <bean id=\"defaultUidGenerator\" class=\"com.pinecone.ulf.util.guid.i64.GuidAllocator72V2\" lazy-init=\"false\">\n<!--        <property name=\"workerIdAssigner\" ref=\"disposableWorkerIdAssigner\"/>-->\n\n        <!-- Specified bits & epoch as your demand. No specified the default value will be used -->\n        <property name=\"timeBits\" value=\"29\"/>\n        <property name=\"workerBits\" value=\"21\"/>\n        <property name=\"seqBits\" value=\"13\"/>\n        <property name=\"epochStr\" value=\"2016-09-20\"/>\n    </bean>\n\n    <!-- Import mybatis config -->\n<!--    <import resource=\"classpath:/uid/mybatis-spring.xml\"/>-->\n\n</beans>\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/UniTrieMaptronTest.java",
    "content": "import com.pinecone.framework.unit.trie.GenericReparseNode;\nimport com.pinecone.framework.unit.trie.UniTrieMaptron;\n\nimport org.junit.jupiter.api.BeforeEach;\nimport org.junit.jupiter.api.Test;\nimport java.util.Map;\nimport java.util.Set;\nimport static org.junit.jupiter.api.Assertions.*;\nimport java.util.AbstractMap;\npublic class UniTrieMaptronTest {\n\n    private UniTrieMaptron<String, String> trieMap;\n\n    @BeforeEach\n    public void setUp() {\n        trieMap = new UniTrieMaptron<>();\n    }\n\n    @Test\n    public void testPutAndGet() {\n        trieMap.put(\"a/b/c\", \"value1\");\n        trieMap.put(\"a/b/d\", \"value2\");\n        assertEquals(\"value1\", trieMap.get(\"a/b/c\"));\n        assertEquals(\"value2\", trieMap.get(\"a/b/d\"));\n        //assertNull(trieMap.get(\"a/b\"));\n    }\n\n    @Test\n    public void testPutReference() {\n        trieMap.put(\"a/b/c\", \"value1\");\n        //trieMap.putReference(\"ref1\", new GenericReparseNode<>(\"a/b/c\",trieMap));\n\n\n        //assertEquals(\"value1\", trieMap.get(\"ref1\"));\n    }\n\n    @Test\n    public void testContainsKey() {\n        trieMap.put(\"a/b/c\", \"value1\");\n//        assertTrue(trieMap.containsKey(\"a/b/c\"));\n//        assertFalse(trieMap.containsKey(\"a/b\"));\n    }\n\n    @Test\n    public void testContainsValue() {\n        trieMap.put(\"a/b/c\", \"value1\");\n        assertTrue(trieMap.containsValue(\"value1\"));\n        assertFalse(trieMap.containsValue(\"value2\"));\n    }\n\n    @Test\n    public void testRemove() {\n        trieMap.put(\"a/b/c\", \"value1\");\n        assertEquals(\"value1\", trieMap.remove(\"a/b/c\"));\n        assertNull(trieMap.get(\"a/b/c\"));\n        assertNull(trieMap.remove(\"a/b/c\")); // Key already removed\n    }\n\n    @Test\n    public void testPutAll() {\n        Map<String, String> map = Map.of(\n                \"a/b/c\", \"value1\",\n                \"x/y/z\", \"value2\"\n        );\n        
trieMap.putAll(map);\n\n        assertEquals(\"value1\", trieMap.get(\"a/b/c\"));\n        assertEquals(\"value2\", trieMap.get(\"x/y/z\"));\n    }\n\n    @Test\n    public void testClear() {\n        trieMap.put(\"a/b/c\", \"value1\");\n        trieMap.put(\"x/y/z\", \"value2\");\n        trieMap.clear();\n\n//        assertTrue(trieMap.isEmpty());\n//        assertNull(trieMap.get(\"a/b/c\"));\n//        assertNull(trieMap.get(\"x/y/z\"));\n    }\n\n    @Test\n    public void testKeySet() {\n        trieMap.put(\"a/b/c\", \"value1\");\n        trieMap.put(\"x/y/z\", \"value2\");\n        Set<String> keys = trieMap.keySet();\n\n        assertTrue(keys.contains(\"a/b/c\"));\n        assertTrue(keys.contains(\"x/y/z\"));\n    }\n\n    @Test\n    public void testValues() {\n        trieMap.put(\"a/b/c\", \"value1\");\n        trieMap.put(\"x/y/z\", \"value2\");\n        assertTrue(trieMap.values().contains(\"value1\"));\n        assertTrue(trieMap.values().contains(\"value2\"));\n    }\n\n\n    @Test\n    public void testEntrySet() {\n        // 初始化测试数据\n        trieMap.put(\"apple\", \"fruit\");\n        trieMap.put(\"banana\", \"fruit\");\n        trieMap.put(\"car\", \"vehicle\");\n        trieMap.put(\"cat\", \"animal\");\n\n        // 获取 entrySet\n        Set<Map.Entry<String, String>> entrySet = trieMap.entrySet();\n\n        // 确保 entrySet 的大小与 Trie 中的键值对数量一致\n        assertEquals(4, entrySet.size(), \"EntrySet should contain 4 entries.\");\n\n        // 检查具体的键值对是否正确\n        assertTrue(entrySet.contains(new AbstractMap.SimpleEntry<>(\"apple\", \"fruit\")), \"EntrySet should contain ('apple', 'fruit').\");\n        assertTrue(entrySet.contains(new AbstractMap.SimpleEntry<>(\"banana\", \"fruit\")), \"EntrySet should contain ('banana', 'fruit').\");\n        assertTrue(entrySet.contains(new AbstractMap.SimpleEntry<>(\"car\", \"vehicle\")), \"EntrySet should contain ('car', 'vehicle').\");\n        assertTrue(entrySet.contains(new 
AbstractMap.SimpleEntry<>(\"cat\", \"animal\")), \"EntrySet should contain ('cat', 'animal').\");\n\n        // 移除一个键值对，确保 entrySet 反映了变化\n        trieMap.remove(\"cat\");\n        entrySet = trieMap.entrySet();\n        assertEquals(3, entrySet.size(), \"EntrySet should contain 3 entries after removal.\");\n        assertFalse(entrySet.contains(new AbstractMap.SimpleEntry<>(\"cat\", \"animal\")), \"EntrySet should not contain ('cat', 'animal') after removal.\");\n    }\n\n    @Test\n    public void testEntrySetIsEmpty() {\n        // 确保在空的 Trie 上 entrySet 为空\n        Set<Map.Entry<String, String>> entrySet = trieMap.entrySet();\n        assertTrue(entrySet.isEmpty(), \"EntrySet should be empty for a new Trie.\");\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/ender/TestEnderHydra.java",
    "content": "package com.ender;\n\nimport java.util.Map;\nimport java.util.UUID;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.deploy.ibatis.hydranium.DeployMappingDriver;\nimport com.pinecone.hydra.deploy.kom.UniformDeployInstrument;\nimport com.pinecone.hydra.proc.LocalHostedProcess;\nimport com.pinecone.hydra.proc.LocalUProcess;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\nimport com.pinecone.hydra.proc.image.ArchEntryPointRunnable;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.GenericClassImage;\nimport com.pinecone.hydra.proc.image.LocalHostedClassImage;\nimport com.pinecone.hydra.registry.GenericKOMRegistry;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.ibatis.hydranium.RegistryMappingDriver;\nimport com.pinecone.hydra.reign.UnixInstitutionalizedMetaImperiumPrivy;\nimport com.pinecone.hydra.storage.mfs.MappingFileSystem;\nimport com.pinecone.hydra.storage.file.external.ExternalFolder;\nimport com.pinecone.hydra.storage.mfs.NativeMFile;\nimport com.pinecone.hydra.storage.mfs.NativeMappingFileSystem;\nimport com.pinecone.hydra.system.imperium.KernelObjectRootMountPoint;\nimport com.pinecone.hydra.system.imperium.KernelRootMountPoint;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.system.ko.kom.ExpressInstrument;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V7;\nimport com.walnut.archcraft.ender.EnderHydra;\nimport com.walnut.odin.task.GenericRavenTaskConfig;\nimport 
com.walnut.odin.task.RavenTaskInstrument;\nimport com.walnut.odin.task.mapper.OdinUniformTaskMappingDriver;\n\nclass Floki extends EnderHydra {\n    public Floki( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Floki( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        UnixInstitutionalizedMetaImperiumPrivy privy = new UnixInstitutionalizedMetaImperiumPrivy( this, null );\n\n        ExpressInstrument instrument = privy.getExpressInstrument();\n\n\n        this.prepareKOMTrees( instrument );\n\n        MappingFileSystem mappingFileSystem = new NativeMappingFileSystem( \"E:/\" );\n        //MappingFileSystem mappingFileSystem = new NativeMappingFileSystem( \"/\" );\n        instrument.directMount( KernelRootMountPoint.Mount.getMountPoint() + \"/volE\", mappingFileSystem);\n\n\n        this.testSimple( instrument );\n\n//        this.testProcess( instrument );\n    }\n\n    private void testProcess( ExpressInstrument instrument ) throws Exception{\n        ProcessManager manager = this.processManager();\n        //instrument.mount( KernelRootMountPoint.Process.getMountPoint(), manager );\n\n\n\n\n        ProcessEventHandler eventHandler = new ProcessEventHandler() {\n            @Override\n            public void fired( EntryPointRunnable runnable, ProcessEvent event ) {\n                Debug.bluef( runnable, event );\n            }\n        };\n\n        ExecutionImage image = new LocalHostedClassImage( \"gay\", new ArchEntryPointRunnable( eventHandler ) {\n            @Override\n            public int main( Map<String, String[]> args ) {\n                Debug.greenfs( \"Hello, hi, I am `\" + this.ownedProcess().getName() + \"`!\" );\n                Debug.greenfs( this.ownedProcess().getPID() );\n                Debug.greenfs( this.ownedProcess().getLocalPID() );\n\n                
Debug.greenfs( this.ownedProcess().getEnvironmentVariables() );\n                Debug.greenfs( this.ownedProcess().getStartupArguments() );\n                Debug.bluef( this.ownedProcess().getControllableLevel() );\n                Debug.bluef( this.ownedProcess().getOwnedProcessManager() );\n                Debug.greenfs( this.ownedProcess().parentProcess() );\n                return 0;\n            }\n        }, manager );\n        LocalUProcess process = manager.createLocalHostedProcess( image, null, Map.of( \"fuck\", new String[]{ \"you\", \"she\", \"he\", \"it\" } ) );\n\n        Debug.redfs( manager.fetchProcesses() );\n\n        this.getServgramOrchestrator().add(process);\n        process.start();\n        this.getServgramOrchestrator().syncWaitingTerminated();\n\n        Debug.redfs( manager.fetchProcesses() );\n    }\n\n    private void prepareKOMTrees( ExpressInstrument instrument ) {\n        OdinUniformTaskMappingDriver categoryMappingDriver = new OdinUniformTaskMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        RavenTaskInstrument ravenTaskInstrument = new RavenTaskInstrument( categoryMappingDriver, new GenericRavenTaskConfig() );\n\n\n        KOIMappingDriver koiMappingDriver = new RegistryMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        KOMRegistry registry = new GenericKOMRegistry( koiMappingDriver );\n\n        DeployMappingDriver deployMappingDriver = new DeployMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        UniformDeployInstrument deployInstrument = new UniformDeployInstrument( deployMappingDriver );\n\n        instrument.mount( 
KernelObjectRootMountPoint.TaskMeta.getMountPoint(), ravenTaskInstrument );\n        instrument.mount( KernelObjectRootMountPoint.Registry.getMountPoint(), registry );\n        instrument.mount( KernelObjectRootMountPoint.DeployMeta.getMountPoint(), deployInstrument );\n    }\n\n    private void testSimple( ExpressInstrument instrument ) {\n        EntityNode entityNode = instrument.queryNode( \"meta/task/test/job/task\" );\n\n        //Debug.fmp( 2, entityNode );\n        Debug.fmp( 2, instrument.querySystemKernelObjectPath( entityNode.getGuid() ) );\n\n        Debug.fmp( 2, instrument.getMountedInstrument( \"meta/task\" ) );\n\n        Debug.greenfs( instrument.fetchOwnMappingPath() );\n\n        Debug.fmp( 2, instrument.queryNode( \"conf/registry/game3a/witcher/people/s4/urge\" ) );\n        Debug.fmp( 2, instrument.queryNode( \"conf/registry/game3a/witcher/people/s4/urge\" ) );\n\n        Debug.fmp( 2, instrument.queryNode( \"/dev/deploy/root/test/cluster/vm1\" ) );\n\n\n        //EntityNode myf = instrument.queryNode( \"mnt/volE/Users\" );\n        EntityNode myf = instrument.queryNode( \"/mnt/volE/MyFiles\" );\n        Debug.fmp( 2, myf );\n        NativeMFile myff = (NativeMFile) myf;\n\n        Debug.fmp( 2, myff.listFiles() );\n\n    }\n}\n\npublic class TestEnderHydra {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Floki loki = (Floki) Pinecone.sys().getTaskManager().add( new Floki( args, Pinecone.sys() ) );\n            loki.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/rpc/TestGrpcService.java",
    "content": "package com.rpc;\n\nimport com.acorn.redqueen.service.conduct.RedCollectiveServiceRegiment;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.grpc.client.GrpcAppointClient;\nimport com.pinecone.hydra.grpc.client.GrpcClientConfig;\nimport com.pinecone.hydra.grpc.server.GrpcAppointServer;\nimport com.pinecone.hydra.grpc.server.GrpcServerConfig;\nimport com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver;\nimport com.pinecone.hydra.service.kom.UniformServiceInstrument;\nimport com.pinecone.hydra.service.registry.grpc.client.GrpcServiceClient;\nimport com.pinecone.hydra.service.registry.grpc.server.GrpcServiceAppointServer;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;\nimport com.pinecone.hydra.service.registry.server.UniformServiceManager;\nimport com.pinecone.hydra.service.registry.client.HuskyServiceClient;\nimport com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;\nimport com.pinecone.hydra.service.registry.ulf.HuskyServiceAppointServer;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.uma.HuskyDuplexExpress;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointServer;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.i64.GuidAllocator72V2;\n\nimport java.util.List;\n\nclass Brian extends Tritium {\n    public Brian( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Brian( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, 
parent );\n    }\n\n\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new ServiceMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        UniformServiceInstrument servicesTree = new UniformServiceInstrument( koiMappingDriver );\n\n        UniformServiceManager serviceManager = new UniformServiceManager( servicesTree );\n        GrpcServiceAppointServer grpcServer = new GrpcServiceAppointServer(\n                new GrpcAppointServer( new GrpcServerConfig( new JSONMaptron( \"{ port: 5888 }\" ) ))\n        );\n\n        serviceManager.hookAppointServer(grpcServer);\n        RedCollectiveServiceRegiment serviceRegiment = new RedCollectiveServiceRegiment(this, servicesTree, serviceManager);\n        serviceRegiment.startServiceManage();\n\n\n        GrpcServiceClient client = new GrpcServiceClient(\n                new GrpcAppointClient( new GuidAllocator72V2().nextGUIDi64(), new GrpcClientConfig( new JSONMaptron( \"{ host: 'localhost', port: 5888 }\" ) ) ),\n                servicesTree.getGuidAllocator()\n        );\n        client.startService();\n\n        testUniformServiceRegister_Proactive(client, serviceManager);\n\n    }\n\n    public static void testUniformServiceRegister_Proactive( GrpcServiceClient client, UniformServiceManager serviceManager ) throws Exception {\n        ServiceMetaDTO meta = client.getMetaManipulation().queryServiceMetaByPath(\"root/test/app/ser\");\n        Debug.bluef(\"Meta: \" + meta);\n\n        String guid = client.getMetaManipulation().evalCreationStatement( \"{ root: { test: { app: { metaType: ApplicationElement } } } }\");\n        Debug.bluef(\"Creation GUID: \" + guid);\n\n        meta = client.getMetaManipulation().queryServiceMetaByPath(\"root/test/app/test1\");\n        Debug.greenfs( meta );\n\n        client.registerService( 
client.getGuidAllocator().parse(meta.getGuid()), null );\n\n        List<ServiceMetaDTO> serviceMetaDTOS = client.getMetaManipulation().fetchServiceInsMetaByServiceId(meta.getGuid());\n        Debug.bluefs( serviceMetaDTOS );\n\n        client.getAppointNodus().close();\n    }\n\n\n\n    public void vitalize1 () throws Exception {\n        KOIMappingDriver koiMappingDriver = new ServiceMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        UniformServiceInstrument servicesTree = new UniformServiceInstrument( koiMappingDriver );\n\n        WolfMCServer wolfKing = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n\n        UniformServiceManager serviceManager = new UniformServiceManager( servicesTree );\n        serviceManager.hookAppointServer( new HuskyServiceAppointServer( new WolvesAppointServer( wolfKing, HuskyDuplexExpress.class ) ));\n        RedCollectiveServiceRegiment serviceRegiment = new RedCollectiveServiceRegiment(this, servicesTree, serviceManager);\n\n        serviceRegiment.startServiceManage();\n\n\n        UlfClient ulfClient = new WolfMCClient(\n                new GuidAllocator72V2().nextGUIDi64(), \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" )\n        );\n        HuskyServiceClient managerClient = new HuskyServiceClient( ulfClient, servicesTree.getGuidAllocator() );\n        managerClient.startService();\n\n        this.testUniformServiceRegister_Proactive( managerClient );\n\n        //this.oldTest( servicesTree );\n    }\n\n    public void testUniformServiceRegister_Proactive( HuskyServiceClient managerClient ) throws Exception {\n        DuplexAppointClient client = managerClient.getAppointNodus();\n   
     ServiceMetaManipulationIface metaIface = client.getIface(ServiceMetaManipulationIface.class);\n        ServiceMetaDTO meta = metaIface.queryServiceMetaByPath( \"root/test/app/ser\" );\n        Debug.greenfs( meta );\n\n        String guid = metaIface.evalCreationStatement( \"{ root: { test: { app: { metaType: ApplicationElement, alias:as, services: { test1: { metaType: ServiceElement, type: Microservice } } } } } }\" );\n        ServiceMetaDTO meta1 = metaIface.queryServiceMetaByPath( \"root/test/app/test1\" );\n        Debug.greenfs( meta1 );\n\n        managerClient.registerService( managerClient.getGuidAllocator().parse(meta1.getGuid()), null );\n\n        List<ServiceMetaDTO> serviceMetaDTOS = metaIface.fetchServiceInsMetaByServiceId( meta1.getGuid() );\n        Debug.bluefs( serviceMetaDTOS );\n\n        //managerClient.deregister();\n        client.close();\n\n        //Debug.trace(iface.hasOwnedServiceByServiceId( \"181e9e6-000395-0000-94\" ));\n    }\n\n}\n\npublic class TestGrpcService {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Brian brian = (Brian) Pinecone.sys().getTaskManager().add( new Brian( args, Pinecone.sys() ) );\n            brian.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestAccount.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.account.UniformAccountManager;\nimport com.pinecone.hydra.account.entity.GenericAccount;\nimport com.pinecone.hydra.account.entity.GenericDomain;\nimport com.pinecone.hydra.account.entity.GenericGroup;\nimport com.pinecone.hydra.account.ibatis.hydranium.UserMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.tritium.Tritium;\n\nclass Geralt extends Tritium {\n    public Geralt(String[] args, CascadeSystem parent) {\n        this(args, null, parent);\n    }\n\n    public Geralt(String[] args, String szName, CascadeSystem parent) {\n        super(args, szName, parent);\n    }\n\n    @Override\n    public void vitalize() throws Exception {\n        KOIMappingDriver koiMappingDriver = new UserMappingDriver(\n                this, (IbatisClient) this.getMiddlewareDirector().getRDBManager().getRDBClientByName(\"MySQLKingHydranium\"), this.getDispenserCenter()\n        );\n\n        UniformAccountManager uniformAccountManager = new UniformAccountManager( koiMappingDriver );\n        //this.testInsert( uniformAccountManager );\n        this.testQuery( uniformAccountManager );\n    }\n\n    public void testInsert( UniformAccountManager uniformAccountManager ){\n        GenericDomain genericDomain = new GenericDomain();\n        genericDomain.setName(\"用户域\");\n        GenericGroup genericGroup = new GenericGroup();\n        genericGroup.setName(\"用户组\");\n        GenericAccount genericAccount = new GenericAccount();\n        genericAccount.setName(\"用户\");\n\n        uniformAccountManager.put( genericAccount );\n        uniformAccountManager.put( genericGroup );\n        uniformAccountManager.put( genericDomain );\n        uniformAccountManager.addChildren( 
genericDomain.getGuid(), genericGroup.getGuid() );\n        uniformAccountManager.addChildren( genericGroup.getGuid(), genericAccount.getGuid() );\n    }\n\n    public void testQuery( UniformAccountManager uniformAccountManager ){\n        Debug.trace(uniformAccountManager.get(uniformAccountManager.queryGUIDByPath( \"用户域/用户组/用户\" )));\n    }\n}\npublic class TestAccount {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Geralt Geralt = (Geralt) Pinecone.sys().getTaskManager().add( new Geralt( args, Pinecone.sys() ) );\n            Geralt.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestBucket.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver;\nimport com.pinecone.hydra.storage.bucket.TitanBucketInstrument;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.tritium.Tritium;\n\nclass Ken extends Tritium {\n    public Ken( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Ken( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new BucketMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        TitanBucketInstrument bucketInstrument = new TitanBucketInstrument( koiMappingDriver );\n    }\n\n}\n\npublic class TestBucket {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Ken Ken = (Ken) Pinecone.sys().getTaskManager().add( new Ken( args, Pinecone.sys() ) );\n            Ken.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestDeployTree.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.deploy.PhysicalHost;\nimport com.pinecone.hydra.deploy.entity.GenericPhysicalHost;\nimport com.pinecone.hydra.deploy.ibatis.hydranium.DeployMappingDriver;\nimport com.pinecone.hydra.deploy.kom.UniformDeployInstrument;\nimport com.pinecone.hydra.deploy.kom.entity.GenericPhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.entity.GenericVirtualMachineElement;\nimport com.pinecone.hydra.deploy.kom.entity.PhysicalHostElement;\nimport com.pinecone.hydra.deploy.kom.marshaling.DeployJSONDecoder;\nimport com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128;\nimport com.walnut.archcraft.ender.EnderHydra;\n\n\nclass Randon extends EnderHydra {\n    public Randon( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Randon( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new DeployMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n\n        UniformDeployInstrument deployInstrument = new UniformDeployInstrument( koiMappingDriver );\n\n        this.testGet( deployInstrument );\n\n    }\n\n    private void 
testInsert(UniformDeployInstrument instrument) {\n/*        String jsonConfig = \"{\"\n                + \"name: 'dataSyncJob', \"\n                + \"description: 'Synchronize DB records between clusters', \"\n                + \"extraInformation: 'retries=3; timeout=5000ms', \"\n                + \"enable: false\"\n                + \"}\";*/\n        String jsonConfig = \"{\"\n                + \"name: 'Spark', \"\n                + \"description: 'Track server health metrics in real-time', \"\n                + \"extraInformation: 'interval=60s; alertThreshold=90%', \"\n                + \"enable: false, \"\n                + \"}\";\n\n\n        /*Debug.trace(instrument.queryGUIDByPath(\"specialTask\"));*/\n    }\n\n    private void testGet( UniformDeployInstrument instrument ){\n/*        DeployJSONDecoder decoder = new DeployJSONDecoder( instrument );\n        decoder.decode( new JSONMaptron( \"{ root: { test: { cluster: { metaType: ClusterElement, type:Physic, deployments: { vm1: { metaType: VirtualMachineElement, ipAddress: 192.168.1.1, status: 12222s } } } } } }\" ) );\n\n        Debug.fmp( 2, instrument.queryElement( \"root\" ).toJSONObject() );\n\n        Debug.greenfs( instrument.queryElement( \"root/test/cluster/vm1\" ) );*/\n\n        PhysicalHostElement physicalHost = new GenericPhysicalHostElement();\n        physicalHost.setName(\"testPhysicalHost\");\n        physicalHost.setIpAddress(\"127.0.0.1\");\n        physicalHost.setHardwareSpecs(\"Intel i7-7700HQ\");\n        physicalHost.setLocalDomain(\"localhost\");\n        physicalHost.setWideDomain(\"wideDomain\");\n        physicalHost.setStatus(\"OK\");\n        physicalHost.setEnable(true);\n        instrument.put(physicalHost);\n        //Debug.trace(deployInstrument.getPath( GUIDs.GUID72(\"181e9e4-000395-0000-d4\") ));\n    }\n\n    private void testUpdate( UniformDeployInstrument instrument ) {\n\n        GenericVirtualMachineElement virtualMachine = new GenericVirtualMachineElement();\n      
  virtualMachine.setName(\"testVirtualMachine08\");\n        virtualMachine.setIpAddress(\"127.0.0.9\");\n        virtualMachine.setStatus(\"OK\");\n        virtualMachine.setEnable(true);\n        virtualMachine.setDescription(\"testVirtualMachine009\");\n        virtualMachine.setMetaGuid(GUIDs.GUID128(\"2261a1a-000377-0000-78\"));\n        virtualMachine.setGuid(GUIDs.GUID128(\"24e2fc4-00016c-0000-dc\"));\n        virtualMachine.setAffiliateHostGuid(GUIDs.GUID128(\"2261a1a-000377-0000-75\"));\n        instrument.update( virtualMachine );\n    }\n\n    private void testInsertPhysicalHost(UniformDeployInstrument instrument) {\n\n /*       GenericPhysicalHostElement physicalHost = new GenericPhysicalHostElement();\n        physicalHost.setName(\"testPhysicalHost\");\n        physicalHost.setIpAddress(\"127.0.0.1\");\n        physicalHost.setHardwareSpecs(\"Intel i7-7700HQ\");\n        physicalHost.setStatus(\"OK\");\n        physicalHost.setLocalDomain(\"testDomain\");\n\n        //physicalHost.setGuid( GUIDs.GUID72(\"1b05246-0002cc-0001-f1\"));\n\n       *//* instrument.newPhysicalHost(physicalHost);*//*\n        instrument.put( physicalHost );\n        Debug.info( \"physicalHost: \" + physicalHost);\n\n*/\n/*        GenericQuickElement quickElement = new GenericQuickElement();\n        quickElement.setTypeName(\"testQuickElement009\");\n\n       Debug.trace(instrument.put(quickElement)) ;*/\n        //测试quick\n/*        Debug.trace(instrument.get(GUIDs.GUID72(\"2508594-0002eb-0000-c0\"))) ;\n        //测试virtualElement\n        Debug.trace(instrument.get(GUIDs.GUID72(\"24e2fc4-00016c-0000-dc\"))) ;\n        //测试physicalHost\n        Debug.trace(instrument.get(GUIDs.GUID72(\"2511a12-0003bb-0001-d0\"))) ;*/\n/*        GenericPhysicalHostElement physicalHost = new GenericPhysicalHostElement( instrument);\n        physicalHost.setName(\"testPhysicalHost\");\n        physicalHost.setIpAddress(\"127.0.0.1\");\n        physicalHost.setHardwareSpecs(\"Intel 
i7-7700HQ\");\n        physicalHost.setStatus(\"OK\");\n        physicalHost.setLocalDomain(\"testDomain\");\n\n        instrument.put( physicalHost );*/\n     /*   Debug.trace(instrument.get(GUIDs.GUID72(\"2508b12-000080-0000-58\"))) ;\n        GenericNamespace  namespace = new GenericNamespace();\n        namespace.setName(\"testNamespace\");\n        namespace.setDescription(\"testNamespace\");\n        namespace.setExtraInformation(\"testNamespace\");\n        instrument.put( namespace );*/\n\n/*        TreeNode roodNode = instrument.queryElement(\"testNamespace\" );\n        Debug.greenfs(\"根节点信息: \" + roodNode.evinceTreeNode().toJSONString());\n        GenericQuickElement taskElement = new GenericQuickElement(\n                new JSONMaptron(\"{ name: '特殊服务9', parentGuid: '\" + roodNode.getGuid() + \"' }\")\n        );\n        instrument.put(taskElement);*/\n/*\n        Debug.trace( instrument.(  GUIDs.GUID72(\"250e136-0002ab-0001-bc\")) );\n*/\n/*        TreeNode root = instrument.queryElement(\"testNamespace\");\n       Debug.trace(instrument.getChildren(root.getGuid())) ;*/\n\n        /*Debug.fmp(2, \"完整树结构:\\n\" + root.toJSONString());*/\n\n/*\n        Debug.trace(instrument.remove();)\n*/\n  /*      GenericQuickElement quickElement = new GenericQuickElement();\n        quickElement.setName(\"testQuickElement\");\n        quickElement.setTypeName(\"testQuickElement01\");\n        quickElement.setDescription(\"testQuickElement02\");\n        instrument.put( quickElement );*/\n\n/*       Debug.trace( instrument.affirmQuick(\"testQuickElement\") );\n       ElementNode quickElement = instrument.queryElement(\"testQuickElement\");\n       Debug.trace(quickElement);*/\n    }\n\n    private void testInsertVirtualMachine( UniformDeployInstrument instrument ) {\n\n        GenericVirtualMachineElement virtualMachine = new GenericVirtualMachineElement();\n        virtualMachine.setName(\"testVirtualMachine01\");\n        
virtualMachine.setIpAddress(\"127.0.0.5\");\n        virtualMachine.setStatus(\"OK\");\n        virtualMachine.setEnable(true);\n        virtualMachine.setDescription(\"testVirtualMachine\");\n        virtualMachine.setMetaGuid(GUIDs.GUID128(\"2261a1a-000377-0000-78\"));\n        virtualMachine.setGuid(GUIDs.GUID128(\"2261a1a-000377-0000-76\"));\n        virtualMachine.setAffiliateHostGuid(GUIDs.GUID128(\"2261a1a-000377-0000-75\"));\n        instrument.put( virtualMachine );\n   /*     GenericVirtualMachineElement virtualMachine = new GenericVirtualMachineElement();\n        virtualMachine = (GenericVirtualMachineElement)instrument.get( GUIDs.GUID72(\"24e2fc4-00016c-0000-dc\"));\n        Debug.trace(virtualMachine);*/\n\n        instrument.get(GUIDs.GUID128(\"24b1e50-000044-0000-50\"));\n\n\n      /*  GenericVirtualMachine  virtualMachine = new GenericVirtualMachine();\n        virtualMachine.setName(\"VirtualMachine1\");\n        virtualMachine.setIpAddress(\"192.168.1.1\");\n        virtualMachine.setStatus(\"OK\");\n        //virtualMachine.setGuid( GUIDs.GUID72(\"1b05246-0002cc-0001-f2\"));\n        //virtualMachine.setAffiliateHostGuid(GUIDs.GUID72(\"1b05246-0002cc-0001-f3\"));\n        instrument.newVirtualMachine(virtualMachine);\n        Debug.info( \"virtualMachine: \" + virtualMachine);\n*/\n\n/*        GenericVirtualMachineElement  virtualMachine = new GenericVirtualMachineElement();\n        virtualMachine.setName(\"VirtualMachine1\");\n        virtualMachine.setIpAddress(\"192.168.1.1\");\n        virtualMachine.setStatus(\"OK\");\n        virtualMachine.setEnabled(true);\n        virtualMachine.setExtraInformation(\"extraInformation\");\n        virtualMachine.setDescription(\"description\");\n        virtualMachine.setGuid(GUIDs.GUID72( \"1b05246-0002cc-0001-f3\"));\n        virtualMachine.setMetaGuid(GUIDs.GUID72(\"1b05246-0002cc-0001-f4\"));\n        instrument.put(virtualMachine);\n        Debug.trace(virtualMachine.toJSONString());*/\n    
}\n\n}\n\npublic class TestDeployTree {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Randon Jesse = (Randon) Pinecone.sys().getTaskManager().add( new Randon( args, Pinecone.sys() ) );\n            Jesse.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestInnerTree.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.scenario.ibatis.hydranium.ScenarioMappingDriver;\nimport com.pinecone.hydra.scenario.tree.DistributedScenarioMetaTree;\nimport com.pinecone.hydra.scenario.tree.GenericDistributedScenarioMetaTree;\nimport com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver;\nimport com.pinecone.hydra.service.kom.UniformServiceInstrument;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.tritium.Tritium;\n\n\nclass LadyGaga extends Tritium {\n    public LadyGaga( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public LadyGaga( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n//        Sparta sparta = new Sparta( \"Sparta\", this );\n//        sparta.execute();\n//\n//        Thread shutdowner = new Thread(()->{\n//            Debug.sleep( 5000 );\n//            sparta.terminate();\n//        });\n//        //shutdowner.start();\n//\n//        this.getTaskManager().add( sparta );\n//        this.getTaskManager().syncWaitingTerminated();\n\n\n    }\n\n    private void testService(){\n        KOIMappingDriver koiMappingDriver = new ServiceMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        UniformServiceInstrument distributedScopeServiceTree = new UniformServiceInstrument(koiMappingDriver);\n\n        //Debug.trace(distributedScopeServiceTree.getNode(GUIDs.GUID72(\"f83ccfc-0002f9-0000-b4\")).toString());\n        
Debug.trace(distributedScopeServiceTree.getPath(GUIDs.GUID128(\"f83ccfc-0002f9-0000-b4\")));\n    }\n\n    private void testScenario(){\n        KOIMappingDriver koiMappingDriver = new ScenarioMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        DistributedScenarioMetaTree distributedScenarioMetaTree = new GenericDistributedScenarioMetaTree(koiMappingDriver);\n//        GenericNamespaceNode genericNamespaceNode = new GenericNamespaceNode();\n//        genericNamespaceNode.setName(\"瘟疫公司\");\n//        genericNamespaceNode.setNamespaceNodeMeta(new GenericNamespaceNodeMeta());\n//        genericNamespaceNode.setScenarioCommonData(new GenericScenarioCommonData());\n//        distributedScenarioMetaTree.insert(genericNamespaceNode);\n        distributedScenarioMetaTree.get(GUIDs.GUID128(\"1f5bced8-000315-0002-70\"));\n    }\n}\n\n\npublic class TestInnerTree {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            LadyGaga ladyGaga = (LadyGaga) Pinecone.sys().getTaskManager().add( new LadyGaga( args, Pinecone.sys() ) );\n            ladyGaga.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestKOMKing.java",
    "content": "package com.sparta;\n\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.storage.mfs.MappingFileSystem;\nimport com.pinecone.hydra.storage.mfs.NativeMappingFileSystem;\nimport com.pinecone.hydra.system.ko.runtime.KernelExpressInstrument;\nimport com.pinecone.hydra.system.ko.runtime.GenericRuntimeInstrumentConfig;\nimport com.pinecone.hydra.unit.imperium.entity.EntityNode;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.walnut.odin.task.GenericRavenTaskConfig;\nimport com.walnut.odin.task.RavenTaskInstrument;\nimport com.walnut.odin.task.mapper.OdinUniformTaskMappingDriver;\n\nclass Loki extends Tritium {\n    public Loki( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Loki( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        OdinUniformTaskMappingDriver categoryMappingDriver = new OdinUniformTaskMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        RavenTaskInstrument ravenTaskInstrument = new RavenTaskInstrument( categoryMappingDriver, new GenericRavenTaskConfig() );\n\n\n        KernelExpressInstrument kernelExpressInstrument = new KernelExpressInstrument( this, \"\", new GenericRuntimeInstrumentConfig());\n        kernelExpressInstrument.setTargetingName(\"task1\");\n        kernelExpressInstrument.mount( \"task1/afc\", ravenTaskInstrument );\n\n        MappingFileSystem mappingFileSystem = new NativeMappingFileSystem( \"E:/\" );\n        kernelExpressInstrument.directMount( \"direct/test\", mappingFileSystem);\n\n        this.testSimple( kernelExpressInstrument );\n    }\n\n   
 private void testSimple( KernelExpressInstrument instrument ) {\n        EntityNode entityNode = instrument.queryNode( \"direct/test/MyFiles\" );\n        Debug.trace( entityNode );\n        //Debug.fmp( 2, entityNode );\n//        Debug.fmp( 2, instrument.querySystemKernelObjectPath( entityNode.getGuid() ) );\n//\n//        Debug.fmp( 2, instrument.getMountedInstrument( \"task1/afc\" ) );\n    }\n}\n\npublic class TestKOMKing {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Loki loki = (Loki) Pinecone.sys().getTaskManager().add( new Loki( args, Pinecone.sys() ) );\n            loki.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestLayer.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.layer.ibatis.hydranium.LayerMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.vgraph.layer.AtlasLayer;\nimport com.pinecone.hydra.unit.vgraph.layer.AtlasLayerNamespace;\nimport com.pinecone.hydra.unit.vgraph.layer.VLayerInstrument;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nimport java.util.ArrayList;\n\nclass Louis extends Tritium {\n    public Louis( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Louis( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new LayerMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        VLayerInstrument vLayerManager = new VLayerInstrument(koiMappingDriver);\n        this.testQuery(vLayerManager);\n        //this.testInsert( vLayerManager );\n    }\n\n    public void testInsert(VLayerInstrument vLayerManager) {\n//        AtlasLayer atlasLayer = new AtlasLayer();\n//        atlasLayer.setName(\"图层1\");\n//        ArrayList<GUID> sourceGuids = new ArrayList<>();\n//        sourceGuids.add( GUIDs.GUID128(\"01972f7e-1347-7ef4-bdbb-efdc52b7ddf4\") );\n//        sourceGuids.add( GUIDs.GUID128(\"01972f7e-15c0-72b8-ad49-41fa8027ca32\") );\n//        sourceGuids.add( GUIDs.GUID128(\"01972f7e-15cd-7dcf-8884-88a587ec2c4e\") );\n//        sourceGuids.add( GUIDs.GUID128(\"01972f7e-15d9-7565-8ca0-040644fd4493\") 
);\n//        sourceGuids.add( GUIDs.GUID128(\"01972f7e-15e4-73be-863f-02fee71bdc6b\") );\n//        sourceGuids.add( GUIDs.GUID128(\"01972f7e-15ee-71a3-8110-7b61c273e7c7\") );\n//        atlasLayer.setSourceGuids( sourceGuids );\n//        atlasLayer.setSourceGuids( sourceGuids );\n//\n//        ArrayList<GUID> sinkGuids = new ArrayList<>();\n//        sinkGuids.add( GUIDs.GUID128(\"01972f7e-164e-7f80-8e67-a22060a3afd7\") );\n//        atlasLayer.setSinkGuids( sinkGuids );\n//\n\n\n//        AtlasLayer atlasLayer = new AtlasLayer();\n//        atlasLayer.setName(\"图层11\");\n//\n//        ArrayList<GUID> sourceGuids = new ArrayList<>();\n//        sourceGuids.add(GUIDs.GUID128(\"01972f7e-1347-7ef4-bdbb-efdc52b7ddf4\"));\n//        sourceGuids.add(GUIDs.GUID128(\"01972f7e-15c0-72b8-ad49-41fa8027ca32\"));\n//\n//        atlasLayer.setSourceGuids( sourceGuids );\n//\n//        ArrayList<GUID> sinkGuids = new ArrayList<>();\n//        sinkGuids.add(GUIDs.GUID128(\"01972f7e-164e-7f80-8e67-a22060a3afd7\"));\n//        atlasLayer.setSinkGuids(sinkGuids);\n\n        AtlasLayer atlasLayer = new AtlasLayer();\n        atlasLayer.setName(\"图层12\");\n\n        ArrayList<GUID> sourceGuids = new ArrayList<>();\n        sourceGuids.add(GUIDs.GUID128(\"01972f7e-15cd-7dcf-8884-88a587ec2c4e\"));\n        sourceGuids.add(GUIDs.GUID128(\"01972f7e-15d9-7565-8ca0-040644fd4493\"));\n        sourceGuids.add(GUIDs.GUID128(\"01972f7e-15e4-73be-863f-02fee71bdc6b\"));\n        sourceGuids.add(GUIDs.GUID128(\"01972f7e-15ee-71a3-8110-7b61c273e7c7\"));\n\n        atlasLayer.setSourceGuids( sourceGuids );\n\n        ArrayList<GUID> sinkGuids = new ArrayList<>();\n        sinkGuids.add(GUIDs.GUID128(\"01972f7e-164e-7f80-8e67-a22060a3afd7\"));\n        atlasLayer.setSinkGuids( sinkGuids );\n\n        vLayerManager.put(atlasLayer);\n\n//        AtlasLayerNamespace atlasLayerNamespace = new AtlasLayerNamespace();\n//        atlasLayerNamespace.setName(\"这是测试命名空间\");\n//        vLayerManager.put( 
atlasLayerNamespace );\n        //vLayerManager.addChild( GUIDs.GUID128(\"2261a1a-000377-0000-78\"), GUIDs.GUID128(\"2261524-000394-0001-fc\") );\n\n    }\n\n    public void testQuery( VLayerInstrument vLayerManager ) {\n        // todo 这种情况使用路径取不出\n        //Debug.trace(vLayerManager.queryGUIDByPath( \"图层1/图层11\" ));\n    }\n}\n\npublic class TestLayer {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Louis louis = (Louis) Pinecone.sys().getTaskManager().add( new Louis( args, Pinecone.sys() ) );\n            louis.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestQueue.java",
    "content": "package com.sparta;\n\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.queue.ibatis.hydranium.QueueMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue;\nimport com.pinecone.hydra.unit.iqueue.ArchQueueTableMeta;\nimport com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.entity.GenericQueueElement;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\nclass Chris extends Tritium {\n    public Chris( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Chris( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new QueueMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        MegaDeflectPriorityQueueMeta queueTableMeta = new ConfigurableMegaDeflectPriorityQueueMeta();\n        queueTableMeta.setQueueTableName( \"hydra_queue_nodes\" );\n        MagnitudeDPQueue dpQueue = new MagnitudeDPQueue(koiMappingDriver, 6L, \"segment_name\", \"测试队列\", queueTableMeta);\n        this.testInsert( dpQueue );\n        //this.testQuery( dpQueue );\n    }\n\n    public void testInsert( MagnitudeDPQueue dpQueue ) {\n        GenericQueueElement element = new GenericQueueElement();\n        element.setObjectGuid(GUIDs.GUID128(\"22989c2-000225-0000-4c\"));\n        element.setPriority(2);\n        dpQueue.pushBack( element );\n    }\n\n    
public void testQuery( MagnitudeDPQueue dpQueue ) {\n        Debug.trace(\"目前的队列是否为空：\" + dpQueue.isEmpty());\n        for( int i = 0; i < 3; i++ ) {\n            Debug.trace( \"输出队列头数据：\" +dpQueue.popFront() + \"目前的位置是：\" + dpQueue.currentPosition() );\n            Debug.trace( \"目前队列的size是：\" + dpQueue.size() );\n        }\n    }\n}\npublic class TestQueue {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Chris chris = (Chris) Pinecone.sys().getTaskManager().add( new Chris( args, Pinecone.sys() ) );\n            chris.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestRegistry.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.registry.KOMRegistry;\nimport com.pinecone.hydra.registry.GenericKOMRegistry;\nimport com.pinecone.hydra.registry.entity.ElementNode;\nimport com.pinecone.hydra.registry.entity.Properties;\nimport com.pinecone.hydra.registry.ibatis.hydranium.RegistryMappingDriver;\nimport com.pinecone.hydra.registry.marshaling.RegistryJSONDecoder;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\nclass StanMarsh extends Tritium {\n    public StanMarsh( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public StanMarsh( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new RegistryMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        KOMRegistry registry = new GenericKOMRegistry( koiMappingDriver );\n\n        //this.testBasicInsert( registry );\n        //this.testDeletion( registry );\n        //this.testDataExtends( registry );\n        //this.testHardLink( registry );\n        //this.testCopy( registry );\n        //this.testMove( registry );\n        //this.testMisc( registry );\n        this.testSelector( registry );\n        //this.testAttributes( registry );\n        //this.testMarshaling( registry );\n    }\n\n    private void testBasicInsert( KOMRegistry registry ) {\n        registry.putProperties( \"game/minecraft/wizard1\", new 
JSONMaptron( \"{ name:ken, age:22, species:human, job:wizard }\" ) );\n        registry.putProperties( \"game/minecraft/sorcerer1\", new JSONMaptron( \"{ name:dragonking, hp:666, species:dragon, job:sorcerer }\" ) );\n        registry.putProperties( \"game/terraria/mob1\", new JSONMaptron( \"{ name:lural, age:666, species:cthulhu, job:mob }\" ) );\n        registry.putProperties( \"game/witcher/mob1\", new JSONMaptron( \"{ name:witcher_mob1, age:-789, species:'undefined', job:mob }\" ) );\n        registry.putProperties( \"game/witcher/mob2\", new JSONMaptron( \"{ name:wxsdw, age:666, species:cthulhu, job:mob }\" ) );\n        registry.putProperties( \"game/witcher/mob3\", new JSONMaptron( \"{ name:mob3, age:661, species:cthulhu2, job:mob2 }\" ) );\n        registry.putProperties( \"game/witcher/people/xxx\", new JSONMaptron( \"{ name:xxxx, age:999, species:elf, job:warrior }\" ) );\n        registry.putProperties( \"game/witcher/people/xx2\", new JSONMaptron( \"{ name:xxx2, age:992, species:elf, job:warrior }\" ) );\n\n        registry.putProperties( \"game3a/witcher/people/s4/urge\", new JSONMaptron( \"{ name:darkurge, age:996, species:dragon, job:warrior }\" ) );\n\n        registry.putTextValue( \"game/witcher/jesus\", \"JSONObject\", \"{k:p}\" );\n    }\n\n    private void testDeletion( KOMRegistry registry ) {\n        registry.remove( \"game\" );\n        registry.remove(\"game3a\");\n//        registry.remove( \"game/witcher\" );\n//        registry.remove( \"game/minecraft\" );\n//        registry.remove(\"game/terraria\");\n//        registry.remove(\"game/witcher\");\n        //Debug.fmp( 2, registry.getProperties( registry.queryGUIDByFN( \"game.witcher.mob3\" ) ).getValue( \"name\" ) );\n        //Debug.fmp( 2, registry.get( registry.queryGUIDByFN( \"game3a\" ) ) );\n\n        //registry.remove( \"game\" );\n\n//        registry.affirmProperties( \"泰拉瑞亚.灾厄.至尊灾厄\" );\n//        registry.remove( \"泰拉瑞亚.灾厄.至尊灾厄\" );\n//        Debug.fmp( 4, 
registry.getProperties( \"泰拉瑞亚.灾厄.至尊灾厄\" ).toJSONObject() );\n    }\n\n    private void testMove( KOMRegistry registry ) {\n        //registry.move( \"game/terraria/mob1\", \"game/minecraft/mob1\" );\n        //registry.move( \"game/minecraft/\", \"game/terraria/more\" );\n\n        //registry.move( \"game/minecraft/sorcerer1 \", \"game/terraria/.\" );\n        //Debug.trace( registry.queryElement( \"game/terraria/sorcerer1\" ), registry.queryElement( \"game/minecraft/sorcerer1\" )  );\n    }\n\n    private void testCopy( KOMRegistry registry ) {\n        //this.testBasicInsert( registry );\n        //registry.queryElement(\"game/minecraft/sorcerer1\").evinceProperties().copyTo(registry.queryGUIDByPath(\"game/minecraft/wizard1\"));\n\n        //registry.getProperties( \"game/terraria/mob1\" ).copyTo( \"game/moregame/mmob4\" );\n\n        //Debug.trace( registry.getProperties( \"game/moregame/mmob4\" ) );\n\n        //registry.getNamespace( \"game3a/witcher/\" ).copyTo( registry.affirmNamespace( \"game/owo\" ).getGuid() );\n        //registry.getNamespace( \"game3a/witcher\" ).copyTo( registry.affirmNamespace( \"game/owo\" ).getGuid() );\n\n\n        //Debug.trace( registry.getNamespace( \"game/owo\" ).getChildren() );\n\n\n        //registry.copy( \"game/minecraft/sorcerer1 \", \"game/terraria/.\" );\n        //Debug.trace( registry.queryElement( \"game/terraria/sorcerer1\" ), registry.queryElement( \"game/minecraft/sorcerer1\" )  );\n\n        //registry.copy( \"game/minecraft\", \"game/terraria\" );\n        //Debug.trace( registry.queryElement( \"game/terraria\" ).evinceNamespace().listItem(), registry.queryElement( \"game/minecraft\" ).evinceNamespace().listItem()  );\n\n        //registry.copy( \"game/minecraft\", \"game/terraria/\" );\n        //Debug.trace( registry.queryElement( \"game/terraria\" ).evinceNamespace().listItem(), registry.queryElement( \"game/minecraft\" ).evinceNamespace().listItem()  );\n\n        //registry.copy( \"game/minecraft\", 
\"game/terraria/new\" );\n        //Debug.trace( registry.queryElement( \"game/terraria/new\" ).evinceNamespace().listItem(), registry.queryElement( \"game/minecraft\" ).evinceNamespace().listItem()  );\n    }\n\n    private void testDataExtends( KOMRegistry registry ) {\n//          Debug.trace(registry.fetchRoot());\n//        registry.setAffinity(new GUID72(\"1f7c33d6-000309-0000-f8\"),new GUID72(\"1f7c33d6-0003c1-0000-b0\"));\n\n//        registry.setInheritance();\n        //Debug.trace(registry.queryGUIDByPath(\"game/terraria/mob1\"));\n\n        //registry.newLinkTag(\"game/terraria/mob1\",\"game/minecraft\",\"mob1\");\n        GUID guid = registry.queryGUIDByPath(\"game/terraria/mob1\");\n        Debug.trace(guid);\n\n\n\n        //registry.putProperties( \"game/fiction/character/dragonKing\", new JSONMaptron( \"{ name:DragonKing, age:666, species:dragon, job:sorcerer, hp:999999 }\" ) );\n        //registry.putProperties( \"game/3a/character/red-prince\", new JSONMaptron( \"{ name: RedPrince, species:lizard, job:warrior, force:777777 }\" ) );\n        //registry.setDataAffinity( \"game/3a/character/red-prince\", \"game/fiction/character/dragonKing\" );\n\n\n        GUID kingId   = registry.queryGUIDByPath( \"game/fiction/character/dragonKing\" );\n        GUID princeId = registry.queryGUIDByPath( \"game/3a/character/red-prince\" );\n\n        Debug.fmp( 2, registry.getProperties( \"game/fiction/character/dragonKing\" ) );\n        Debug.fmp( 2, registry.getProperties( princeId ) );\n\n        //Debug.fmp( 2, registry.getProperties( \"game/fiction/character/dragonKing\" ).toJSONObject() );\n        //Debug.fmp( 2, registry.getProperties( princeId ).toJSONObject() );\n\n        Properties princePro = registry.getProperties( princeId );\n        Debug.trace( princePro.getValue( \"hp\" ) );\n        Debug.trace( princePro.getValue( \"name\" ) );\n        Debug.trace( princePro.containsKey( \"hp\" ) );\n        Debug.trace( princePro.hasOwnProperty( \"hp\" ) 
);\n\n\n//        Property property = princePro.get( \"name\" );\n//        property.setValue( \"RedPrince\" );\n//        princePro.update( property );\n//        princePro.set( \"name\", \"RedPrince\" );\n//        Debug.trace( princePro.getValue( \"name\" ) );\n\n\n        //princePro.put( \"hpc\", 999999 );\n        //Debug.trace( princePro.getValue( \"hp\" ) );\n\n        //princePro.remove( \"hpc\" );\n        //princePro.remove( \"hp\" );\n\n//        registry.newHardLink( \"game3a/mix/wizard1\", \"game/witcher\" );\n\n\n\n\n        //Debug.fmp( 2, registry.getProperties( \"game/terraria/more/sorcerer1\" ).toJSONObject() );\n\n        //registry.move();\n\n//        Debug.fmp( 2, registry.getProperties( \"game/terraria/mob1\" ).toJSONObject() );\n        //Debug.fmp( 2, registry.getProperties( \"game/minecraft/mob1\" ).toJSONObject() );\n\n\n\n\n\n//\n//        registry.putProperties( \"movie/terraria/mob1\", new JSONMaptron( \"{ name:lural, age:666, species:cthulhu, job:mob }\" ) );\n//        Debug.trace(registry.fetchRoot());\n\n\n    }\n\n    private void testMisc( KOMRegistry registry ) {\n        //registry.putProperties( \"game/fiction/character/dragonKing\", new JSONMaptron( \"{ name:DragonKing, age:666, species:dragon, job:sorcerer, hp:999999 }\" ) );\n        //registry.putProperties( \"game/3a/character/red-prince\", new JSONMaptron( \"{ name: RedPrince, species:lizard, job:warrior, force:777777 }\" ) );\n\n        //registry.rename( \"game/3a/character/red-prince\", \"red-prince2\" );\n\n        Debug.trace( registry.getProperties( \"game/3a/character/red-prince\" ) );\n    }\n\n    private void testHardLink( KOMRegistry registry ) {\n        //this.testBasicInsert( registry );\n\n        //Debug.trace( registry.queryElement( \"game/minecraft\" ) );\n        //registry.newLinkTag( \"game/witcher\", \"game/minecraft\", \"mount\" );\n\n        //Debug.trace( registry.getMasterTrieTree().queryAllLinkedCount( registry.queryGUIDByPath( 
\"game/witcher\" ) ) );\n        //Debug.trace( registry.getMasterTrieTree().queryStrongLinkedCount( registry.queryGUIDByPath( \"game/witcher\" ) ) );\n\n        //Debug.fmp( 2, registry.queryElement( \"game/minecraft/mount/\" ) );\n        //Debug.fmp( 2, registry.queryElement( \"game/minecraft/mount/mob2\" ) );\n        //Debug.fmp( 2, registry.queryElement( \"mount\" ) );\n        //Debug.fmp( 2, registry.queryElement( \"game/witcher/jesus/\" ) );\n\n        //registry.remove( \"game/minecraft/mount\" );\n        Debug.fmp( 2, registry.queryElement( \"game3a\" ).evinceNamespace().getEnumId() );\n\n        //var children = registry.queryElement( \"game\" ).evinceNamespace().getChildren();\n        //var mc = children.get(\"minecraft\").evinceNamespace().getChildren();\n\n//        var children = registry.queryElement( \"game3a\" ).evinceNamespace().getChildren();\n//        var mc = children.get(\"witcher\").evinceNamespace().getChildren();\n//        Debug.trace( 2, mc );\n\n        //Debug.trace( registry.get )\n        //Debug.fmp( 2, registry.queryElement( \"game/minecraft/\" ).evinceNamespace().listItem() );\n\n\n    }\n\n    private void testSelector( KOMRegistry registry ) {\n        this.testBasicInsert( registry );\n        //Debug.trace( registry.querySelectorJ( \"game.minecraft.wizard1.name\" ) );\n\n        Debug.fmp( 2, registry.querySelectorJ( \"game/witcher/jesus\" ) );\n    }\n\n    private void testAttributes( KOMRegistry registry ) throws Exception {\n        ElementNode node = registry.queryElement( \"game/minecraft/sorcerer1\" );\n        //node.getAttributes().setAttribute( \"title\", \"king\" );\n        //node.getAttributes().clear();\n\n        Debug.fmp( 2, node.getAttributes().size() );\n\n\n    }\n\n    private void testMarshaling( KOMRegistry registry ) {\n//        RegistryJSONEncoder encoder = new RegistryJSONEncoder( registry );\n\n//        ElementNode node = registry.queryElement( \"game/witcher/jesus\" );\n//        Debug.trace( 
encoder.encode( node ) );\n\n\n        RegistryJSONDecoder decoder = new RegistryJSONDecoder( registry );\n\n        Debug.trace( decoder.decode( new JSONMaptron( \"{ game: { character: { \" +\n                \"Ifan: { name: Ifan, hp:90, species: Human }, RedPrince: { name:RedPrince, hp:100, species: Lizard } \" +\n                \"}, attr: 1234, file: text_files } }\" ), null ).evinceNamespace().toJSONObject() );\n\n        Debug.trace( registry.querySelectorJ( \"game\" ) );\n//        registry.queryElement( \"game/character/Ifan\" ).getAttributes().setAttribute( \"state\", \"live\" );\n\n\n//        RegistryEncoder encoder = new RegistryDOMEncoder( registry );\n//        ElementNode node = registry.queryElement( \"game\" );\n//        Debug.echo( encoder.encode( node ).toString() );\n\n    }\n\n}\n\n\npublic class TestRegistry {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            StanMarsh ladyGaga = (StanMarsh) Pinecone.sys().getTaskManager().add( new StanMarsh( args, Pinecone.sys() ) );\n            ladyGaga.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestRemoteProcess.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.UProcess;\nimport com.pinecone.hydra.proc.UniformProcessManager;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\nimport com.pinecone.hydra.proc.image.ArchEntryPointRunnable;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.LocalHostedClassImage;\nimport com.pinecone.hydra.proc.image.kom.VirtualExeImageInstrument;\nimport com.pinecone.hydra.proc.image.kom.VirtualMappingExeImageInstrument;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.walnut.archcraft.ender.EnderHydra;\nimport com.walnut.odin.proc.client.RavenRemoteProcessManagerClient;\nimport com.walnut.odin.proc.client.RemoteProcessManagerClient;\nimport com.walnut.odin.proc.entity.RemoteVitalizationResponse;\nimport com.walnut.odin.proc.entity.UProcessRuntimeMeta;\nimport com.walnut.odin.proc.server.RavenRemoteProcessManagerServer;\nimport com.walnut.odin.proc.server.RemoteProcessManagerServer;\n\nimport java.net.URI;\nimport java.util.Collection;\nimport java.util.Map;\n\nclass Dante extends EnderHydra {\n    public Dante( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Dante( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        WolfMCServer wolfKing = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 
5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n        RemoteProcessManagerServer server = new RavenRemoteProcessManagerServer( this.processManager(), wolfKing );\n        server.startService();\n\n\n\n        ProcessManager clientPM = new UniformProcessManager(\n                this, null, \"Miao\", \"\", null\n        );\n        UlfClient ulfClient = new WolfMCClient(\n                this.getSystemGuidAllocator72().nextGUIDi64(), \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" )\n        );\n        RemoteProcessManagerClient client = new RavenRemoteProcessManagerClient( clientPM, ulfClient );\n        client.startService();\n\n\n        //this.testClientProactiveCreation( server, client );\n        this.testServerProactiveCreation( server, client );\n\n        //this.testImageInstrument();\n    }\n\n    private void testImageInstrument() {\n        VirtualExeImageInstrument imageInstrument = new VirtualMappingExeImageInstrument( this, \"\" );\n\n        ProcessManager manager = this.processManager();\n        ProcessEventHandler eventHandler = new ProcessEventHandler() {\n            @Override\n            public void fired(EntryPointRunnable runnable, ProcessEvent event ) {\n                Debug.bluef( runnable, event );\n            }\n        };\n\n        ExecutionImage image = new LocalHostedClassImage( \"image1\", new ArchEntryPointRunnable( eventHandler ) {\n            @Override\n            public int main( Map<String, String[]> args ) {\n                Debug.greenfs( \"Hello, hi, I am `\" + this.ownedProcess().getName() + \"`!\" );\n                return 0;\n            }\n        }, manager );\n\n\n        imageInstrument.mount( \"hola/senorita\", image );\n\n        Debug.greenfs( imageInstrument.queryImage( \"hola/senorita/image1\" ).getName() );\n    }\n\n    private void testClientProactiveCreation( RemoteProcessManagerServer server, 
RemoteProcessManagerClient client ) throws Exception {\n        ProcessManager manager = this.processManager();\n\n        ProcessEventHandler eventHandler = new ProcessEventHandler() {\n            @Override\n            public void fired( EntryPointRunnable runnable, ProcessEvent event ) {\n                Debug.bluef( runnable, event );\n            }\n        };\n\n        ExecutionImage image = new LocalHostedClassImage( \"gay\", new ArchEntryPointRunnable( eventHandler ) {\n            @Override\n            public int main( Map<String, String[]> args ) {\n                Debug.greenfs( \"Hello, hi, I am `\" + this.ownedProcess().getName() + \"`!\" );\n                Debug.greenfs( this.ownedProcess().getPID() );\n                Debug.greenfs( this.ownedProcess().getLocalPID() );\n\n                Debug.greenfs( this.ownedProcess().getEnvironmentVariables() );\n                Debug.greenfs( this.ownedProcess().getStartupArguments() );\n                Debug.bluef( this.ownedProcess().getControllableLevel() );\n                Debug.bluef( this.ownedProcess().getOwnedProcessManager() );\n                Debug.greenfs( this.ownedProcess().parentProcess() );\n\n                return 0;\n            }\n        }, manager );\n\n        //LocalUProcess process = manager.createLocalHostedProcess( image, null, Map.of( \"fuck\", new String[]{ \"you\", \"she\", \"he\", \"it\" } ) );\n\n\n        UProcess process = client.createLocalUProcess(image, null, Map.of(\"fuck\", new String[]{\"you\", \"she\", \"he\", \"it\"}), null);\n        server.startRemoteUProcess( process.getGuid() );\n    }\n\n    private void testServerProactiveCreation( RemoteProcessManagerServer server, RemoteProcessManagerClient client ) throws Exception {\n        ProcessManager manager = this.processManager();\n        ProcessEventHandler eventHandler = new ProcessEventHandler() {\n            @Override\n            public void fired(EntryPointRunnable runnable, ProcessEvent event ) {\n        
        Debug.bluef( runnable, event );\n            }\n        };\n\n        ExecutionImage image = new LocalHostedClassImage( \"image_c\", new ArchEntryPointRunnable( eventHandler ) {\n            @Override\n            public int main( Map<String, String[]> args ) {\n                Debug.greenfs( \"Hello, hi, I am `\" + this.ownedProcess().getName() + \"`!\" );\n                Debug.sleep( 1000 );\n                Debug.greenfs( \"Miao~\" );\n                return 1984;\n            }\n        }, manager );\n\n\n\n\n        client.registerLocalScopeExecutionImage( \"hola/senorita\", image );\n\n        ExecutionImage ic = client.queryExecutionImage( \"hola/senorita/image_c\" );\n        ExecutionImage ig = client.queryExecutionImage( \"/sys/public/global/exe/images/hola/senorita/image_c\" );\n\n        Debug.redfs( ic, ig );\n\n        ic = client.queryExecutionImage( new URI(\"uofs:///hola/senorita/image_c\") );\n        ig = client.queryExecutionImage( new URI(\"uofs:///sys/public/global/exe/images/hola/senorita/image_c\") );\n\n        Debug.redfs( ic, ig );\n\n        RemoteVitalizationResponse response = server.vitalizeRemoteUProcess(\n                client.getClientId(), new URI(\"uofs:///sys/public/global/exe/images/hola/senorita/image_c\"), this.getPID(),\n                Map.of(\"fuck\", new String[]{\"you\", \"she\", \"he\", \"it\"}), Map.of(\"kill\", new String[]{\"you\", \"she\", \"he\", \"it\"})\n        );\n\n        Collection<UProcess> ps = server.searchProcessesByName( \"image_c\" );\n        UProcess proc = ps.iterator().next();\n        Debug.greenfs( proc.getName() );\n\n        UProcessRuntimeMeta meta = server.queryProcessRuntimeMeta( proc.getPID() );\n        Debug.warn( meta.getName() );\n    }\n\n}\npublic class TestRemoteProcess {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Dante dante = (Dante) Pinecone.sys().getTaskManager().add( new Dante( args, 
Pinecone.sys() ) );\n            dante.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestRuntime.java",
    "content": "package com.sparta;\n\nimport java.util.Map;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.system.regime.arch.Lord;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\nimport com.pinecone.hydra.proc.event.ProcessLifecycleHandler;\nimport com.pinecone.hydra.proc.image.ArchEntryPointRunnable;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.LocalHostedClassImage;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.walnut.odin.atlas.advance.GenericTapedBFSGraphAdvancer;\nimport com.walnut.odin.atlas.advance.strategy.AtlasPriorityProcessStrategy;\nimport com.walnut.odin.atlas.advance.strategy.MegaInDegreeFirstStrategy;\nimport com.walnut.odin.atlas.graph.RuntimeAtlasInstrument;\nimport com.walnut.odin.atlas.graph.UniformRuntimeAtlas;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.unit.iqueue.ConfigurableMegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.ConfigurableMegaStratumQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.MagnitudeDPQueue;\nimport com.pinecone.hydra.unit.iqueue.MegaDPStratumQueue;\nimport com.pinecone.hydra.unit.iqueue.MegaDeflectPriorityQueueMeta;\nimport com.pinecone.hydra.unit.iqueue.MegaStratumQueueMeta;\nimport com.pinecone.hydra.unit.vgraph.MagnitudeVectorDAG;\nimport com.pinecone.hydra.unit.vgraph.entity.GraphNode;\nimport 
com.pinecone.hydra.unit.vgraph.layer.LayerInstrument;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.archcraft.ender.EnderHydra;\nimport com.walnut.odin.conduct.CollectiveTaskLegionary;\nimport com.walnut.odin.conduct.RavenCollectiveTaskLegionary;\nimport com.walnut.odin.conduct.schedule.RavenTaskScheduler;\nimport com.walnut.odin.system.Odin;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\n\nclass Rick extends EnderHydra {\n    public Rick( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Rick( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n\n        WolfMCServer wolfKing = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n        this.getDispenserCenter().getInstanceDispenser().registerInstance( \"TaskWolfKing\", wolfKing );\n\n        Lord lord = this.getLordFederation().instantiate( \"KernelOdinLord\", \"./system/setup/lords/odin.json5\" );\n\n        Odin odin = (Odin) lord;\n        odin.vitalize();\n\n\n        LayerInstrument layerInstrument = odin.layerInstrument();\n        CentralizedTaskInstrument uniformTaskInstrument = odin.taskRegiment().taskInstrument();\n        RuntimeAtlasInstrument uniformRuntimeAtlas = odin.atlasInstrument();\n\n\n\n\n        //this.testInsert(uniformRuntimeAtlas);\n        //this.testQuery( uniformRuntimeAtlas );\n        //this.testTape( uniformRuntimeAtlas, koiMappingDriver );\n        //this.testAdvancer( uniformRuntimeAtlas, koiMappingDriver,layerInstrument );\n        this.testOrchestrator( odin );\n    }\n\n    public void testInsert(UniformRuntimeAtlas uniformRuntimeAtlas) {\n        GuidAllocator guidAllocator = uniformRuntimeAtlas.getGuidAllocator();\n\n//        for( int i = 1; i<=12; i++ ) 
{\n//            TaskAtlasNode taskAtlasNode = new TaskAtlasNode();\n//            taskAtlasNode.setName(\"测试图节点\" + i);\n//            uniformRuntimeAtlas.put( taskAtlasNode );\n//        }\n\n        //uniformRuntimeAtlas.put( GUIDs.GUID72(\"252386a-0000ca-0001-f0\"),taskAtlasNode );\n        uniformRuntimeAtlas.addChild(GUIDs.GUID128(\"01972f7e-1642-75c5-aa70-82a752fd5e05\"),GUIDs.GUID128(\"01972f7e-164e-7f80-8e67-a22060a3afd7\"));\n        //uniformRuntimeAtlas.put(GUIDs.GUID72(\"20dc3d8-00007b-0000-50\"), taskAtlasNode);\n    }\n\n    public void testQuery(UniformRuntimeAtlas uniformRuntimeAtlas) {\n//        GuidAllocator guidAllocator = uniformRuntimeAtlas.getGuidAllocator();\n//        TaskGraphNode query = uniformRuntimeAtlas.query(GUIDs.GUID72(\"20dc3d8-00007b-0000-50\"));\n//        Debug.trace(query.toJSONString());\n//        List<String> path = uniformRuntimeAtlas.getPath(GUIDs.GUID72(\"210f43c-000017-0000-64\"));\n//        Debug.trace(path);\n\n        GraphNode graphNode = uniformRuntimeAtlas.queryGraphNodeByTaskGuid(GUIDs.GUID128(\"21164d6-0003e5-000f-50\"));\n        Debug.trace(graphNode.toJSONString());\n\n        TaskElement taskElement = uniformRuntimeAtlas.queryTaskElementByGuid(GUIDs.GUID128(\"233e952-000010-0000-c0\"));\n\n        Debug.trace(taskElement.toJSONObject());\n    }\n\n    public void testTape( UniformRuntimeAtlas uniformRuntimeAtlas, KOIMappingDriver driver ) {\n//        MagnitudeVectorDAG magnitudeVectorDAG = new MagnitudeVectorDAG( GUIDs.GUID128(\"22610ea-00002d-0000-a0\"),uniformRuntimeAtlas.getMasterManipulator().getVectorGraphMasterManipulator(), uniformRuntimeAtlas.getConfig()  );\n//        GraphStratumTape tapeded = uniformRuntimeAtlas.tapedGraphStratumAdvancer(magnitudeVectorDAG, driver);\n//        //Debug.trace(tapeded.next().toJSONString());\n//        Debug.trace(tapeded.fetchNodes(2,1));\n    }\n\n    public void testAdvancer( UniformRuntimeAtlas uniformRuntimeAtlas, KOIMappingDriver driver, LayerInstrument 
layerInstrument ) {\n        MagnitudeVectorDAG magnitudeVectorDAG = (MagnitudeVectorDAG) uniformRuntimeAtlas.queryByPath( \"l1/l11\" );\n        MegaDeflectPriorityQueueMeta meta1 = new ConfigurableMegaDeflectPriorityQueueMeta();\n        meta1.setQueueTableName( \"hydra_queue_nodes\" );\n        MegaStratumQueueMeta meta2 = new ConfigurableMegaStratumQueueMeta();\n        meta2.setQueueTableName( \"hydra_temporary_queue_nodes\" );\n        MagnitudeDPQueue magnitudeDPQueue = new MagnitudeDPQueue(driver, 0, \"segment_name\", \"测试队列\", meta1);\n        MegaDPStratumQueue megaDPStratumQueue = new MegaDPStratumQueue(driver, \"segment_name\", \"测试临时队列\", meta2);\n\n        MegaInDegreeFirstStrategy strategyChain = new MegaInDegreeFirstStrategy(uniformRuntimeAtlas, magnitudeDPQueue, megaDPStratumQueue,layerInstrument);\n        AtlasPriorityProcessStrategy atlasPriorityProcessStrategy = new AtlasPriorityProcessStrategy();\n        atlasPriorityProcessStrategy.addStrategy( strategyChain );\n        GenericTapedBFSGraphAdvancer advancer = new GenericTapedBFSGraphAdvancer( uniformRuntimeAtlas, magnitudeDPQueue,atlasPriorityProcessStrategy );\n        advancer.traverse(magnitudeVectorDAG);\n    }\n\n    public void testOrchestrator( Odin odin ) throws Exception {\n        odin.taskRegiment().startRemoteProcessServer();\n\n        UlfClient ulfClient = new WolfMCClient(\n                this.getSystemGuidAllocator72().nextGUIDi64(), \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" )\n        );\n        CollectiveTaskLegionary regimentClient = new RavenCollectiveTaskLegionary( \"jesus\", this, ulfClient );\n        regimentClient.startService();\n        regimentClient.joinRegiment();\n\n        regimentClient.remoteProcessManagerClient().addProcessLifecycleHandler(new ProcessLifecycleHandler() {\n            @Override\n            public void fired(String imageAddress, EntryPointRunnable runnable, 
ProcessEvent event ) {\n                Debug.greenfs( imageAddress, event );\n            }\n        });\n\n\n        ProcessManager manager = regimentClient.processManager();\n        ProcessEventHandler eventHandler = new ProcessEventHandler() {\n            @Override\n            public void fired(EntryPointRunnable runnable, ProcessEvent event ) {\n                Debug.bluef( runnable, event );\n            }\n        };\n\n        ExecutionImage image = new LocalHostedClassImage( \"image_c\", new ArchEntryPointRunnable( eventHandler ) {\n            @Override\n            public int main( Map<String, String[]> args ) {\n                Debug.greenfs( \"Hello, hi, I am `\" + this.ownedProcess().getName() + \"`!\" );\n                Debug.sleep( 1000 );\n                Debug.greenfs( \"Miao~\" );\n\n                //throw new IrrationalProvokedException();\n                return 1984;\n            }\n        }, manager );\n        manager.getImageLoader().registerLocalScopeExecutionImage( \"hola/senorita\", image );\n\n\n\n\n\n        RavenTaskScheduler scheduler = (RavenTaskScheduler) odin.taskScheduler();\n        scheduler.fetch();\n    }\n\n\n}\npublic class TestRuntime {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Rick rick = (Rick) Pinecone.sys().getTaskManager().add( new Rick( args, Pinecone.sys() ) );\n            rick.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestSFM.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.kafka.WolfMCKafkaClient;\nimport com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;\nimport com.pinecone.hydra.umb.wolf.WolfMCBClient;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.tritium.Tritium;\nimport com.walnut.sailor.stream.fm.SFMConfig;\nimport com.walnut.sailor.stream.fm.SFMSessionValidatorController;\nimport com.walnut.sailor.stream.fm.SailorFMConfig;\nimport com.walnut.sailor.stream.fm.SailorFMDistributionService;\nimport com.walnut.sailor.stream.fm.SingleStreamFileMultiDistributionService;\nimport com.walnut.sailor.stream.fm.event.SFMEventSubscriber;\n\nclass Lois extends Tritium {\n    public Lois( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Lois( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        this.testSFM();\n    }\n\n    private void testSFM() throws Exception {\n        UlfBroadcastControlNode controlNode  = new WolfMCBClient(new WolfMCKafkaClient(\"b-serverkingpin:9092\"), \"\", this, WolfMCExpress.class);\n        SFMConfig config = new SailorFMConfig( new JSONMaptron(\"{\\n\" +\n                \"      \\\"fileFrameSize\\\": 972800,\\n\" +\n                \"      \\\"sessionExpiredTimeMillis\\\": 7200000,\\n\" +\n                \"      \\\"fileCloudDistributeTransmitTopic\\\": \\\"ucdn-file-cloud-distribute-transmit-topic\\\",\\n\" +\n                \"      \\\"fileServiceTransmitGroup\\\": \\\"UCDNFileServiceTransmitGroup\\\",\\n\" +\n                \"      \\\"storageDirectory\\\": \\\"E:/fs/\\\",\\n\" +\n                \"    
}\") );\n        SingleStreamFileMultiDistributionService service = new SailorFMDistributionService( controlNode, config );\n        service.registerDirectionRoute( \"major\", \"E:/fs/\" );\n\n        service.registerFileTransmitCompleteEventSubscriber(new SFMEventSubscriber() {\n            @Override\n            public void afterEventTriggered( String path, String fileName, String directoryPath ) {\n                Debug.greenfs( \"MiaoMiao~\", path, fileName, directoryPath );\n            }\n        });\n\n        service.start();\n\n        BroadcastControlConsumer consumer = controlNode.createBroadcastControlConsumer( config.getFileCloudDistributeTransmitTopic(), config.getFileServiceTransmitGroup() );\n        consumer.registerController( new SFMSessionValidatorController() );\n\n        service.distributeFile( \"Ton Koopman - Toccata and Fugue in D minor, BWV 565 b.flac\", \"E:/\", \"major\" );\n    }\n}\n\n\npublic class TestSFM {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Lois lois = (Lois) Pinecone.sys().getTaskManager().add( new Lois( args, Pinecone.sys() ) );\n            lois.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestServiceManager.java",
    "content": "package com.sparta;\n\nimport com.acorn.redqueen.service.conduct.RedCollectiveServiceRegiment;\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.UniformServiceInstrument;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;\nimport com.pinecone.hydra.service.registry.server.UniformServiceManager;\nimport com.pinecone.hydra.service.registry.client.HuskyServiceClient;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\nimport com.pinecone.hydra.service.registry.dto.ServiceMetaDTO;\nimport com.pinecone.hydra.service.registry.ulf.HuskyServiceAppointServer;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.uma.HuskyDuplexExpress;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointClient;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointServer;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.i64.GuidAllocator72V2;\n\nimport java.util.List;\n\nclass Brian extends Tritium {\n    public Brian( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Brian( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new 
ServiceMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        UniformServiceInstrument servicesTree = new UniformServiceInstrument( koiMappingDriver );\n\n        WolfMCServer wolfKing = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n\n        UniformServiceManager serviceManager = new UniformServiceManager( servicesTree );\n        serviceManager.hookAppointServer( new HuskyServiceAppointServer( new WolvesAppointServer( wolfKing, HuskyDuplexExpress.class ) ));\n        RedCollectiveServiceRegiment serviceRegiment = new RedCollectiveServiceRegiment(this, servicesTree, serviceManager);\n\n        serviceRegiment.startServiceManage();\n\n\n        UlfClient ulfClient = new WolfMCClient(\n                new GuidAllocator72V2().nextGUIDi64(), \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" )\n        );\n        HuskyServiceClient managerClient = new HuskyServiceClient( ulfClient, servicesTree.getGuidAllocator() );\n        managerClient.startService();\n\n        this.testUniformServiceRegister_Proactive( managerClient );\n\n        //this.oldTest( servicesTree );\n    }\n\n    public void testUniformServiceRegister_Proactive( HuskyServiceClient managerClient ) throws Exception {\n        DuplexAppointClient client = managerClient.getAppointNodus();\n        ServiceMetaManipulationIface metaIface = client.getIface(ServiceMetaManipulationIface.class);\n        ServiceMetaDTO meta = metaIface.queryServiceMetaByPath( \"root/test/app/ser\" );\n        Debug.greenfs( meta );\n\n        String guid = metaIface.evalCreationStatement( \"{ root: { test: { app: { metaType: ApplicationElement, alias:as, services: { test1: { metaType: 
ServiceElement, type: Microservice } } } } } }\" );\n        ServiceMetaDTO meta1 = metaIface.queryServiceMetaByPath( \"root/test/app/test1\" );\n        Debug.greenfs( meta1 );\n\n        managerClient.registerService( managerClient.getGuidAllocator().parse(meta1.getGuid()), null );\n\n        List<ServiceMetaDTO> serviceMetaDTOS = metaIface.fetchServiceInsMetaByServiceId( meta1.getGuid() );\n        Debug.bluefs( serviceMetaDTOS );\n\n        //managerClient.deregister();\n        client.close();\n\n        //Debug.trace(iface.hasOwnedServiceByServiceId( \"181e9e6-000395-0000-94\" ));\n    }\n\n\n\n    private void oldTest(ServiceInstrument servicesTree) throws Exception {\n        WolfMCServer          wolfKing = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n        WolvesAppointServer wolfServer = new WolvesAppointServer( wolfKing, HuskyDuplexExpress.class );\n        UniformServiceManager serviceManager = new UniformServiceManager( servicesTree );\n        serviceManager.hookAppointServer( new HuskyServiceAppointServer( wolfServer ) );\n        wolfKing.execute();\n\n        Debug.sleep( 500 );\n\n\n        DuplexAppointClient wolf = new WolvesAppointClient(\n                new WolfMCClient( 2048, \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" ) )\n        );\n        wolf.execute();\n        wolf.compile( ServiceLifecycleIface.class, false );\n        wolf.compile( ServiceMetaManipulationIface.class, false );\n        this.testServiceRegister( wolf );\n    }\n\n    public void testServiceRegister( DuplexAppointClient client ) {\n        ServiceLifecycleIface iface = client.getIface( ServiceLifecycleIface.class );\n        ServiceMetaManipulationIface metaIface = client.getIface(ServiceMetaManipulationIface.class);\n\n        RegisterServiceDTO 
serviceDTO1 = new RegisterServiceDTO();\n        serviceDTO1.setServiceId( \"1769872-0002d2-0003-cc\" );\n        serviceDTO1.setClientId( 1234L );\n\n        RegisterServiceDTO serviceDTO2 = new RegisterServiceDTO();\n        serviceDTO2.setServiceId( \"181e9e6-000395-0000-94\" );\n        serviceDTO2.setClientId(1235L);\n\n        iface.registerService( serviceDTO1 );\n        iface.registerService( serviceDTO2 );\n\n        List<ServiceMetaDTO> serviceMetaDTOS = metaIface.fetchServiceInsMetaByServiceId( \"1769872-0002d2-0003-cc\" );\n        Debug.trace( serviceMetaDTOS );\n\n        iface.deregisterServiceByInstanceId( \"181e9e6-000395-0000-94\" );\n//\n        Debug.trace(iface.hasOwnedServiceByServiceId( \"181e9e6-000395-0000-94\" ));\n\n    }\n\n}\n\npublic class  TestServiceManager{\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Brian brian = (Brian) Pinecone.sys().getTaskManager().add( new Brian( args, Pinecone.sys() ) );\n            brian.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestServiceTree.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.UniformServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.GenericServiceElement;\nimport com.pinecone.hydra.service.kom.marshaling.ServiceJSONDecoder;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.GUIDs;\n\n\nclass Jesse extends Tritium {\n    public Jesse( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Jesse( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new ServiceMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        UniformServiceInstrument servicesTree = new UniformServiceInstrument( koiMappingDriver );\n        //this.testInsert( servicesTree );\n        this.testGet( servicesTree );\n        //this.testDelete( servicesTree );\n    }\n\n    private void testInsert( ServiceInstrument serviceInstrument){\n//        GenericNamespace namespace = new GenericNamespace();\n//        namespace.setName( \"Test1\" );\n//        serviceInstrument.put( namespace );\n\n        //Debug.trace( serviceInstrument.get( GUIDs.GUID72(\"03c2f90-000133-000 0-44\") ) );\n\n\n//        GenericApplicationElement applicationNode = new GenericApplicationElement(\n//                new 
JSONMaptron( \"{ name:specialApp, alias:jesus, deploymentMethod:Container, path:'/xxx/xxx/ggg', resourceType:human,\" +\n//                        \"type:Social, description: 'This is jesus', extraInformation: 'more', level:'L1', primaryImplLang: java, scenario:'/scenario/dragon/king'  }\" )\n//        );\n//\n//        applicationNode.apply( new JSONMaptron( \"{ name:specialApp2, deploymentMethod:VM }\" ) );\n//        serviceInstrument.put( applicationNode );\n\n        GenericServiceElement serviceNode = new GenericServiceElement(\n                new JSONMaptron( \"{ name:'特殊服务', alias:jesus, serviceType:System, path:'/xxx/xxx/ggg', resourceType:human,\" +\n                        \"type:Social, description: 'This is special', extraInformation: 'more', level:'L1', primaryImplLang: java, scenario:'/scenario/dragon/king'  }\" )\n        );\n        serviceInstrument.put( serviceNode );\n    }\n\n    private void testGet( ServiceInstrument serviceInstrument ){\n        //Debug.trace( serviceInstrument.queryGUIDByPath( \"规则1/很好的服务/我的世界\" ) );\n        //Debug.trace( serviceInstrument.getPath(GUIDs.GUID72( \"03c4a36-000381-0000-48\" ) ) );\n        //Debug.trace( serviceInstrument.get( GUIDs.GUID72(\"03e60e8-0000ae-0000-20\") ) );\n        //Debug.trace( serviceInstrument.get( GUIDs.GUID72(\"03e60e8-0000c5-0000-48\") ) );\n        //Debug.trace( serviceInstrument.get( GUIDs.GUID72(\"03e60e8-000117-0000-18\") ) );\n//        Debug.trace( servicesTree.get( GUIDs.GUID72( \"02be396-0001e9-0000-e4\" ) ) );\n        //Debug.trace( serviceInstrument.affirmApplication( \"Test1/App1\" ) );\n\n//        Debug.trace( serviceInstrument.affirmService( \"root/特殊服务\" ) );\n//        Debug.trace( serviceInstrument.affirmApplication( \"root/species/orc\" ) );\n//        Debug.trace( serviceInstrument.affirmNamespace(\"root/species\") );\n//        Debug.trace( serviceInstrument.affirmNamespace( \"root\" ).fetchChildren() );\n//\n//        serviceInstrument.affirmApplication( 
\"root/species/orc\" ).addChild( new GenericServiceElement( new JSONMaptron( \"{ name: slaughter }\" ) ) );\n//\n//        Debug.trace( serviceInstrument.affirmApplication( \"root/species/orc\" ).fetchChildren() );\n//\n//        Debug.trace( serviceInstrument.queryElement( \"root/species/orc/slaughter\" ).toJSONObject() );\n//\n//        serviceInstrument.affirmNamespace( \"root\" ).addChild( new GenericNamespace( new JSONMaptron( \"{ name: weapon, scenario: s1, description: d1, level:L1, primaryImplLang:Java }\" ) ) );\n//\n//        Debug.fmp( 2, serviceInstrument.queryElement( \"root/weapon\" ).evinceNamespace().toJSONDetails() );\n\n\n\n\n        ServiceJSONDecoder decoder = new ServiceJSONDecoder( serviceInstrument );\n        decoder.decode( new JSONMaptron( \"{ root: { test: { app: { metaType: ApplicationElement, alias:as, services: { ser: { metaType: ServiceElement, type: Microservice } } } } } }\" ) );\n\n        Debug.fmp( 2, serviceInstrument.queryElement( \"root\" ).toJSONObject() );\n        //Debug.trace(serviceInstrument.getPath( GUIDs.GUID72(\"181e9e4-000395-0000-d4\") ));\n    }\n\n    private void testDelete( ServiceInstrument serviceInstrument){\n        serviceInstrument.remove( GUIDs.GUID128(\"181e9e4-000395-0000-d4\") );\n    }\n}\n\npublic class TestServiceTree {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Jesse Jesse = (Jesse) Pinecone.sys().getTaskManager().add( new Jesse( args, Pinecone.sys() ) );\n            Jesse.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestSparta.java",
    "content": "package com.sparta;\n\nimport org.springframework.context.ApplicationContextInitializer;\nimport org.springframework.context.ConfigurableApplicationContext;\nimport org.springframework.context.support.GenericApplicationContext;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;\nimport com.pinecone.hydra.storage.file.FileSystemConfig;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.KernelFileSystemConfig;\nimport com.pinecone.hydra.storage.file.UniformObjectFileSystem;\nimport com.pinecone.hydra.storage.volume.KernelVolumeConfig;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.tritium.Tritium;\nimport com.walnut.sparta.Sparta;\nimport com.walnut.sparta.SpartaBoot;\n\nclass JesusChrist extends Tritium {\n    public JesusChrist( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public JesusChrist( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        Sparta sparta = new Sparta( \"Sparta\", this );\n\n\n        Thread shutdowner = new Thread(()->{\n            Debug.sleep( 5000 );\n            sparta.terminate();\n        });\n        //shutdowner.start();\n\n\n\n        KOIMappingDriver koiMappingDriver = new VolumeMappingDriver(\n                this, 
(IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        KOIMappingDriver koiFileMappingDriver = new FileMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        JSONConfig selfConfig = new JSONConfig();\n        FileSystemConfig fileSystemConfig = new KernelFileSystemConfig( selfConfig.queryJSONObject( \"service.PrimaryUniformFileSystem\" ) );\n        VolumeConfig volumeConfig = new KernelVolumeConfig( selfConfig.queryJSONObject( \"service.PrimaryUniformVolumeManager\" ) );\n        KOMFileSystem fileSystem = new UniformObjectFileSystem( koiFileMappingDriver, fileSystemConfig );\n        UniformVolumeManager volumeTree = new UniformVolumeManager( koiMappingDriver, volumeConfig );\n\n\n        sparta.setPrimarySources( SpartaBoot.class );\n\n        sparta.setInitializer(new Executor() {\n            @Override\n            public void execute() throws Exception {\n                sparta.getSpringApplication().addInitializers(new ApplicationContextInitializer<ConfigurableApplicationContext>() {\n                    @Override\n                    public void initialize( ConfigurableApplicationContext applicationContext ) {\n                        GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext;\n                        genericApplicationContext.registerBean(\"primaryFileSystem\", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem)fileSystem);\n                        genericApplicationContext.registerBean(\"primaryVolume\", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree);\n                    }\n                });\n            }\n        });\n\n\n        sparta.execute();\n\n\n\n\n\n        this.getTaskManager().add( sparta );\n        
this.getTaskManager().syncWaitingTerminated();\n    }\n}\n\n\npublic class TestSparta {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) );\n            jesus.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestTaskTree.java",
    "content": "package com.sparta;\n\nimport java.util.Map;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.proc.ProcessManager;\nimport com.pinecone.hydra.proc.event.ProcessEvent;\nimport com.pinecone.hydra.proc.event.ProcessEventHandler;\nimport com.pinecone.hydra.proc.event.ProcessLifecycleHandler;\nimport com.pinecone.hydra.proc.image.ArchEntryPointRunnable;\nimport com.pinecone.hydra.proc.image.EntryPointRunnable;\nimport com.pinecone.hydra.proc.image.ExecutionImage;\nimport com.pinecone.hydra.proc.image.LocalHostedClassImage;\nimport com.pinecone.hydra.task.kom.TaskInstrument;\nimport com.pinecone.hydra.task.kom.entity.GenericTaskElement;\nimport com.pinecone.hydra.task.kom.entity.TaskElement;\nimport com.pinecone.hydra.task.kom.instance.InstanceEntry;\nimport com.pinecone.hydra.task.kom.instance.InstanceInstrument;\nimport com.pinecone.hydra.task.kom.marshaling.TaskJSONDecoder;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.archcraft.ender.EnderHydra;\nimport com.walnut.odin.conduct.CollectiveTaskRegiment;\nimport com.walnut.odin.conduct.CollectiveTaskLegionary;\nimport com.walnut.odin.conduct.RavenCollectiveTaskRegiment;\nimport com.walnut.odin.conduct.RavenCollectiveTaskLegionary;\nimport com.walnut.odin.conduct.entity.LaunchedContext;\nimport com.walnut.odin.task.CentralizedTaskInstrument;\nimport com.walnut.odin.task.GenericRavenTaskConfig;\nimport com.walnut.odin.task.RavenTaskInstrument;\nimport com.walnut.odin.task.dto.CategoryTag;\nimport com.walnut.odin.task.dto.GenericCategoryTag;\nimport 
com.walnut.odin.task.mapper.OdinUniformTaskMappingDriver;\nimport com.walnut.odin.task.service.CategoryService;\nimport com.walnut.odin.task.troll.LaunchFeature;\n\n\nclass Randy extends EnderHydra {\n    public Randy( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Randy( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        OdinUniformTaskMappingDriver categoryMappingDriver = new OdinUniformTaskMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        RavenTaskInstrument ravenTaskInstrument = new RavenTaskInstrument( categoryMappingDriver, new GenericRavenTaskConfig() );\n\n        //this.testCategory( ravenTaskInstrument );\n\n        //this.testInsert( ravenTaskInstrument );\n        //this.testGet( ravenTaskInstrument );\n        //this.testDelete( instrument );\n\n        //this.testInstance( ravenTaskInstrument );\n\n        //CollectiveTaskRegiment taskRegiment = new RavenCollectiveTaskRegiment( this, ravenTaskInstrument );\n        //this.testTaskRegimentBase( taskRegiment ,ravenTaskInstrument);\n\n\n        this.testInstanceLaunch( ravenTaskInstrument );\n    }\n\n    private void testTaskRegimentBase( CollectiveTaskRegiment regiment ,  RavenTaskInstrument instrument) {\n/*      TaskElement taskElement = new GenericTaskElement();\n        taskElement.setName(\"spartaTest00058\");\n        taskElement.setType(\"sparta\");\n        taskElement.setResourceType(\"spartaTest00058\");\n        taskElement.setImagePath(\"spartaTest0005\");\n        taskElement.setDeploymentMethod(\"spartaTest0017\");\n        taskElement.setPriority(1);\n        taskElement.setActuallyPriority(1);\n        taskElement.setDryRun(true);\n         RavenTask task = regiment.createTask( 
 taskElement, taskElement.getGuid());\n          Debug.trace(task);\n      RavenTaskInstance instance = task.createInstance();\n      Debug.trace(instance);\n      Debug.trace(instrument.queryElement(\"spartaTest00058\"));*/\n        TaskElement  taskElement = (TaskElement) instrument.queryElement(\"spartaTest00058\");\n        Debug.trace(taskElement);\n        taskElement.setName(\"spartaTest00059855\");\n        Debug.trace(taskElement);\n        Debug.trace(instrument.getPath( GUIDs.GUID128(\"01977911-62b1-70f3-bd4f-060e889c088e\")));\n     regiment.affirmTask(\"spartaTest00058\", GUIDs.GUID128(\"01977911-62b1-70f3-bd4f-060e889c088e\"), taskElement);\n//regiment.purgeTask(GUIDs.GUID128(\"019776f2-e80a-7675-ba4b-7d1d415d8088\"));\n    }\n\n    private void testCategory( RavenTaskInstrument instrument ) {\n        CategoryService categoryService = instrument.getCategoryService();\n\n        CategoryTag tag = new GenericCategoryTag();\n        tag.setCategoryName( \"Data\" );\n        tag.setCategoryType( \"System\" );\n        Debug.greenfs( categoryService.setCategoryTag( \"root/test/job/task\", tag ) );\n    }\n\n    private void testInsert( RavenTaskInstrument instrument ) {\n//        GenericNamespace namespace = new GenericNamespace();\n//        namespace.setName( \"Test1\" );\n//        instrument.put( namespace );\n\n        //Debug.trace( instrument.get( GUIDs.GUID72(\"03c2f90-000133-000 0-44\") ) );\n\n\n//        GenericApplicationElement applicationNode = new GenericApplicationElement(\n//                new JSONMaptron( \"{ name:specialApp, alias:jesus, deploymentMethod:Container, path:'/xxx/xxx/ggg', resourceType:human,\" +\n//                        \"type:Social, description: 'This is jesus', extraInformation: 'more', level:'L1', primaryImplLang: java, scenario:'/scenario/dragon/king'  }\" )\n//        );\n//\n//        applicationNode.apply( new JSONMaptron( \"{ name:specialApp2, deploymentMethod:VM }\" ) );\n//        instrument.put( 
applicationNode );\n        for( int i = 1; i <=12; i++ ) {\n            GenericTaskElement taskElement = new GenericTaskElement(\n                    new JSONMaptron( \"{ name:'测试服务\"+i+\"', alias:jesus, serviceType:System, resourceType:human,\" +\n                            \"type:Social, description: 'This is special', extraInformation: 'more', level:'L1', primaryImplLang: java, scenario:'/scenario/dragon/king'  }\" )\n            );\n            instrument.put( taskElement );\n        }\n\n\n\n          //instrument.affirmOwnedNode( GUIDs.GUID128(\"01972f5a-7edc-79bd-9655-ea50ae5b0887\"),GUIDs.GUID128(\"01972f5b-4d56-7d1c-811c-b855bfdb5dcb\") );\n        //instrument.newHardLink( GUIDs.GUID128(\"01972f5a-7edc-79bd-9655-ea50ae5b0887\"), GUIDs.GUID128(\"01972f59-4049-77fc-827f-a9976425c01c\") );\n    }\n\n    private void testGet( TaskInstrument instrument ){\n        //Debug.trace( instrument.queryGUIDByPath( \"规则1/很好的服务/我的世界\" ) );\n        //Debug.trace( instrument.getPath(GUIDs.GUID72( \"03c4a36-000381-0000-48\" ) ) );\n        //Debug.trace( instrument.get( GUIDs.GUID72(\"03e60e8-0000ae-0000-20\") ) );\n        //Debug.trace( instrument.get( GUIDs.GUID72(\"03e60e8-0000c5-0000-48\") ) );\n        //Debug.trace( instrument.get( GUIDs.GUID72(\"03e60e8-000117-0000-18\") ) );\n//        Debug.trace( instrument.get( GUIDs.GUID72( \"02be396-0001e9-0000-e4\" ) ) );\n        //Debug.trace( instrument.affirmApplication( \"Test1/App1\" ) );\n\n//        Debug.trace( instrument.affirmService( \"root/特殊服务\" ) );\n//        Debug.trace( instrument.affirmApplication( \"root/species/orc\" ) );\n//        Debug.trace( instrument.affirmNamespace(\"root/species\") );\n//        Debug.trace( instrument.affirmNamespace( \"root\" ).fetchChildren() );\n//\n//        instrument.affirmApplication( \"root/species/orc\" ).addChild( new GenericServiceElement( new JSONMaptron( \"{ name: slaughter }\" ) ) );\n//\n//        Debug.trace( instrument.affirmApplication( \"root/species/orc\" 
).fetchChildren() );\n//\n//        Debug.trace( instrument.queryElement( \"root/species/orc/slaughter\" ).toJSONObject() );\n//\n//        instrument.affirmNamespace( \"root\" ).addChild( new GenericNamespace( new JSONMaptron( \"{ name: weapon, scenario: s1, description: d1, level:L1, primaryImplLang:Java }\" ) ) );\n//\n//        Debug.fmp( 2, instrument.queryElement( \"root/weapon\" ).evinceNamespace().toJSONDetails() );\n\n\n\n\n        TaskJSONDecoder decoder = new TaskJSONDecoder( instrument );\n        decoder.decode( new JSONMaptron( \"{ root: { test: { job: { metaType: AppElement, type:SysJob, tasks: { task: { metaType: TaskElement, type: SparkTask } } } } } }\" ) );\n\n        Debug.fmp( 2, instrument.queryElement( \"root\" ).toJSONObject() );\n//        GUID128 guid128 = GUIDs.GUID128(\"019714af-e0ec-7f2a-94a3-cd740efccb6c\");\n//        Debug.trace(instrument.getPath( GUIDs.GUID128(\"019714af-e0ec-7f2a-94a3-cd740efccb6c\") ));\n    }\n\n    private void testDelete( TaskInstrument instrument ) {\n        instrument.remove( GUIDs.GUID128(\"181e9e4-000395-0000-d4\") );\n    }\n\n    private void testInstance( TaskInstrument instrument ) {\n        InstanceInstrument instanceInstrument = instrument.getInstanceInstrument();\n\n        GUID taskGuid = instrument.queryGUIDByPath( \"root/test/job/task\" );\n        //InstanceEntry instanceEntry = new GenericInstanceEntry( new JSONMaptron( \"{priority:456, taskType: Spark, instanceName: test123}\" ) );\n\n        //instanceInstrument.addInstance( taskGuid, instanceEntry );\n\n        //Debug.fmp( 2, instanceEntry );\n\n//        TaskElement taskElement = (TaskElement) instrument.queryElement( \"root/test/job/task\" );\n//        taskElement.setImagePath( \"uofs:///sys/public/global/exe/images/hola/senorita/image_c\" );\n//        instrument.update( taskElement );\n\n        InstanceEntry instanceEntry = instanceInstrument.makeInstanceEntry( taskGuid );\n        Debug.fmp( 2, instanceEntry );\n    }\n\n\n\n\n\n   
 private void testInstanceLaunch( TaskInstrument instrument ) throws Exception {\n        WolfMCServer wolfKing = new WolfMCServer( \"\", this, new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n        CollectiveTaskRegiment regiment = new RavenCollectiveTaskRegiment( this, (CentralizedTaskInstrument) instrument, wolfKing );\n        regiment.startRemoteProcessServer();\n\n\n\n        UlfClient ulfClient = new WolfMCClient(\n                this.getSystemGuidAllocator72().nextGUIDi64(), \"\", this, this.getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" )\n        );\n        CollectiveTaskLegionary regimentClient = new RavenCollectiveTaskLegionary( \"jesus\", this, ulfClient );\n        regimentClient.startService();\n        regimentClient.joinRegiment();\n\n        regimentClient.remoteProcessManagerClient().addProcessLifecycleHandler(new ProcessLifecycleHandler() {\n            @Override\n            public void fired( String imageAddress, EntryPointRunnable runnable, ProcessEvent event ) {\n                Debug.greenfs( imageAddress, event );\n            }\n        });\n\n\n\n\n\n\n//        TaskExecutionLauncher launcher = regiment.taskExecutionLauncher();\n//        GUID taskGuid = instrument.queryGUIDByPath( \"root/test/job/task\" );\n//        RavenTask task = regiment.getTaskByGuid( taskGuid );\n//        RavenTaskInstance instance = task.createInstance();\n\n\n        //ProcessManager manager = this.processManager();\n        ProcessManager manager = regimentClient.processManager();\n        ProcessEventHandler eventHandler = new ProcessEventHandler() {\n            @Override\n            public void fired(EntryPointRunnable runnable, ProcessEvent event ) {\n                Debug.bluef( runnable, event );\n            }\n        };\n\n        ExecutionImage image = new LocalHostedClassImage( 
\"image_c\", new ArchEntryPointRunnable( eventHandler ) {\n            @Override\n            public int main( Map<String, String[]> args ) {\n                Debug.greenfs( \"Hello, hi, I am `\" + this.ownedProcess().getName() + \"`!\" );\n                Debug.sleep( 1000 );\n                Debug.greenfs( \"Miao~\" );\n\n                //throw new IrrationalProvokedException();\n                return 1984;\n            }\n        }, manager );\n        manager.getImageLoader().registerLocalScopeExecutionImage( \"hola/senorita\", image );\n\n\n\n        LaunchFeature feature = new LaunchFeature();\n        //UProcess uProcess = launcher.createLocally( instance, feature );\n        //UProcess uProcess = launcher.createRemotely( instance, client.getClientId(), feature );\n\n        //uProcess.start();\n\n        //launcher.launchRemotely( instance, client.getClientId(), feature );\n        //launcher.launchLocally( instance, feature );\n\n\n\n\n//        // Test processor\n//        GenericTaskProcessorEntity processorEntity = new GenericTaskProcessorEntity(\n//                new JSONMaptron(\"{name:r1, clusterPath:'/r1', clusterName: 'r1', local: false, priority: 100, queueMeta: {\" +\n//                        \"name: r1_q, maxCapacity: 100, minCapacity: 100, runtimeInstanceCapacity: 50}}\" +\n//                        \"}}\")\n//        );\n//        processorEntity.setControlClientId( client.getClientId() );\n//\n//        TaskExecutionProcessor processor = new RavenTaskExecutionProcessor(processorEntity, launcher);\n//        processor.pipeLaunch( List.of( TaskLaunchContext.of( feature, instance ) ) );\n\n\n\n        //TaskDispatcher taskDispatcher = regiment.taskDispatcher();\n        //UProcess process = taskDispatcher.create( instance, feature );\n        LaunchedContext context = regiment.create( \"root/test/job/task\", feature );\n        Debug.greenfs( context.getProcess().getPID() );\n\n    }\n\n\n\n\n\n}\n\npublic class TestTaskTree {\n    public 
static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Randy Jesse = (Randy) Pinecone.sys().getTaskManager().add( new Randy( args, Pinecone.sys() ) );\n            Jesse.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestUOFS.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.framework.util.json.JSONObject;\nimport com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;\nimport com.pinecone.hydra.storage.file.FileSystemConfig;\nimport com.pinecone.hydra.storage.file.KernelFileSystemConfig;\nimport com.pinecone.hydra.storage.file.UniformObjectFileSystem;\nimport com.pinecone.hydra.storage.file.external.GenericNativeExternalFolder;\nimport com.pinecone.hydra.storage.file.external.KenExternalFileSystemInstrument;\nimport com.pinecone.hydra.storage.file.entity.ClusterPage;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.builder.ComponentUOFSBuilder;\nimport com.pinecone.hydra.storage.file.builder.UOFSBuilder;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;\nimport com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64;\nimport com.pinecone.hydra.storage.volume.KernelVolumeConfig;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.storage.volume.VolumeManager;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.framework.util.id.GuidAllocator;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128;\nimport 
com.pinecone.ulf.util.guid.i128.GuidAllocator128V1;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V2;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V3;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V4;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V5;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V6;\nimport com.pinecone.ulf.util.guid.i128.GuidAllocator128V7;\nimport com.pinecone.ulf.util.guid.i128.GUID128;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\n\nclass Steve extends Tritium {\n    public Steve( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Steve( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new FileMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        KOIMappingDriver koiVolumeMappingDriver = new VolumeMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n\n        JSONObject jo = new JSONMaptron( \"{ DefaultVolumeGuid:'1788a74-000136-0000-f8', DefaultTempFilePath: 'D:/文件系统/temp/' }\" );\n        FileSystemConfig config = new KernelFileSystemConfig( jo );\n        VolumeConfig volumeConfig = new KernelVolumeConfig( jo );\n\n        UOFSBuilder builder = new ComponentUOFSBuilder( koiMappingDriver, config );\n        KOMFileSystem fileSystem = new UniformObjectFileSystem( koiMappingDriver, config );\n//        FileSystemCacheConfig cacheConfig = new MappedFileSystemCacheConfig(new JSONMaptron(\"{redisHost: \\\"47.115.216.203\\\",redisPort: 6379, 
redisTimeOut: 2000, redisPassword: 1234abcd, redisDatabase: 0}\"));\n//        KOMFileSystem fileSystem = builder.registerComponentor( new UOFSCacheComponentor(cacheConfig) ).buildByRegistered();\n        UniformVolumeManager volumeManager = new UniformVolumeManager(koiVolumeMappingDriver, volumeConfig);\n        GuidAllocator guidAllocator = fileSystem.getGuidAllocator();\n        //Debug.trace( fileSystem.get( GUIDs.GUID72( \"020c8b0-000006-0002-54\" ) ) );\n        this.testInsert( fileSystem );\n        //this.testUpload(fileSystem);\n        //this.testDelete( fileSystem );\n        //this.testChannelReceive( fileSystem, volumeManager );\n        //this.testChannelExport( fileSystem, volumeManager );\n        //this.testQuery( fileSystem );\n        //this.testExternal( fileSystem );\n        //this.testCopy( fileSystem,volumeManager );\n\n\n        //this.testClusterPage( fileSystem );\n//        GuidAllocator128 guidAllocator128 = new GuidAllocator128V1();\n//        Debug.trace(\"Guid128V1：\" + guidAllocator128.nextGUID() );\n//        guidAllocator128 = new GuidAllocator128V2();\n//        Debug.trace(\"Guid128V2：\" + guidAllocator128.nextGUID() );\n//        guidAllocator128 = new GuidAllocator128V3();\n//        Debug.trace(\"Guid128V3：\" + guidAllocator128.nextGUID() );\n//        guidAllocator128 = new GuidAllocator128V4();\n//        Debug.trace(\"Guid128V4：\" + guidAllocator128.nextGUID() );\n//        guidAllocator128 = new GuidAllocator128V5();\n//        Debug.trace(\"Guid128V5：\" + guidAllocator128.nextGUID() );\n//        guidAllocator128 = new GuidAllocator128V6();\n//        Debug.trace(\"Guid128V6：\" + guidAllocator128.nextGUID() );\n//        guidAllocator128 = new GuidAllocator128V7();\n//        GUID128 g = (GUID128) guidAllocator128.nextGUID();\n//        Debug.trace(\"Guid128V7：\" + g, g.toUUID() );\n//        Debug.trace( guidAllocator128.parse(\"00000000-0000-0000-0000-000000000000\") );\n    }\n\n    private void testQuery ( 
KOMFileSystem fileSystem ){\n        Debug.trace( fileSystem.queryGUIDByPath(\"我的文件/总2127.mp4\") );\n    }\n\n    private void testInsert( KOMFileSystem fileSystem ){\n        fileSystem.affirmFolder(\"game/我的世界\");\n        fileSystem.affirmFileNode(\"game/我的世界/村民\");\n        fileSystem.affirmFileNode(\"game/我的世界/暮色森林/暮色惡魂\");\n        fileSystem.affirmFileNode(\"game/泰拉瑞亚/腐化之地/世界吞噬者\");\n        fileSystem.affirmFileNode(\"movie/生还危机/浣熊市\");\n    }\n\n    private void testCopy(KOMFileSystem fileSystem, VolumeManager volumeManager) {\n//        fileSystem.copy(\"我的文件/图片\",\"我的文件/我的文件\",volumeManager);\n        FileNode fileNode = fileSystem.getFileNode(GUIDs.GUID128(\"14bc124-00012c-0004-f8\"));\n        Debug.trace( fileNode.getPath() );\n    }\n\n    private void testExternal(KOMFileSystem fileSystem){\n        KenExternalFileSystemInstrument directFileSystemAccess = new KenExternalFileSystemInstrument(fileSystem);\n//        GenericExternalSymbolic externalSymbolic = new GenericExternalSymbolic();\n//        externalSymbolic.setName(\"xxx\");\n//        externalSymbolic.setGuid( fileSystem.getGuidAllocator().nextGUID() );\n//        directFileSystemAccess.insertExternalSymbolic( externalSymbolic );\n\n        ElementNode e = fileSystem.queryElement( \"red\" );\n        //e.evinceFolder().createExternalSymbolic( \"external\" );\n\n\n\n//        ExternalFile externalFile = (GenericExternalFile)directFileSystemAccess.queryElement(\"我的文件/external/《智育》概要设计.docx\");\n//        Debug.trace(externalFile.getPath());\n        GenericNativeExternalFolder externalFolder = new GenericNativeExternalFolder(new File(\"D:/文件\"));\n        Debug.trace(externalFolder.getName());\n        Debug.trace(externalFolder.toJSONString());\n    }\n\n\n    private void testDelete(KOMFileSystem fileSystem ){\n        fileSystem.remove( \"game\" );\n        fileSystem.remove( \"movie\" );\n    }\n\n    private void testChannelReceive( KOMFileSystem fileSystem, UniformVolumeManager 
volumeManager ) throws IOException {\n        //LogicVolume volume = volumeManager.get(GUIDs.GUID72( \"09d62c0-00037e-0006-c8\" ));\n        FSNodeAllotment fsNodeAllotment = fileSystem.getFSNodeAllotment();\n        File file = new File(\"D:/井盖视频块/我的视频.mp4\");\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);\n        TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );\n        FileNode fileNode = fsNodeAllotment.newFileNode();\n        fileNode.setDefinitionSize( file.length() );\n        fileNode.setName( file.getName() );\n        String destDirPath = \"D:/井盖视频块/我的视频.mp4\";\n        TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( fileSystem, destDirPath, fileNode,titanFileChannelKChannel,volumeManager );\n        fileSystem.receive( receiveEntity );\n    }\n\n    private void testChannelExport( KOMFileSystem fileSystem, UniformVolumeManager volumeManager ) throws IOException {\n        FileNode fileNode = (FileNode) fileSystem.get(fileSystem.queryGUIDByPath(\"D:/井盖视频块/我的视频.mp4\"));\n        File file = new File(\"D:\\\\文件系统\\\\大文件\\\\我的视频.mp4\");\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);\n        TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );\n        TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64( fileSystem, volumeManager, fileNode, kChannel );\n        fileSystem.export( exportEntity );\n    }\n\n    private void testClusterPage( KOMFileSystem fileSystem ){\n        ClusterPage clusterPage = fileSystem.fetchClustersByFileGuid( GUIDs.GUID128( \"1632d6e-0001de-0003-e4\" ) );\n        long sum = clusterPage.getClusters();\n\n        for ( long i = 0; i < sum; ++i ) {\n            Debug.trace( clusterPage.getLocalCluster( i ) );\n        }\n\n    }\n\n}\npublic class TestUOFS {\n    public static void main( 
String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            Steve Steve = (Steve) Pinecone.sys().getTaskManager().add( new Steve( args, Pinecone.sys() ) );\n            Steve.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/com/sparta/TestVolume.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.io.TitanInputStreamChanface;\nimport com.pinecone.hydra.storage.io.TitanOutputStreamChanface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.UniformObjectFileSystem;\nimport com.pinecone.hydra.storage.volume.UnifiedTransmitConstructor;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.ExporterEntity;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.MountPoint;\nimport com.pinecone.hydra.storage.volume.entity.ReceiveEntity;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.SpannedVolume;\nimport com.pinecone.hydra.storage.TitanStorageExportIORequest;\nimport com.pinecone.hydra.storage.TitanStorageReceiveIORequest;\nimport com.pinecone.hydra.storage.volume.entity.VolumeAllotment;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.export.TitanSimpleExportEntity64;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.recevice.TitanSimpleReceiveEntity64;\nimport com.pinecone.hydra.storage.volume.entity.local.spanned.export.TitanSpannedExportEntity64;\nimport 
com.pinecone.hydra.storage.volume.entity.local.spanned.receive.SpannedReceiveEntity64;\nimport com.pinecone.hydra.storage.volume.entity.local.spanned.receive.TitanSpannedReceiveEntity64;\nimport com.pinecone.hydra.storage.volume.kvfs.KenVolumeFileSystem;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.pinecone.framework.util.id.GuidAllocator;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.FileOutputStream;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\nimport java.sql.SQLException;\n\n\nclass Alice extends Tritium {\n    public Alice( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public Alice( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n    @Override\n    public void vitalize () throws Exception {\n        KOIMappingDriver koiMappingDriver = new VolumeMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        KOIMappingDriver koiFileMappingDriver = new FileMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        //KOMFileSystem fileSystem = new UniformObjectFileSystem( koiFileMappingDriver, null );\n\n        UniformVolumeManager volumeTree = new UniformVolumeManager( koiMappingDriver, null );\n        VolumeAllotment volumeAllotment = volumeTree.getVolumeAllotment();\n\n\n        //this.testSimpleThread();\n        //this.testDirectReceive( volumeTree );\n        
//this.testDirectExport( volumeTree );\n        //Debug.trace( volumeTree.queryGUIDByPath( \"逻辑卷三/逻辑卷一\" ) );\n        //volumeTree.get( GUIDs.GUID72( \"05e44c4-00022b-0006-20\" ) ).build();\n        this.testStripedInsert( volumeTree );\n        //this.testSpannedInsert( volumeTree );\n        //this.testStripedReceive( volumeTree );\n        //this.testStripedExport( volumeTree );\n        //this.testHash( volumeTree );\n        //this.testSpannedReceive( volumeTree );\n        //this.testSpannedExport( volumeTree );\n        //this.testSimpleReceive( volumeTree );\n        //this.testSimpleExport( volumeTree );\n        //this.testConsumer( volumeTree );\n    }\n\n\n\n\n\n    private void testStripedInsert( UniformVolumeManager volumeManager ) throws SQLException {\n        VolumeAllotment volumeAllotment = volumeManager.getVolumeAllotment();\n        VolumeCapacity64 volumeCapacity1 = volumeAllotment.newVolumeCapacity();\n        volumeCapacity1.setDefinitionCapacity( 100*1024*1024 );\n        VolumeCapacity64 volumeCapacity2 = volumeAllotment.newVolumeCapacity();\n        volumeCapacity2.setDefinitionCapacity( 200*1024*1024 );\n\n        LocalPhysicalVolume physicalVolume1 = volumeAllotment.newLocalPhysicalVolume();\n        physicalVolume1.setType(\"PhysicalVolume\");\n        physicalVolume1.setVolumeCapacity( volumeCapacity1 );\n        physicalVolume1.setName( \"C\" );\n        MountPoint mountPoint1 = volumeAllotment.newMountPoint();\n        mountPoint1.setMountPoint(\"D:/文件系统/簇1\");\n        physicalVolume1.setMountPoint( mountPoint1 );\n\n        LocalPhysicalVolume physicalVolume2 = volumeAllotment.newLocalPhysicalVolume();\n        physicalVolume2.setType(\"PhysicalVolume\");\n        physicalVolume2.setVolumeCapacity( volumeCapacity2 );\n        physicalVolume2.setName( \"D\" );\n        MountPoint mountPoint2 = volumeAllotment.newMountPoint();\n        mountPoint2.setMountPoint( \"D:/文件系统/簇2\" );\n        physicalVolume2.setMountPoint( mountPoint2 
);\n\n        VolumeCapacity64 logicVolumeCapacity1 = volumeAllotment.newVolumeCapacity();\n        logicVolumeCapacity1.setDefinitionCapacity( 100*1024*1024 );\n        VolumeCapacity64 logicVolumeCapacity2 = volumeAllotment.newVolumeCapacity();\n        logicVolumeCapacity2.setDefinitionCapacity( 200*1024*1024 );\n        VolumeCapacity64 logicVolumeCapacity3 = volumeAllotment.newVolumeCapacity();\n        logicVolumeCapacity3.setDefinitionCapacity( 300*1024*1024 );\n\n        volumeManager.insertPhysicalVolume( physicalVolume1 );\n        volumeManager.insertPhysicalVolume( physicalVolume2 );\n\n        LocalSimpleVolume simpleVolume1 = volumeAllotment.newLocalSimpleVolume();\n        simpleVolume1.setName( \"简单卷一\" );\n        simpleVolume1.setType( \"SimpleVolume\" );\n        simpleVolume1.setVolumeCapacity( logicVolumeCapacity1 );\n\n        LocalSimpleVolume simpleVolume2 = volumeAllotment.newLocalSimpleVolume();\n        simpleVolume2.setName( \"简单卷二\" );\n        simpleVolume2.setVolumeCapacity( logicVolumeCapacity2 );\n        simpleVolume2.setType( \"SimpleVolume\" );\n\n        LocalStripedVolume stripedVolume = volumeAllotment.newLocalStripedVolume();\n        stripedVolume.setName( \"条带卷\" );\n        stripedVolume.setVolumeCapacity( logicVolumeCapacity3 );\n        stripedVolume.setType( \"StripedVolume\" );\n\n\n        simpleVolume1.build();\n        simpleVolume2.build();\n        stripedVolume.build();\n\n        simpleVolume1.extendLogicalVolume( physicalVolume1.getGuid() );\n        simpleVolume2.extendLogicalVolume( physicalVolume2.getGuid() );\n        stripedVolume.storageExpansion( simpleVolume1.getGuid() );\n        stripedVolume.storageExpansion( simpleVolume2.getGuid() );\n        //stripedVolume.storageExpansion( GUIDs.GUID72(\"0a21870-000251-0006-f0\") );\n    }\n\n    private void testSpannedInsert( UniformVolumeManager volumeManager ) throws SQLException {\n        VolumeAllotment volumeAllotment = 
volumeManager.getVolumeAllotment();\n        VolumeCapacity64 volumeCapacity1 = volumeAllotment.newVolumeCapacity();\n        volumeCapacity1.setDefinitionCapacity( 300*1024*1024 );\n        VolumeCapacity64 volumeCapacity2 = volumeAllotment.newVolumeCapacity();\n        volumeCapacity2.setDefinitionCapacity( 400*1024*1024 );\n\n        LocalPhysicalVolume physicalVolume1 = volumeAllotment.newLocalPhysicalVolume();\n        physicalVolume1.setType(\"PhysicalVolume\");\n        physicalVolume1.setVolumeCapacity( volumeCapacity1 );\n        physicalVolume1.setName( \"E\" );\n        MountPoint mountPoint1 = volumeAllotment.newMountPoint();\n        mountPoint1.setMountPoint(\"D:/文件系统/簇4\");\n        physicalVolume1.setMountPoint( mountPoint1 );\n\n        LocalPhysicalVolume physicalVolume2 = volumeAllotment.newLocalPhysicalVolume();\n        physicalVolume2.setType(\"PhysicalVolume\");\n        physicalVolume2.setVolumeCapacity( volumeCapacity2 );\n        physicalVolume2.setName( \"F\" );\n        MountPoint mountPoint2 = volumeAllotment.newMountPoint();\n        mountPoint2.setMountPoint( \"D:/文件系统/簇5\" );\n        physicalVolume2.setMountPoint( mountPoint2 );\n\n        VolumeCapacity64 logicVolumeCapacity1 = volumeAllotment.newVolumeCapacity();\n        logicVolumeCapacity1.setDefinitionCapacity( 300*1024*1024 );\n        VolumeCapacity64 logicVolumeCapacity2 = volumeAllotment.newVolumeCapacity();\n        logicVolumeCapacity2.setDefinitionCapacity( 400*1024*1024 );\n        VolumeCapacity64 logicVolumeCapacity3 = volumeAllotment.newVolumeCapacity();\n        logicVolumeCapacity3.setDefinitionCapacity( 700*1024*1024 );\n\n        LocalSimpleVolume simpleVolume1 = volumeAllotment.newLocalSimpleVolume();\n        simpleVolume1.setName( \"简单卷四\" );\n        simpleVolume1.setType( \"SimpleVolume\" );\n        simpleVolume1.setVolumeCapacity( logicVolumeCapacity1 );\n\n        LocalSimpleVolume simpleVolume2 = volumeAllotment.newLocalSimpleVolume();\n        
simpleVolume2.setName( \"简单卷五\" );\n        simpleVolume2.setVolumeCapacity( logicVolumeCapacity2 );\n        simpleVolume2.setType( \"SimpleVolume\" );\n\n        LocalSpannedVolume spannedVolume = volumeAllotment.newLocalSpannedVolume();\n        spannedVolume.setName( \"跨区卷\" );\n        spannedVolume.setVolumeCapacity( logicVolumeCapacity3 );\n        spannedVolume.setType( \"spannedVolume\" );\n\n        volumeManager.insertPhysicalVolume( physicalVolume1 );\n        volumeManager.insertPhysicalVolume( physicalVolume2 );\n        simpleVolume1.build();\n        simpleVolume2.build();\n\n        simpleVolume1.extendLogicalVolume( physicalVolume1.getGuid() );\n        simpleVolume2.extendLogicalVolume( physicalVolume2.getGuid() );\n        spannedVolume.storageExpansion( simpleVolume1.getGuid() );\n        spannedVolume.storageExpansion( simpleVolume2.getGuid() );\n        spannedVolume.build();\n    }\n\n    void testStripedReceive( UniformVolumeManager volumeManager ) throws IOException {\n        GuidAllocator guidAllocator = volumeManager.getGuidAllocator();\n        LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath(\"条带卷\"));\n        TitanStorageReceiveIORequest titanReceiveStorageObject = new TitanStorageReceiveIORequest();\n        File file = new File(\"D:/井盖视频块/我的视频.mp4\");\n        titanReceiveStorageObject.setName( \"我的视频\" );\n        titanReceiveStorageObject.setSize( file.length() );\n        titanReceiveStorageObject.setStorageObjectGuid( guidAllocator.nextGUID() );\n\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);\n        TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );\n//        FileInputStream stream = new FileInputStream( file );\n//        TitanInputStreamChanface kChannel = new TitanInputStreamChanface(stream);\n        UnifiedTransmitConstructor unifiedTransmitConstructor = new UnifiedTransmitConstructor();\n        ReceiveEntity entity = 
unifiedTransmitConstructor.getReceiveEntity(volume.getClass(), volumeManager, titanReceiveStorageObject, kChannel, volume);\n        //TitanStripedReceiveEntity64 receiveEntity = new TitanStripedReceiveEntity64( volumeManager, titanReceiveStorageObject, kChannel, (StripedVolume) volume);\n        volume.receive( entity );\n\n\n        //StorageIOResponse storageIOResponse = volume.channelReceive(titanReceiveStorageObject, titanKChannel);\n    }\n\n    void testSpannedReceive( UniformVolumeManager volumeManager ) throws IOException {\n        GuidAllocator guidAllocator = volumeManager.getGuidAllocator();\n        LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath(\"跨区卷\"));\n        TitanStorageReceiveIORequest titanReceiveStorageObject = new TitanStorageReceiveIORequest();\n        File file = new File(\"D:/井盖视频块/我的视频.mp4\");\n        titanReceiveStorageObject.setName( \"视频\" );\n        titanReceiveStorageObject.setSize( file.length() );\n        titanReceiveStorageObject.setStorageObjectGuid( guidAllocator.nextGUID() );\n\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);\n        TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );\n\n        SpannedReceiveEntity64 receiveEntity = new TitanSpannedReceiveEntity64( volumeManager, titanReceiveStorageObject, kChannel, (SpannedVolume) volume);\n        volume.receive( receiveEntity );\n    }\n\n    void testSimpleReceive( UniformVolumeManager volumeManager ) throws IOException {\n        GuidAllocator guidAllocator = volumeManager.getGuidAllocator();\n        LogicVolume volume = volumeManager.get(GUIDs.GUID128(\"12146c0-0000ca-0000-8c\"));\n        TitanStorageReceiveIORequest titanReceiveStorageObject = new TitanStorageReceiveIORequest();\n        File file = new File(\"C:/Users/29796/OneDrive/图片/R-C.jpg\");\n        titanReceiveStorageObject.setName( \"视频\" );\n        titanReceiveStorageObject.setSize( file.length() );\n        
titanReceiveStorageObject.setStorageObjectGuid( guidAllocator.nextGUID() );\n\n//        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);\n//        TitanFileChannelKChannel kChannel = new TitanFileChannelKChannel(channel);\n        FileInputStream fileInputStream = new FileInputStream( file );\n        TitanInputStreamChanface kChannel = new TitanInputStreamChanface( fileInputStream );\n        TitanSimpleReceiveEntity64 receiveEntity = new TitanSimpleReceiveEntity64( volumeManager, titanReceiveStorageObject, kChannel, (SimpleVolume) volume);\n        volume.randomReceive( receiveEntity,0,file.length() );\n    }\n\n\n    void testStripedExport( UniformVolumeManager volumeManager ) throws Exception {\n        File file = new File(\"D:\\\\文件系统\\\\大文件\\\\我的视频.mp4\");\n        File originalFile = new File( \"D:/井盖视频块/我的视频.mp4\" );\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);\n        TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );\n//        FileOutputStream stream = new FileOutputStream( file );\n//        TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(stream);\n        LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath(\"条带卷\"));\n        TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest();\n        Debug.trace(originalFile.length());\n        titanExportStorageObject.setSize( originalFile.length() );\n        titanExportStorageObject.setStorageObjectGuid( GUIDs.GUID128(\"0d96fa2-000013-0001-f0\") );\n        //titanExportStorageObject.setSourceName(\"D:/文件系统/簇1/文件夹/视频_0662cf6-0000cd-0001-10.storage\");\n        //volume.channelExport( titanExportStorageObject, titanFileChannelKChannel);\n        UnifiedTransmitConstructor unifiedTransmitConstructor = new UnifiedTransmitConstructor();\n        ExporterEntity entity = 
unifiedTransmitConstructor.getExportEntity(volume.getClass(), volumeManager, titanExportStorageObject, kChannel, volume);\n        //TitanStripedExportEntity64 exportEntity = new TitanStripedExportEntity64( volumeManager, titanExportStorageObject, kChannel, (StripedVolume) volume);\n        volume.export( entity );\n    }\n\n    void testSpannedExport( UniformVolumeManager volumeManager ) throws IOException {\n        File file = new File(\"D:\\\\文件系统\\\\大文件\\\\我的视频.mp4\");\n        File originalFile = new File( \"D:/井盖视频块/我的视频.mp4\" );\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);\n        TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );\n        LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath(\"跨区卷\"));\n        TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest();\n        titanExportStorageObject.setSize( originalFile.length() );\n        titanExportStorageObject.setStorageObjectGuid( GUIDs.GUID128(\"0dc08ee-000129-0001-d0\") );\n        //titanExportStorageObject.setSourceName(\"D:\\\\文件系统\\\\簇4\\\\视频_09ab8ac-0003d7-0001-04.storage\");\n\n        TitanSpannedExportEntity64 exportEntity = new TitanSpannedExportEntity64( volumeManager, titanExportStorageObject, kChannel, (SpannedVolume) volume);\n        volume.export( exportEntity );\n    }\n\n    void testSimpleExport( UniformVolumeManager volumeManager ) throws IOException {\n        File file = new File(\"D:\\\\文件系统\\\\大文件\\\\我的图片2.jpg\");\n        File originalFile = new File( \"C:/Users/29796/OneDrive/图片/R-C.jpg\" );\n//        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);\n//        TitanFileChannelKChannel kChannel = new TitanFileChannelKChannel(channel);\n        LogicVolume volume = 
volumeManager.get(GUIDs.GUID128(\"12146c0-0000ca-0000-8c\"));\n        TitanStorageExportIORequest titanExportStorageObject = new TitanStorageExportIORequest();\n        titanExportStorageObject.setSize( originalFile.length() - 1024 * 200 );\n        titanExportStorageObject.setStorageObjectGuid( GUIDs.GUID128(\"1567f8c-000038-0006-ac\") );\n        titanExportStorageObject.setSourceName( \"D:/文件系统/簇1/R-C.jpg_1567f8c-000038-0006-ac.storage\" );\n\n        FileOutputStream fileOutputStream = new FileOutputStream( file );\n        TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface( fileOutputStream );\n        TitanSimpleExportEntity64 exportEntity = new TitanSimpleExportEntity64( volumeManager, titanExportStorageObject, kChannel,(SimpleVolume) volume);\n        //volume.export( exportEntity,0,originalFile.length() - 1024 * 200 );\n        volume.export( exportEntity,originalFile.length() - 1024 * 200, 1024 * 200 );\n    }\n\n\n    void testHash( UniformVolumeManager volumeManager ){\n        KenVolumeFileSystem kenVolumeFileSystem = new KenVolumeFileSystem(volumeManager);\n//        for( int i = 0; i < 1000000; i++ ){\n//            GUID128 guid72 = GUIDs.Dummy128();\n//            int hash = kenVolumeFileSystem.hashStorageObjectID(guid72, 2);\n//            if( hash != 0 && hash != 1 ){\n//                Debug.trace( guid72 );\n//            }\n//        }\n        Debug.trace( kenVolumeFileSystem.hashStorageObjectID( GUIDs.GUID128( \"0860ff4-0003ac-0000-cc\" ), 2 ) );\n    }\n\n    void testConsumer( UniformVolumeManager volumeManager )  {\n        LogicVolume volume = volumeManager.get(volumeManager.queryGUIDByPath(\"条带卷\"));\n        UnifiedTransmitConstructor unifiedTransmitConstructor = new UnifiedTransmitConstructor();\n        Debug.trace( volume.getClass() );\n        Debug.trace( unifiedTransmitConstructor.getReceiveEntity( volume.getClass() ) );\n    }\n\n}\npublic class TestVolume {\n    public static void main( String[] args ) throws 
Exception {\n        Pinecone.init( (Object...cfg )->{\n            Alice Alice = (Alice) Pinecone.sys().getTaskManager().add( new Alice( args, Pinecone.sys() ) );\n            Alice.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n\n\n"
  },
  {
    "path": "Sparta/sparta-core-console/src/test/java/test.java",
    "content": "import org.junit.Test;\nimport org.springframework.boot.test.context.SpringBootTest;\nimport org.springframework.test.context.ContextConfiguration;\n\n@SpringBootTest\n@ContextConfiguration(locations = { \"classpath:uid/default-uid-spring.xml\" })\npublic class test {\n    @Test\n    public void test(){\n\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sparta</artifactId>\n        <groupId>com.walnuts.sparta</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.sparta.account.console</groupId>\n    <artifactId>sparta-uac-console</artifactId>\n    <version>2.1.0</version>\n\n    <properties>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>junit</groupId>\n            <artifactId>junit</artifactId>\n            <version>3.8.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.sparta.api.uac</groupId>\n            <artifactId>sparta-api-uac</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        
<dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-config</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.sdk.thrift</groupId>\n            <artifactId>hydra-lib-thrift-sdk</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.testng</groupId>\n            <artifactId>testng</artifactId>\n            <version>RELEASE</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.thrift</groupId>\n            <artifactId>libthrift</artifactId>\n            <version>0.18.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>io.jsonwebtoken</groupId>\n            <artifactId>jjwt-impl</artifactId>\n            <version>0.11.2</version>\n            <scope>runtime</scope>\n        </dependency>\n    
    <dependency>\n            <groupId>io.jsonwebtoken</groupId>\n            <artifactId>jjwt-jackson</artifactId> <!-- or jjwt-gson if Gson is preferred -->\n            <version>0.11.2</version>\n            <scope>runtime</scope>\n        </dependency>\n        <dependency>\n            <groupId>io.jsonwebtoken</groupId>\n            <artifactId>jjwt-api</artifactId>\n            <version>0.11.2</version>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/SpartaAccountService.java",
    "content": "package com.walnut.sparta.account;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.summer.spring.Springron;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\n\npublic class SpartaAccountService extends Springron implements Slf4jTraceable {\n    public SpartaAccountService( String szName, Processum parent, String[] springbootArgs ) {\n        super( szName, parent, springbootArgs );\n        this.mSpringKernel.setPrimarySources( SpartaBoot.class );\n    }\n\n    public SpartaAccountService( String szName, Processum parent ) {\n        this( szName, parent, new String[0] );\n    }\n\n    @Override\n    protected void loadConfig() {\n        this.mServgramList     = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey );\n        Object dyServgramConf  = this.mServgramList.get( this.gramName() );\n        if( dyServgramConf instanceof String ) {\n            try{\n                this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) );\n            }\n            catch ( IOException ignore ) {\n                this.getLogger().info( \"[Notice] Spring will use the default config `application.yaml`.\" );\n            }\n        }\n        else {\n            this.mServgramConf = this.mServgramList.getChild( this.gramName() );\n        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/SpartaBoot.java",
    "content": "package com.walnut.sparta.account;\n\nimport org.springframework.boot.autoconfigure.SpringBootApplication;\n\n@SpringBootApplication\n\npublic class SpartaBoot {\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/api/controller/v2/AccountController.java",
    "content": "package com.walnut.sparta.account.api.controller.v2;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.JSON;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.entity.ACNodeAllotment;\nimport com.pinecone.hydra.account.entity.Account;\nimport com.pinecone.hydra.account.entity.Domain;\nimport com.pinecone.hydra.account.entity.GenericAccount;\nimport com.pinecone.hydra.account.entity.GenericAuthorization;\nimport com.pinecone.hydra.account.entity.GenericCredential;\nimport com.pinecone.hydra.account.entity.GenericDomain;\nimport com.pinecone.hydra.account.entity.GenericGroup;\nimport com.pinecone.hydra.account.entity.GenericPrivilege;\nimport com.pinecone.hydra.account.entity.GenericRole;\nimport com.pinecone.hydra.account.entity.Group;\nimport com.pinecone.hydra.account.entity.Privilege;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.sparta.account.domian.vo.UserLoginVO;\nimport com.walnut.sparta.account.interceptor.RequiresAuthentication;\nimport com.walnut.sparta.account.properties.JwtProperties;\nimport com.walnut.sparta.account.util.JwtUtil;\nimport com.walnut.sparta.account.api.response.BasicResultResponse;\nimport com.walnut.sparta.account.domian.vo.AccountLoginVO;\n\nimport org.springframework.beans.BeanUtils;\nimport org.springframework.web.bind.annotation.*;\n\nimport javax.annotation.Resource;\nimport java.time.LocalDateTime;\nimport java.time.format.DateTimeFormatter;\nimport java.util.ArrayList;\nimport java.util.Collection;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\n@RestController\n@RequestMapping( \"/api/v2/account\" )\n@CrossOrigin\npublic class AccountController {\n    @Resource\n    private AccountManager  primaryAccount;\n\n    private JwtProperties jwtProperties;\n    public  AccountController(JwtProperties jwtProperties) {\n        this.jwtProperties = 
jwtProperties;\n    }\n\n    @PutMapping(\"/create/domain\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> createDomain( @RequestParam(\"doMainName\") String doMainName ){\n        ACNodeAllotment allotment = this.primaryAccount.getAllotment();\n        Domain domain = allotment.newDomain();\n        domain.setName( doMainName );\n        this.primaryAccount.put(domain);\n        return BasicResultResponse.success();\n    }\n\n    @PutMapping(\"/update/domain\")\n    @RequiresAuthentication\n    public BasicResultResponse<Boolean> updateDomain(\n            @RequestParam(\"guid\") String guid,\n            @RequestParam(\"name\") String name) {\n        // 查询域是否存在\n        GenericDomain domain = new GenericDomain();\n        domain.setGuid(GUIDs.GUID128(guid));\n        domain.setName(name); // 更新域名称\n        this.primaryAccount.updateDomain(domain); // 保存更新\n        return BasicResultResponse.success(true);\n    }\n    @DeleteMapping(\"remove/domain\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> removeDomain( @RequestParam(\"domainGuid\") String domainGuid ){\n\n        Collection<TreeNode> children = this.primaryAccount.getChildren( GUIDs.GUID128(domainGuid));\n        for (TreeNode treeNode : children) {\n            this.primaryAccount.remove(treeNode.getGuid());\n        }\n        this.primaryAccount.remove( GUIDs.GUID128(domainGuid) );\n        return BasicResultResponse.success();\n    }\n\n    @PutMapping(\"/create/group\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> createGroup( @RequestParam(\"parentGuid\") String parentGuid, @RequestParam(\"groupName\") String groupName ){\n        GenericGroup genericGroup = new GenericGroup();\n        genericGroup.setName(groupName);\n        this.primaryAccount.put( genericGroup );\n        this.primaryAccount.addChildren(GUIDs.GUID128(parentGuid), genericGroup.getGuid() );\n        return BasicResultResponse.success();\n    }\n    
@DeleteMapping(\"/remove/group\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> removeGroup( @RequestParam(\"groupGuid\") String groupGuid ){\n        Collection<TreeNode> children = this.primaryAccount.getChildren(GUIDs.GUID128(groupGuid));\n        System.out.println(children.isEmpty());\n        System.out.println(groupGuid);\n        if (children.isEmpty()) {\n            this.primaryAccount.remove(GUIDs.GUID128(groupGuid));\n            return BasicResultResponse.success(\"删除成功\");\n        }\n        return BasicResultResponse.error(\"Group is not empty\");\n    }\n\n    @PutMapping(\"/update/group\")\n    @RequiresAuthentication\n    public BasicResultResponse<Boolean> updateGroup(\n            @RequestParam(\"groupGuid\") String groupGuid,\n            @RequestParam(\"groupName\") String groupName) {\n        // 查询组是否存在\n        Group group = this.primaryAccount.queryGroupByGroupGuid(GUIDs.GUID128(groupGuid));\n        if (group != null) {\n            group.setName(groupName); // 更新组名称\n            this.primaryAccount.updateGroup(group); // 保存更新\n            return BasicResultResponse.success(true);\n        } else {\n            return BasicResultResponse.error(\"Group not found\");\n        }\n    }\n\n    @GetMapping(\"/query/users/byGroup\")\n    @RequiresAuthentication\n    public String queryUsersByGroup(@RequestParam(\"groupGuid\") String groupGuid) {\n        List<GenericAccount> accounts = new ArrayList<>();\n        Collection<GUID> guids = this.primaryAccount.fetchChildrenGuids(GUIDs.GUID128(groupGuid));\n        for (GUID guid : guids)\n        {\n            accounts.add((GenericAccount) this.primaryAccount.queryAccountByUserGuid(guid));\n\n        }\n        return BasicResultResponse.success(accounts).toJSONString();\n    }\n\n    @GetMapping(\"/query/path\")\n    @RequiresAuthentication\n    public String queryNodeByPath( @RequestParam(\"path\") String path ){\n        GUID guid = 
this.primaryAccount.queryGUIDByPath(path);\n        return BasicResultResponse.success(this.primaryAccount.get(guid)).toJSONString();\n    }\n\n    @PutMapping(\"/create/account\")\n    @RequiresAuthentication\n    public String createAccount(\n            @RequestParam(\"userName\") String userName,\n            @RequestParam(\"nickName\") String nickName,\n            @RequestParam(\"kernelCredential\") String kernelCredential,\n            @RequestParam(\"kernelGroupType\") String kernelGroupType,\n            @RequestParam(\"role\") String role,\n            @RequestParam(\"parentGuid\") String parentGuid) {\n        LocalDateTime now = LocalDateTime.now();\n        DateTimeFormatter formatter = DateTimeFormatter.ofPattern(\"yyyy-MM-dd HH:mm:ss\");\n        String formattedTime = now.format(formatter);\n        System.out.println(\"Account created at: \" + formattedTime);\n        GenericAccount account = new GenericAccount(this.primaryAccount);\n        account.setName(userName);\n        System.out.println(account.getName());\n        if (this.primaryAccount.queryAccountGuidByName(account.getName()).isEmpty()) {\n            account.setNickName(nickName);\n            GenericCredential credential = new GenericCredential(\n                    this.primaryAccount.getGuidAllocator().nextGUID(),\n                    userName,\n                    kernelCredential,\n                    now,\n                    now,\n                    \"TextPassword\"\n            );\n            this.primaryAccount.insertCredential(credential);\n            account.setCredentialGuid(credential.getGuid());\n            account.setKernelCredential(kernelCredential);\n            account.setKernelGroupType(kernelGroupType);\n            account.setCreateTime(now);\n            account.setUpdateTime(now);\n            account.setRole(role);\n            this.primaryAccount.put(account);\n            List<GenericRole> list = this.primaryAccount.queryAllRoles();\n            for 
(GenericRole roles : list) {\n                if (roles.getName().equals(role)) {\n                    String[] privilegeGuids = roles.getPrivilegeGuids().split(\",\");\n                    for (String privilegeGuid : privilegeGuids) {\n                        GenericAuthorization authorization = new GenericAuthorization(\n                                account.getGuid(),\n                                account.getName(),\n                                credential.getGuid(),\n                                kernelGroupType,\n                                now,\n                                now\n                        );\n                        authorization.setPrivilegeGuid(GUIDs.GUID128(privilegeGuid.trim())); // 去除可能的空格\n                        authorization.setGuid(this.primaryAccount.getGuidAllocator().nextGUID());\n                        this.primaryAccount.insertAuthorization(authorization);\n                    }\n                    break;\n                }\n            }\n\n            this.primaryAccount.addChildren(GUIDs.GUID128(parentGuid), account.getGuid());\n            return BasicResultResponse.success(account).toJSONString();\n        }\n        return BasicResultResponse.error(\"Account already exists\").toJSONString();\n    }\n\n    @PutMapping(\"/update/account\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> updateAccount(\n            @RequestParam(\"userGuid\") String userGuid,\n            @RequestParam(\"nickName\") String nickName,\n            @RequestParam(\"kernelCredential\") String kernelCredential,\n            @RequestParam(\"kernelGroupType\") String kernelGroupType,\n            @RequestParam(\"role\") String role,\n            @RequestParam(\"newUsername\") String newUsername\n    ) {\n        LocalDateTime now = LocalDateTime.now();\n        Account account = this.primaryAccount.queryAccountByUserGuid(GUIDs.GUID128(userGuid));\n        if (account != null) {\n            
account.setNickName(nickName);\n            account.setKernelCredential(kernelCredential);\n            account.setKernelGroupType(kernelGroupType);\n            account.setCreateTime(now);\n            account.setUpdateTime(now);\n            account.setRole(role);\n            account.setName(newUsername); // 允许修改用户名\n            account.setGuid(GUIDs.GUID128(userGuid));\n            System.out.println(account);\n            this.primaryAccount.updateAccount(account);\n        }\n        return BasicResultResponse.success();\n    }\n\n    @DeleteMapping(\"/remove/account\")\n    @RequiresAuthentication\n    public BasicResultResponse<Boolean> removeAccount( @RequestParam(\"userGuid\") String userGuid ) {\n        Account account=this.primaryAccount.queryAccountByUserGuid(GUIDs.GUID128(userGuid));\n        List<GenericAuthorization> authorizations = this.primaryAccount.queryAuthorizationByUserGuid(account.getGuid());\n        for (GenericAuthorization authorization : authorizations) {\n            this.primaryAccount.remove(authorization.getGuid());\n        }\n        this.primaryAccount.remove(account.getGuid());\n        return BasicResultResponse.success(true);\n    }\n\n    @PutMapping(\"/login\")\n    public String login( @RequestParam(\"userName\") String userName, @RequestParam(\"kernelCredential\") String kernelCredential ) {\n        // 查询用户 GUID\n        List<GUID> userGuidList = this.primaryAccount.queryAccountGuidByName(userName);\n        if (userGuidList == null || userGuidList.isEmpty()) {\n            return BasicResultResponse.error(\"Account not found\").toJSONString();\n        }\n\n        GUID userGuid = userGuidList.get(0); // 用户名是唯一的\n        boolean isLogin = this.primaryAccount.queryAccountByGuid(userGuid, kernelCredential);\n        if (!isLogin) {\n            return BasicResultResponse.error(\"Account or kernelCredential error\").toJSONString();\n        }\n\n        // 用户登录成功，生成 JWT 令牌\n        Map<String, Object> claims = new 
HashMap<>();\n        claims.put(\"userId\", userGuid.toString()); // 假设 userGuid 是用户的唯一标识\n\n        // 从配置文件中读取 JWT 配置\n        String userSecretKey = \"1212121hsodhsdhasdhsaldhsalhdlsahdlsad\"; // 替换为实际的密钥\n        long userTtl = 3600000; // 替换为实际的过期时间\n        System.out.println(this.jwtProperties.getUserSecretKey());\n        System.out.println(\"User Secret Key: \" + userSecretKey);\n        System.out.println(\"User TTL: \" + userTtl);\n\n        String token = JwtUtil.createJWT(userSecretKey, userTtl, claims);\n\n        UserLoginVO userLoginVo = new UserLoginVO();\n        userLoginVo.setUserid(userGuid.toString());\n        userLoginVo.setUserName(userName);\n        userLoginVo.setUserToken(token);\n\n        System.out.println(userLoginVo);\n\n        return BasicResultResponse.success(userLoginVo).toJSONString();\n    }\n\n    @GetMapping(\"/query/allAccount\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> queryAllAccount() {\n        List<GenericAccount> accounts = this.primaryAccount.queryAllAccount();\n        return BasicResultResponse.success(accounts.toString());\n    }\n\n\n\n    @PutMapping(\"/query/Authorization/ByUserName\")\n    @RequiresAuthentication\n    public String queryAuthorizationByUserName( @RequestParam(\"userName\") String userName ) {\n        List<GUID> userGuidList =this.primaryAccount.queryAccountGuidByName(userName);\n        if (userGuidList.isEmpty()) {\n            return BasicResultResponse.error(\"Account not found\").toJSONString();\n        }\n        GUID userGuid = userGuidList.get(0); // 假设用户名是唯一的\n        List<GenericAuthorization> authorizations = this.primaryAccount.queryAuthorizationByUserGuid(userGuid);\n        return BasicResultResponse.success(authorizations).toJSONString();\n    }\n\n    @GetMapping(\"/query/domain\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> queryDomain() {\n        List<GenericDomain> domains = this.primaryAccount.queryAllDomain();\n      
  return BasicResultResponse.success(domains.toString());\n    }\n\n    @GetMapping(\"/query/account\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> queryAccount(\n            @RequestParam(\"userName\") String userName)\n    {\n        List<GUID> userGuidList =this.primaryAccount.queryAccountGuidByName(userName);\n        System.out.println(userGuidList);\n        if (userGuidList.isEmpty()) {\n            return BasicResultResponse.error(\"Account not found\");\n        }\n        GUID userGuid = userGuidList.get(0); // 假设用户名是唯一的\n        GenericAccount account = (GenericAccount) this.primaryAccount.get(userGuid);\n        AccountLoginVO accountLoginVo = new AccountLoginVO();\n        BeanUtils.copyProperties(account,accountLoginVo);\n        return BasicResultResponse.success(accountLoginVo.toJSONString());\n    }\n\n    @GetMapping(\"/query/domain/groups\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> queryDomainGroups(\n            @RequestParam(\"domainGuid\") String domainGuid) {\n        try {\n            Collection<TreeNode> children = this.primaryAccount.getChildren(GUIDs.GUID128(domainGuid));\n            List<Map<String, String>> groups = new ArrayList<>();\n            for (TreeNode child : children) {\n                if (child instanceof Group) {\n                    Group group = this.primaryAccount.queryGroupByGroupGuid(child.getGuid());\n                    Map<String, String> groupInfo = new HashMap<>();\n                    groupInfo.put(\"domainName\", this.primaryAccount.queryDomainNameByGuid(GUIDs.GUID128(domainGuid)));\n                    System.out.println(this.primaryAccount.queryDomainNameByGuid(GUIDs.GUID128(domainGuid)));\n                    groupInfo.put(\"groupName\", group.getName());\n                    groupInfo.put(\"groupGuid\", group.getGuid().toString());\n                    groups.add(groupInfo);\n                }\n            }\n            return 
BasicResultResponse.success(JSON.stringify(groups));\n        } catch (Exception e) {\n            return BasicResultResponse.error(\"Failed to query groups: \" + e.getMessage());\n        }\n    }\n\n    @GetMapping(\"/query/group\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> queryDomainGroup(\n            @RequestParam(\"domainGuid\") String domainGuid\n    ){\n        Collection<TreeNode> children = this.primaryAccount.getChildren(GUIDs.GUID128(domainGuid));\n        List<Group> groups = new ArrayList<>();\n        for (TreeNode child : children) {\n            if (child instanceof Group) {\n                groups.add( this.primaryAccount.queryGroupByGroupGuid(child.getGuid()));\n            }\n        }\n        return BasicResultResponse.success(groups.toString());\n    }\n\n    @PutMapping(\"/create/privilege\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> createPrivilege(\n            @RequestParam(\"token\") String token,\n            @RequestParam(\"name\") String name,\n            @RequestParam(\"privilegeCode\") String privilegeCode,\n            @RequestParam(\"type\") String type,\n            @RequestParam(value = \"parentPrivGuid\", required = false) String parentPrivGuid)\n    {\n        System.out.println(token);\n        GenericPrivilege privilege = new GenericPrivilege(\n                this.primaryAccount.getGuidAllocator().nextGUID(),\n                token,\n                name,\n                privilegeCode,\n                LocalDateTime.now(),\n                LocalDateTime.now(),\n                type\n        );\n        // 检查parentPrivGuid是否为空或空字符串\n        if (parentPrivGuid != null && !parentPrivGuid.isEmpty()) {\n            privilege.setParentPrivGuid(GUIDs.GUID128(parentPrivGuid));\n        } else {\n            privilege.setParentPrivGuid(null);\n        }\n        System.out.println(privilege.getParentPrivGuid());\n        this.primaryAccount.insertPrivilege(privilege);\n        
return BasicResultResponse.success();\n    }\n    @PutMapping(\"/update/privilege\")\n    @RequiresAuthentication\n    public String updatePrivilege(\n            @RequestParam(\"guid\") String guid,\n            @RequestParam(\"name\") String name,\n            @RequestParam(\"token\") String token,\n            @RequestParam(\"type\") String type,\n            @RequestParam(\"privilegeCode\") String privilegeCode) {\n        Privilege privilege = this.primaryAccount.queryPrivilegeByGuid(GUIDs.GUID128(guid));\n        if (privilege != null) {\n            privilege.setName(name);\n            privilege.setToken(token);\n            privilege.setType(type);\n            privilege.setPrivilegeCode(privilegeCode);\n            this.primaryAccount.updatePrivilege(privilege);\n            return BasicResultResponse.success(privilege).toJSONString();\n        }\n        return BasicResultResponse.error(\"权限不存在\").toJSONString();\n    }\n    @DeleteMapping(\"/remove/privilege\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> removePrivilege(\n            @RequestParam(\"privilegeGuid\") String privilegeGuid)\n    {\n        this.primaryAccount.removePrivilege(GUIDs.GUID128(privilegeGuid));\n        return BasicResultResponse.success();\n    }\n    @GetMapping(\"/List/privilege\")\n    @RequiresAuthentication\n    public String listPrivilege(\n    )\n    {\n\n        List<GenericPrivilege> privileges = this.primaryAccount.queryAllPrivileges();\n        return BasicResultResponse.success(privileges).toJSONString();\n    }\n    @PutMapping(\"/create/role\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> createRole(\n            @RequestParam(\"roleName\") String roleName,\n            @RequestParam(\"roleType\") String roleType,\n            @RequestParam(\"privilegeGuids\") String privilegeGuids)\n    {\n        GenericRole role = new GenericRole(\n                roleName,\n                privilegeGuids,\n                
LocalDateTime.now(),\n                LocalDateTime.now(),\n                roleType\n        );\n        this.primaryAccount.insertRole(role);\n        return BasicResultResponse.success();\n    }\n    @PutMapping(\"/update/role\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> updateRole(\n            @RequestParam(\"roleName\") String roleName,\n            @RequestParam(\"roleType\") String roleType,\n            @RequestParam(\"privilegeGuids\") String privilegeGuids)\n    {\n        GenericRole role = new GenericRole(\n                roleName,\n                privilegeGuids,\n                LocalDateTime.now(),\n                LocalDateTime.now(),\n                roleType\n        );\n        this.primaryAccount.updateRole(role);\n        return BasicResultResponse.success();\n    }\n    @GetMapping(\"/query/all/role\")\n    @RequiresAuthentication\n    public String queryAllRole()\n    {\n        List<GenericRole> roles = this.primaryAccount.queryAllRoles();\n        System.out.println(roles);\n        GenericRole role = roles.get(0);\n        System.out.println(role.getPrivilegeGuids());\n        return BasicResultResponse.success(roles).toJSONString();\n    }\n    @PutMapping(\"/create/Authorization\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> createAuthorization(\n            @RequestParam(\"userName\") String userName,\n            @RequestParam(\"privilegeToken\") String privilegeToken,\n            @RequestParam(\"privilegeGuid\") String privilegeGuids)\n    {\n        List<GUID> userGuidList=this.primaryAccount.queryAccountGuidByName(userName);\n        GUID userGuid= userGuidList.get(0);\n        Account account = this.primaryAccount.queryAccountByUserGuid(userGuid);\n        GUID credentialGuid = account.getCredentialGuid();\n        System.out.println(userGuid);\n        if (userGuidList.isEmpty()) {\n            return BasicResultResponse.error(\"Account not found\");\n        }\n\n        
GenericAuthorization authorization = new GenericAuthorization(\n                userGuid,\n                userName,\n                credentialGuid,\n                privilegeToken,\n                LocalDateTime.now(),\n                LocalDateTime.now()\n        );\n        authorization.setGuid(this.primaryAccount.getGuidAllocator().nextGUID());\n        authorization.setPrivilegeGuid(GUIDs.GUID128(privilegeGuids));\n        this.primaryAccount.insertAuthorization(authorization);\n\n        return BasicResultResponse.success();\n    }\n\n    @DeleteMapping(\"/delete/Authorization\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> deleteAuthorization(\n            @RequestParam (\"authorizationGuid\") String Guid)\n    {\n        this.primaryAccount.removeAuthorizationByGuid(GUIDs.GUID128(Guid));\n        return BasicResultResponse.success();\n    }\n\n\n    @DeleteMapping(\"/remove/role\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> removeRole(\n            @RequestParam(\"id\") int id)\n    {\n        this.primaryAccount.removeRole(id);\n        return BasicResultResponse.success();\n    }\n    @GetMapping(\"/query/Authorization\")\n    @RequiresAuthentication\n    public String queryAuthorization(\n    )\n    {\n        List<GenericAuthorization> authorizations = this.primaryAccount.queryAllAuthorization();\n        return BasicResultResponse.success(authorizations).toJSONString();\n    }\n    @PutMapping(\"/update/authorization\")\n    @RequiresAuthentication\n    public BasicResultResponse<String> updateAuthorization(\n            @RequestParam(\"guid\") String guid\n    ) {\n        try {\n            // 更新授权信息的逻辑\n            this.primaryAccount.updateAuthorization(GUIDs.GUID128(guid));\n            return BasicResultResponse.success();\n        } catch (Exception e) {\n            return BasicResultResponse.error(\"更新授权失败: \" + e.getMessage());\n        }\n    }\n}"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/api/controller/xxx.java",
    "content": "package com.walnut.sparta.account.api.controller;\n\npublic class xxx {\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/api/response/BasicResultResponse.java",
    "content": "package com.walnut.sparta.account.api.response;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport org.springframework.http.HttpStatus;\n\nimport java.io.Serializable;\n\npublic class BasicResultResponse<T> implements Pinenut, Serializable {\n    private Integer    code = HttpStatus.OK.value();\n    private String     msg; //错误信息\n    private T          data; //数据\n\n    public static <T> BasicResultResponse<T > success() {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > successMsg( String msg  ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > success( T object ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.data = object;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > error( String msg ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.INTERNAL_SERVER_ERROR.value();\n        return result;\n    }\n\n\n    /**\n     * 获取\n     * @return code\n     */\n    public Integer getCode() {\n        return this.code;\n    }\n\n    /**\n     * 设置\n     * @param code\n     */\n    public void setCode(Integer code) {\n        this.code = code;\n    }\n\n    /**\n     * 获取\n     * @return msg\n     */\n    public String getMsg() {\n        return this.msg;\n    }\n\n    /**\n     * 设置\n     * @param msg\n     */\n    public void setMsg(String msg) {\n        this.msg = msg;\n    }\n\n    /**\n     * 获取\n     * @return data\n   
  */\n    public T getData() {\n        return this.data;\n    }\n\n    /**\n     * 设置\n     * @param data\n     */\n    public void setData(T data) {\n        this.data = data;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"code\" , this.code ),\n                new KeyValue<>( \"msg\"  , this.msg ),\n                new KeyValue<>( \"data\" , this.data )\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/config/WebConfig.java",
    "content": "package com.walnut.sparta.account.config;\n\nimport com.walnut.sparta.account.interceptor.AuthenticationInterceptor;\n\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.web.servlet.config.annotation.InterceptorRegistry;\nimport org.springframework.web.servlet.config.annotation.WebMvcConfigurer;\n\n@Configuration\npublic class WebConfig implements WebMvcConfigurer {\n\n    @Override\n    public void addInterceptors(InterceptorRegistry registry) {\n        registry.addInterceptor(new AuthenticationInterceptor())\n                .addPathPatterns(\"/**\") // 拦截所有路径\n                .excludePathPatterns(\"/api/v2/account/login\"); // 排除登录接口\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/domian/vo/AccountLoginVO.java",
    "content": "package com.walnut.sparta.account.domian.vo;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class AccountLoginVO implements Pinenut {\n    private String userName;\n    private String nickName;\n\n    public AccountLoginVO(String userName, String nickName) {\n        this.userName = userName;\n        this.nickName = nickName;\n    }\n\n    public String getUserName() {\n        return userName;\n    }\n\n    public void setUserName(String userName) {\n        this.userName = userName;\n    }\n\n    public String getNickName() {\n        return nickName;\n    }\n\n    public void setNickName(String nickName) {\n        this.nickName = nickName;\n    }\n\n    public AccountLoginVO() {\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/domian/vo/QueryallUserVO.java",
    "content": "package com.walnut.sparta.account.domian.vo;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.entity.Account;\nimport com.pinecone.hydra.account.entity.ArchElementNode;\n\nimport java.time.LocalDateTime;\n\npublic class QueryallUserVO extends ArchElementNode implements Account{\n        protected long enumId;\n\n        protected String name;\n\n        protected GUID guid;\n\n        protected String nickName;\n\n        protected String kernelGroupType;\n        protected String role;\n\n        protected LocalDateTime createTime;\n\n        protected LocalDateTime updateTime;\n\n        public QueryallUserVO(\n                String name,\n                GUID guid,\n                String nickName,\n                String kernelGroupType,\n                String role,\n                LocalDateTime createTime,\n                LocalDateTime updateTime\n        ) {\n            this.name = name;\n            this.guid = guid;\n            this.nickName = nickName;\n            this.kernelGroupType = kernelGroupType;\n            this.role = role;\n            this.createTime = createTime;\n            this.updateTime = updateTime;\n        }\n\n\n        public QueryallUserVO(){\n            super();\n        }\n\n        public QueryallUserVO(AccountManager accountManager){\n            super(accountManager);\n        }\n\n\n        @Override\n        public String getRole() {\n            return this.role;\n        }\n\n        @Override\n        public void setRole(String role) {\n            this.role = role;\n        }\n\n        @Override\n        public String getNickName() {\n            return this.nickName;\n        }\n\n        @Override\n        public void setNickName(String nickName) {\n            this.nickName = nickName;\n        }\n\n    @Override\n    public String 
getKernelCredential() {\n        return null;\n    }\n\n    @Override\n    public void setKernelCredential(String kernelCredential) {\n\n    }\n\n    @Override\n    public GUID getCredentialGuid() {\n        return null;\n    }\n\n    @Override\n    public void setCredentialGuid(GUID credentialGuid) {\n\n    }\n\n    @Override\n        public String getKernelGroupType() {\n            return this.kernelGroupType;\n        }\n\n        @Override\n        public void setKernelGroupType(String kernelGroupType) {\n            this.kernelGroupType = kernelGroupType;\n        }\n\n        @Override\n        public LocalDateTime getCreateTime() {\n            return this.createTime;\n        }\n\n        @Override\n        public void setCreateTime(LocalDateTime createTime) {\n            this.createTime = createTime;\n        }\n\n        @Override\n        public LocalDateTime getUpdateTime() {\n            return this.updateTime;\n        }\n\n        @Override\n        public void setUpdateTime(LocalDateTime updateTime) {\n            this.updateTime = updateTime;\n        }\n        @Override\n        public String toJSONString() {\n            return BeanJSONEncoder.BasicEncoder.encode( this );\n        }\n\n        @Override\n        public String toString() {\n            return this.toJSONString();\n        }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/domian/vo/UserLoginVO.java",
    "content": "package com.walnut.sparta.account.domian.vo;\n\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic class UserLoginVO {\n    private String userid;\n    private String userName;\n    private String UserToken;\n\n    public String getUserid() {\n        return userid;\n    }\n\n    public void setUserid(String userid) {\n        this.userid = userid;\n    }\n\n    public String getUserName() {\n        return userName;\n    }\n\n    public void setUserName(String userName) {\n        this.userName = userName;\n    }\n\n    public String getUserToken() {\n        return UserToken;\n    }\n\n    public void setUserToken(String userToken) {\n        UserToken = userToken;\n    }\n\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/interceptor/AuthenticationInterceptor.java",
    "content": "package com.walnut.sparta.account.interceptor;\n\nimport com.walnut.sparta.account.util.JwtUtil;\nimport io.jsonwebtoken.Claims;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.web.method.HandlerMethod;\nimport org.springframework.web.servlet.HandlerInterceptor;\n\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.lang.reflect.Method;\n\npublic class AuthenticationInterceptor implements HandlerInterceptor {\n\n    private final Logger log = LoggerFactory.getLogger(AuthenticationInterceptor.class);\n\n    private final String userSecretKey = \"1212121hsodhsdhasdhsaldhsalhdlsahdlsad\"; // 应与生成Token的密钥一致\n\n    @Override\n    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {\n        // 检查是否需要认证\n        if (!(handler instanceof HandlerMethod)) {\n            return true;\n        }\n        HandlerMethod handlerMethod = (HandlerMethod) handler;\n        Method method = handlerMethod.getMethod();\n\n        // 判断方法或类是否有@RequiresAuthentication注解\n        RequiresAuthentication classAnnotation = handlerMethod.getBeanType().getAnnotation(RequiresAuthentication.class);\n        RequiresAuthentication methodAnnotation = method.getAnnotation(RequiresAuthentication.class);\n        if (classAnnotation == null && methodAnnotation == null) {\n            return true; // 无需认证\n        }\n\n        // 获取Token\n        String authHeader = request.getHeader(\"Authorization\");\n        if (authHeader == null || !authHeader.startsWith(\"Bearer \")) {\n            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, \"未提供认证Token\");\n            log.warn(\"Unauthorized access attempt: Missing or invalid Authorization header\");\n            return false;\n        }\n        String token = authHeader.substring(7);\n\n        // 打印 Token\n        log.info(\"Received Token: {}\", token);\n\n        // 
验证Token\n        try {\n            boolean isValid = JwtUtil.verifyToken(token, userSecretKey);\n            if (!isValid) {\n                response.sendError(HttpServletResponse.SC_UNAUTHORIZED, \"无效的Token\");\n                log.warn(\"Unauthorized access attempt: Invalid token\");\n                return false;\n            }\n            System.out.println(\"Token验证\"+isValid);\n\n            Claims claims = JwtUtil.parseJWT(userSecretKey,token );\n            request.setAttribute(\"userId\", claims.get(\"userId\"));\n        } catch (Exception e) {\n            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, \"Token验证失败：\" + e.getMessage());\n            log.error(\"Unauthorized access attempt: Token verification failed\", e);\n            return false;\n        }\n\n        return true;\n    }\n}\n/*\npublic class AuthenticationInterceptor implements HandlerInterceptor {\n\n    private final String userSecretKey = \"1212121hsodhsdhasdhsaldhsalhdlsahdlsad\"; // 应与生成Token的密钥一致\n\n    @Override\n    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {\n        // 检查是否需要认证\n        if (!(handler instanceof HandlerMethod)) {\n            return true;\n        }\n        HandlerMethod handlerMethod = (HandlerMethod) handler;\n        Method method = handlerMethod.getMethod();\n\n        // 判断方法或类是否有@RequiresAuthentication注解\n        RequiresAuthentication classAnnotation = handlerMethod.getBeanType().getAnnotation(RequiresAuthentication.class);\n        RequiresAuthentication methodAnnotation = method.getAnnotation(RequiresAuthentication.class);\n        if (classAnnotation == null && methodAnnotation == null) {\n            return true; // 无需认证\n        }\n\n        // 获取Token\n        String authHeader = request.getHeader(\"Authorization\");\n        if (authHeader == null || !authHeader.startsWith(\"Bearer \")) {\n            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, 
\"未提供认证Token\");\n            return false;\n        }\n        String token = authHeader.substring(7);\n\n        // 验证Token\n        try {\n            boolean isValid = JwtUtil.verifyToken(token, userSecretKey);\n            if (!isValid) {\n                response.sendError(HttpServletResponse.SC_UNAUTHORIZED, \"无效的Token\");\n                return false;\n            }\n            // 可以解析Claims并设置到请求属性中，供后续使用\n            Claims claims = JwtUtil.parseJWT(token, userSecretKey);\n            request.setAttribute(\"userId\", claims.get(\"userId\"));\n        } catch (Exception e) {\n            response.sendError(HttpServletResponse.SC_UNAUTHORIZED, \"Token验证失败：\" + e.getMessage());\n            return false;\n        }\n\n        return true;\n    }\n}*/\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/interceptor/RequiresAuthentication.java",
    "content": "package com.walnut.sparta.account.interceptor;\n\nimport java.lang.annotation.ElementType;\nimport java.lang.annotation.Retention;\nimport java.lang.annotation.RetentionPolicy;\nimport java.lang.annotation.Target;\n\n@Target({ElementType.METHOD, ElementType.TYPE})\n@Retention(RetentionPolicy.RUNTIME)\npublic @interface RequiresAuthentication {\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/properties/JwtProperties.java",
    "content": "package com.walnut.sparta.account.properties;\n\nimport org.springframework.boot.context.properties.ConfigurationProperties;\nimport org.springframework.stereotype.Component;\n\n@Component\n@ConfigurationProperties(prefix = \"jwt\")\npublic class JwtProperties {\n    private long userTtl;\n    private String userTokenName;\n    private String userSecretKey;\n    public JwtProperties() {\n    }\n\n    /**\n     * 生成jwt令牌相关配置\n     */\n\n\n    public JwtProperties(String userSecretKey, long userTtl, String userTokenName) {\n        this.userSecretKey = userSecretKey;\n        this.userTtl = userTtl;\n        this.userTokenName = userTokenName;\n    }\n\n    public String getUserSecretKey() {\n        return userSecretKey;\n    }\n\n    public void setUserSecretKey(String userSecretKey) {\n        this.userSecretKey = userSecretKey;\n    }\n\n    public long getUserTtl() {\n        return userTtl;\n    }\n\n    public void setUserTtl(long userTtl) {\n        this.userTtl = userTtl;\n    }\n\n    public String getUserTokenName() {\n        return userTokenName;\n    }\n\n    public void setUserTokenName(String userTokenName) {\n        this.userTokenName = userTokenName;\n    }\n\n\n\n}"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/rpc/thrift/AccountIfaceImpl.java",
    "content": "package com.walnut.sparta.account.rpc.thrift;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.walnut.sparta.account.api.response.BasicResultResponse;\n\nimport org.apache.thrift.TException;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\n\n@Component\npublic class AccountIfaceImpl implements AccountIface.Iface {\n    @Resource\n    private AccountManager primaryAccount;\n\n    @Override\n    public String queryNodeByPath(String path) throws TException {\n        GUID guid = this.primaryAccount.queryGUIDByPath(path);\n        return BasicResultResponse.success(this.primaryAccount.get(guid)).toJSONString();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/rpc/thrift/AccountRPCService.java",
    "content": "package com.walnut.sparta.account.rpc.thrift;\n\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.thrift.server.MultiplexedServer;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.PostConstruct;\nimport javax.annotation.Resource;\n\n@Component\npublic class AccountRPCService {\n    @Resource\n    private AccountIfaceImpl accountIfaceImpl;\n\n\n    @PostConstruct\n    public void init(){\n        MultiplexedServer multiplexedServer = new MultiplexedServer(\n                new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 16701, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\")\n        );\n        multiplexedServer.registerProcessor( \"Account\", new AccountIface.Processor<>(accountIfaceImpl) );\n        multiplexedServer.start();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/java/com/walnut/sparta/account/util/JwtUtil.java",
    "content": "package com.walnut.sparta.account.util;\n\nimport io.jsonwebtoken.*;\nimport io.jsonwebtoken.security.Keys;\nimport io.jsonwebtoken.security.SignatureException;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport javax.crypto.SecretKey;\nimport java.nio.charset.StandardCharsets;\nimport java.util.Date;\nimport java.util.Map;\n\npublic class JwtUtil {\n    /**\n     * 生成jwt\n     * 使用Hs256算法, 私匙使用固定秘钥\n     *\n     * @param secretKey jwt秘钥\n     * @param ttlMillis jwt过期时间(毫秒)\n     * @param claims    设置的信息\n     * @return\n     */\n public static String createJWT(String secretKey, long ttlMillis, Map<String, Object> claims) {\n        // 指定签名的时候使用的签名算法，也就是header那部分\n        SignatureAlgorithm signatureAlgorithm = SignatureAlgorithm.HS256;\n        // 生成JWT的时间\n        long expMillis = System.currentTimeMillis() + ttlMillis;\n        Date exp = new Date(expMillis);\n\n        // 设置jwt的body\n        JwtBuilder builder = Jwts.builder()\n                // 如果有私有声明，一定要先设置这个自己创建的私有的声明，这个是给builder的claim赋值，一旦写在标准的声明赋值之后，就是覆盖了那些标准的声明的\n                .setClaims(claims)\n                // 设置签名使用的签名算法和签名使用的秘钥\n                .signWith(signatureAlgorithm, secretKey.getBytes(StandardCharsets.UTF_8))\n                // 设置过期时间\n                .setExpiration(exp);\n        return builder.compact();\n    }\n/*    public static String createJWT(String secretKey, long ttlMillis, Map<String, Object> claims) {\n        // 使用 Keys.hmacShaKeyFor 生成 SecretKey\n        SecretKey key = Keys.hmacShaKeyFor(secretKey.getBytes(StandardCharsets.UTF_8));\n        // 设置 JWT 的过期时间\n        long expMillis = System.currentTimeMillis() + ttlMillis;\n        Date exp = new Date(expMillis);\n\n        // 构建 JWT\n        JwtBuilder builder = Jwts.builder()\n                .setClaims(claims)\n                // 注意：这里需要传入 SecretKey 和算法\n                .signWith(key, SignatureAlgorithm.HS256)\n                .setExpiration(exp);\n        return builder.compact();\n    
}*/\n    /**\n     * Token解密\n     *\n     * @param secretKey jwt秘钥 此秘钥一定要保留好在服务端, 不能暴露出去, 否则sign就可以被伪造, 如果对接多个客户端建议改造成多个\n     * @param token     加密后的token\n     * @return\n     */\n    private static final Logger log = LoggerFactory.getLogger(JwtUtil.class);\n\n    /**\n     * 解析 JWT Token\n     * @param secretKey 密钥\n     * @param token JWT Token\n     * @return 解析后的 Claims\n     */\n    public static Claims parseJWT(String secretKey, String token) {\n        System.out.println(\"Token: \" + token);\n\n        if (token == null || token.isEmpty() || token.split(\"\\\\.\").length != 3) {\n            log.error(\"JWT格式错误: Token为空或格式不正确\");\n            throw new IllegalArgumentException(\"无效的JWT令牌\");\n        }\n        try {\n            // 使用与生成时相同的密钥\n            byte[] keyBytes = secretKey.getBytes(StandardCharsets.UTF_8);\n            SecretKey key = Keys.hmacShaKeyFor(keyBytes);\n            return Jwts.parserBuilder()\n                    .setSigningKey(key)\n                    .build()\n                    .parseClaimsJws(token)\n                    .getBody();\n        } catch (JwtException e) {\n            log.error(\"JWT解析失败: {}\", e.getMessage());\n            throw new IllegalArgumentException(\"无效的JWT令牌\");\n        }\n    }\n\n    /**\n     * 验证JWT的有效性\n     *\n     * @param token         加密后的token\n     * @param userSecretKey 用户的私钥\n     * @return 如果token有效返回true，否则返回false\n     */\n    public static boolean verifyToken(String token, String userSecretKey) {\n        try {\n            // 使用用户的私钥解析JWT\n            SecretKey secretKey = Keys.hmacShaKeyFor(userSecretKey.getBytes(StandardCharsets.UTF_8));\n            Jwts.parserBuilder()\n                    .setSigningKey(secretKey)\n                    .build()\n                    .parseClaimsJws(token);\n            // 如果解析成功，说明token有效\n            return true;\n        } catch (ExpiredJwtException e) {\n            // token已过期\n            System.out.println(\"Token has expired: \" + 
e.getMessage());\n        } catch (UnsupportedJwtException e) {\n            // 不支持的JWT格式\n            System.out.println(\"Unsupported JWT: \" + e.getMessage());\n        } catch (MalformedJwtException e) {\n            // JWT格式错误\n            System.out.println(\"Invalid JWT string: \" + e.getMessage());\n        } catch (SignatureException e) {\n            // 签名验证失败\n            System.out.println(\"Invalid JWT signature: \" + e.getMessage());\n        } catch (IllegalArgumentException e) {\n            // JWT字符串为空或null\n            System.out.println(\"JWT string is null or empty or only whitespace: \" + e.getMessage());\n        } catch (Exception e) {\n            // 其他异常\n            System.out.println(\"Other error: \" + e.getMessage());\n        }\n        // 如果捕获到异常，说明token无效\n        return false;\n    }\n}"
  },
  {
    "path": "Sparta/sparta-uac-console/src/main/resources/Account.thrift",
    "content": "\nservice AccountIface {\n    string queryNodeByPath(1: string path);\n}"
  },
  {
    "path": "Sparta/sparta-uac-console/src/test/java/com/walnut/sparta/account/TestSpartaAccount.java",
    "content": "package com.walnut.sparta.account;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.account.AccountManager;\nimport com.pinecone.hydra.account.UniformAccountManager;\nimport com.pinecone.hydra.account.ibatis.hydranium.UserMappingDriver;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\nimport org.springframework.context.ApplicationContextInitializer;\nimport org.springframework.context.ConfigurableApplicationContext;\nimport org.springframework.context.support.GenericApplicationContext;\n\nclass JesusChrist extends Tritium {\n    public JesusChrist( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public JesusChrist( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        SpartaAccountService sparta = new SpartaAccountService( \"SpartaAccountService\", this );\n\n\n        Thread shutdowner = new Thread(()->{\n            Debug.sleep( 5000 );\n            sparta.terminate();\n        });\n        //shutdowner.start();\n\n\n\n\n        sparta.setPrimarySources( SpartaBoot.class );\n\n\n        KOIMappingDriver koiAccountMappingDriver = new UserMappingDriver(\n                sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n        AccountManager     accountManager = new UniformAccountManager( koiAccountMappingDriver );\n\n        sparta.setInitializer(new Executor() {\n            @Override\n            public void execute() throws Exception {\n                sparta.getSpringApplication().addInitializers(new 
ApplicationContextInitializer<ConfigurableApplicationContext>() {\n                    @Override\n                    public void initialize( ConfigurableApplicationContext applicationContext ) {\n                        GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext;\n                        genericApplicationContext.registerBean(\"primaryAccount\", AccountManager.class, () -> (AccountManager) accountManager);\n                    }\n                });\n            }\n        });\n\n\n        sparta.execute();\n\n\n\n\n\n        this.getTaskManager().add( sparta );\n        this.getTaskManager().syncWaitingTerminated();\n    }\n}\n\n\npublic class TestSpartaAccount {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) );\n            jesus.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n    <parent>\n        <artifactId>sparta</artifactId>\n        <groupId>com.walnuts.sparta</groupId>\n        <version>2.5.1</version>\n    </parent>\n<!--    <build>-->\n<!--        <plugins>-->\n<!--            <plugin>-->\n<!--                <groupId>org.apache.maven.plugins</groupId>-->\n<!--                <artifactId>maven-compiler-plugin</artifactId>-->\n<!--                <configuration>-->\n<!--                    <source>11</source>-->\n<!--                    <target>11</target>-->\n<!--                </configuration>-->\n<!--            </plugin>-->\n<!--        </plugins>-->\n<!--    </build>-->\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.springframework.boot</groupId>\n                <artifactId>spring-boot-maven-plugin</artifactId>\n\n                <executions>\n                    <execution>\n                        <phase>package</phase>\n                        <goals>\n                            <goal>repackage</goal>\n                        </goals>\n                    </execution>\n                </executions>\n\n                <configuration>\n                    <includeSystemScope>true</includeSystemScope>\n                    <mainClass>com.walnut.sparta.ucdn.console.UCDNBoot</mainClass>\n                </configuration>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    
<groupId>com.walnut.sparta.ucdn.console</groupId>\n    <artifactId>sparta-ucdn-console</artifactId>\n    <version>2.1.0</version>\n\n\n    <dependencies>\n        <dependency>\n            <groupId>junit</groupId>\n            <artifactId>junit</artifactId>\n            <version>3.8.1</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-websocket</artifactId>\n            <version>3.4.3</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.sparta.api.uofs</groupId>\n            <artifactId>sparta-api-uofs</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.redstone.kernel</groupId>\n            <artifactId>redstone-architecture</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.redstone.stones</groupId>\n            <artifactId>redstone-message-stones</artifactId>\n            <version>2.1.0</version>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n          
  <artifactId>spring-boot-starter-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.mybatis.spring.boot</groupId>\n            <artifactId>mybatis-spring-boot-starter</artifactId>\n            <version>2.2.2</version>\n        </dependency>\n        <dependency>\n            <groupId>com.auth0</groupId>\n            <artifactId>java-jwt</artifactId>\n            <version>4.4.0</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-service-control</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/SpartaBoot.java",
    "content": "package com.walnut.sparta.ucdn.console;\n\nimport org.springframework.boot.autoconfigure.SpringBootApplication;\n\n@SpringBootApplication\npublic class SpartaBoot {\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/UCDNBoot.java",
    "content": "package com.walnut.sparta.ucdn.console;\n\nimport com.pinecone.Pinecone;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNContentDelivery;\n\n\npublic class UCDNBoot {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            UCDNContentDelivery ucdn = (UCDNContentDelivery) Pinecone.sys().getTaskManager().add(\n                    new UCDNContentDelivery( args, Pinecone.sys() )\n            );\n            ucdn.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/CDNFileController.java",
    "content": "package com.walnut.sparta.ucdn.console.api.controller.v2;\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.archcraft.redstone.response.GenericResultResponse;\nimport com.walnut.sparta.ucdn.console.domain.service.FileSystemService;\nimport com.walnut.sparta.ucdn.console.infrastructure.dto.RenameDTO;\nimport com.walnut.sparta.ucdn.console.infrastructure.dto.UpdateFileNameDTO;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.DeleteMapping;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestBody;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\n\n@RestController\n@RequestMapping( \"/api/v2/ucdn/file\" )\n@CrossOrigin\npublic class CDNFileController {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private FileSystemService fileSystemService;\n\n    @Resource\n    private VersionManage primaryVersion;\n\n    /**\n     * 创建文件\n     * @param filePath 文件路径\n     * @return 返回操作状态\n     */\n    @GetMapping(\"/create\")\n    public GenericResultResponse<String> createFile(@RequestParam String filePath ){\n        this.primaryFileSystem.affirmFileNode( filePath );\n        return GenericResultResponse.success();\n    }\n\n    /**\n     * 获取文件或文件夹属性\n     * @param nodeGuid 文件或文件夹guid\n     * @return 返回属性信息\n     */\n    @GetMapping(\"/attribute\")\n    public GenericResultResponse<FileTreeNode> attribute(@RequestParam(\"nodeGuid\") String nodeGuid ){\n        FileTreeNode fileTreeNode = 
this.primaryFileSystem.get(GUIDs.GUID128(nodeGuid));\n        return GenericResultResponse.success( fileTreeNode );\n    }\n\n    /**\n     * 移除文件夹或者文件\n     * @param fileGuid 文件夹或者文件guid\n     * @return 返回操作结果\n     */\n    @DeleteMapping(\"/remove\")\n    public GenericResultResponse<String> removeFile(String fileGuid ){\n        this.fileSystemService.remove( GUIDs.GUID128( fileGuid ) );\n        this.primaryFileSystem.remove( GUIDs.GUID128( fileGuid ) );\n        return GenericResultResponse.success();\n    }\n\n    /**\n     * 重命名文件或文件夹\n     * @param dto 信息\n     * @return 返回操作信息\n     */\n    @PostMapping(\"/rename\")\n    public GenericResultResponse<String> renameFile(@RequestBody RenameDTO dto){\n        this.primaryFileSystem.renameFile( dto.getPath(), dto.getNewName() );\n        return GenericResultResponse.success();\n    }\n\n    /**\n     * 重命名接口\n     * @param dto 重命名数据\n     * @return\n     */\n    @PostMapping(\"/updateFileName\")\n    public GenericResultResponse<String> updateFileName(@RequestBody UpdateFileNameDTO dto){\n        this.primaryFileSystem.renameFile( dto.getFilePath(), dto.getNewFileName() );\n        return GenericResultResponse.success();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/CDNFolderController.java",
    "content": "package com.walnut.sparta.ucdn.console.api.controller.v2;\n\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.archcraft.redstone.response.GenericResultResponse;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNConstants;\nimport com.walnut.sparta.ucdn.console.mapper.ClusterFileSyncMapper;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.DeleteMapping;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\nimport java.util.List;\n\n@RestController\n@RequestMapping( \"/api/v2/ucdn/folder\" )\n@CrossOrigin\npublic class CDNFolderController {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private VersionManage versionManage;\n\n    @Resource\n    private ClusterFileSyncMapper fileSyncMapper;\n\n    @Resource\n    private BucketInstrument bucketInstrument;\n\n    /**\n     * 获取文件夹下所有内容\n     * @param folderGuid 文件夹guid\n     * @returnS\n     */\n    @GetMapping(\"/folder/listItem\")\n    public String listItem(@RequestParam String folderGuid ){\n        Folder folder = this.primaryFileSystem.getFolder(GUIDs.GUID128(folderGuid));\n        List<FileTreeNode> fileTreeNodes = folder.listItem();\n        for ( FileTreeNode fileTreeNode : fileTreeNodes ) {\n            if ( this.versionManage.queryIsManage(fileTreeNode.getGuid()) ){\n                List<GUID> versions = 
versionManage.fetchVersions(fileTreeNode.getGuid());\n                GUID firstVersion = versions.get(0);\n                FileTreeNode firstVersionFileTreeNode = this.primaryFileSystem.get(firstVersion);\n                String fileName = firstVersionFileTreeNode.getName();\n                String fileExtension = \"\";\n\n                if (fileName.contains(UCDNConstants.period)) {\n                    fileExtension = fileName.substring(fileName.lastIndexOf(UCDNConstants.period) + 1);\n                }\n                fileTreeNode.setName(fileTreeNode.getName()+UCDNConstants.period+fileExtension);\n                Integer syncState = this.fileSyncMapper.queryState(fileTreeNode.getGuid());\n                if( syncState == null ){\n                    fileTreeNode.evinceFolder().setSyncState( 0 );\n                }\n                else {\n                    fileTreeNode.evinceFolder().setSyncState( 1 );\n                }\n            }\n        }\n        return  GenericResultResponse.success(fileTreeNodes).toJSONString() ;\n    }\n\n    /**\n     * 创建文件夹\n     * @param destDirPath 文件夹路径\n     * @return 返回操作状态\n     */\n    @GetMapping(\"/create\")\n    public GenericResultResponse<String> createFolder(@RequestParam(\"destDirPath\") String destDirPath ){\n        this.primaryFileSystem.affirmFolder( destDirPath );\n        return GenericResultResponse.success();\n    }\n\n    /**\n     * 获取文件或文件夹属性\n     * @param nodeGuid 文件或文件夹guid\n     * @return 返回属性信息\n     */\n    @GetMapping(\"/attribute\")\n    public GenericResultResponse< FileTreeNode > attribute(@RequestParam(\"nodeGuid\") String nodeGuid ){\n        FileTreeNode fileTreeNode = this.primaryFileSystem.get(GUIDs.GUID128(nodeGuid));\n        return GenericResultResponse.success( fileTreeNode );\n    }\n\n    /**\n     * 获取所有根文件夹\n     * @return 返回根信息\n     */\n    @GetMapping(\"/list/root\")\n    public String listRoot(){\n        List<FileTreeNode> roots = this.primaryFileSystem.fetchRoot();\n      
  return GenericResultResponse.success( roots ).toJSONString();\n    }\n\n    /**\n     * 移除文件夹或者文件\n     * @param fileGuid 文件夹或者文件guid\n     * @return 返回操作结果\n     */\n    @DeleteMapping(\"/remove/file\")\n    public GenericResultResponse<String> removeFile(String fileGuid ){\n        this.primaryFileSystem.remove( GUIDs.GUID128( fileGuid ) );\n        return GenericResultResponse.success();\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/ClientController.java",
    "content": "package com.walnut.sparta.ucdn.console.api.controller.v2;\n\nimport com.walnut.archcraft.redstone.response.GenericResultResponse;\nimport com.walnut.sparta.ucdn.console.domain.service.NodeFileDistributionService;\n\nimport org.springframework.beans.factory.annotation.Value;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\nimport org.springframework.web.multipart.MultipartFile;\n\nimport javax.annotation.Resource;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.UUID;\n\n@RestController\n@CrossOrigin\n@RequestMapping( \"/api/v2/ucdn/client\" )\npublic class ClientController {\n    @Resource\n    protected NodeFileDistributionService service;\n\n    @Value(\"${service.LocalUploadTemporaryWorkingDirectory}\")\n    private String majorTemporaryClusterFileDirectory;\n\n    @Value(\"${service.TemporaryFileExtends}\")\n    private String temporaryFileExtends;\n    /**\n     *\n     * @param filePath 文件要上传的路径\n     * @param file 文件本体\n     * @return\n     */\n    @PostMapping(\"/upload\")\n    public GenericResultResponse<String> upload(@RequestParam(\"filePath\") String filePath, @RequestParam(\"file\") MultipartFile file, @RequestParam(\"topic\") String topic ) throws IOException, InterruptedException {\n        File tempFile = new File(majorTemporaryClusterFileDirectory+ UUID.randomUUID()+temporaryFileExtends);\n        if( !tempFile.createNewFile() ){\n            throw new IOException( \"Creating file compromised, what :\" + tempFile.toPath() );\n        }\n        file.transferTo(tempFile);\n\n        this.service.upload( filePath,tempFile,topic );\n        if( !tempFile.delete() ){\n            throw new 
IOException( \"Purging temporary file compromised, what :\" + tempFile.toPath() );\n        }\n        return GenericResultResponse.success();\n    }\n\n    @GetMapping(\"/testDistribution\")\n    public void testDistribution( @RequestParam(\"path\") String path, @RequestParam(\"topic\") String topic ) throws IOException, InterruptedException {\n        this.service.testDistribution( path,topic );\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/SiteController.java",
    "content": "package com.walnut.sparta.ucdn.console.api.controller.v2;\n\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.bucket.entity.GenericSite;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.walnut.archcraft.redstone.response.GenericResultResponse;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.DeleteMapping;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PutMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\nimport java.util.List;\n\n@RestController\n@RequestMapping( \"/api/v2/ucdn/site\" )\n@CrossOrigin\npublic class SiteController implements Pinenut {\n    @Resource\n    private BucketInstrument bucketInstrument;\n\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    /**\n     * 创建站点\n     * @param siteName 站点名\n     * @return 返回操作结果\n     */\n    @PutMapping(\"/create\")\n    public GenericResultResponse<String> createSite(@RequestParam(\"siteName\") String siteName){\n        System.out.println(siteName);\n        Folder folder = this.primaryFileSystem.affirmFolder( siteName );\n        GenericSite site = new GenericSite();\n        site.setSiteName( siteName );\n        site.setMountPointGuid(folder.getGuid());\n        this.bucketInstrument.createSite( site );\n\n        return GenericResultResponse.success();\n    }\n\n    /**\n     * 删除站点\n     * @param siteName 站点名\n     * @return 操作结果\n     */\n    @DeleteMapping(\"/delete\")\n    public GenericResultResponse<String> 
removeSite(@RequestParam(\"siteName\") String siteName ){\n        this.bucketInstrument.removeSite(siteName);\n\n        return GenericResultResponse.success();\n    }\n\n    /**\n     * 获取全部站点\n     * @return 返回全部站点\n     */\n    @GetMapping(\"/list\")\n    public String listSite(){\n        List<Site> sites = this.bucketInstrument.listSite();\n        return GenericResultResponse.success(sites).toJSONString();\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/SiteNodeController.java",
    "content": "package com.walnut.sparta.ucdn.console.api.controller.v2;\n\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.entity.ServiceElement;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.bucket.entity.GenericSiteNode;\nimport com.pinecone.hydra.storage.bucket.entity.SiteNode;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.archcraft.redstone.response.GenericResultResponse;\nimport com.walnut.sparta.ucdn.console.infrastructure.dto.SiteNodeDTO;\nimport com.walnut.sparta.ucdn.console.infrastructure.vo.SiteNodeVO;\nimport com.walnut.sparta.ucdn.console.infrastructure.service.UCDNServiceManager;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.DeleteMapping;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestBody;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\nimport java.util.ArrayList;\nimport java.util.List;\n\n@RestController\n@RequestMapping( \"/api/v2/ucdn/siteNode\" )\n@CrossOrigin\npublic class SiteNodeController {\n    @Resource\n    private BucketInstrument bucketInstrument;\n\n    @Resource\n    private UCDNServiceManager UCDNServiceManager;\n\n    @Resource\n    private ServiceInstrument primaryService;\n\n    @GetMapping(\"/query/siteGuid\")\n    public String querySiteNodeBySiteGuid(@RequestParam(\"siteGuid\") String siteGuid){\n        
ServiceLifecycleIface lifecycleIface = this.UCDNServiceManager.getLifecycleIface();\n        ArrayList<SiteNodeVO> siteNodeVOS = new ArrayList<>();\n        List<SiteNode> siteNodes = this.bucketInstrument.querySiteNodeBySiteGuid(GUIDs.GUID128( siteGuid ));\n        for( SiteNode siteNode : siteNodes ){\n            if( lifecycleIface.hasOwnedServiceByServiceId( siteNode.getRelatedService().toString() ) ){\n                siteNode.setState( 1 );\n            }else {\n                siteNode.setState( 0 );\n            }\n            SiteNodeVO siteNodeVO = new SiteNodeVO(siteNode);\n            siteNodeVO.setRelatedServicePath( this.primaryService.getPath( siteNode.getRelatedService() ) );\n            siteNodeVOS.add( siteNodeVO );\n        }\n        return GenericResultResponse.success(siteNodeVOS).toJSONString();\n    }\n\n    @DeleteMapping(\"/remove/siteNodeGuid\")\n    public GenericResultResponse<String> removeSiteNode(@RequestParam(\"siteNodeGuid\") String siteNodeGuid ){\n        ServiceLifecycleIface lifecycleIface = this.UCDNServiceManager.getLifecycleIface();\n        SiteNode siteNode = this.bucketInstrument.querySiteNode(GUIDs.GUID128(siteNodeGuid));\n        lifecycleIface.deregisterServiceByInstanceId( siteNode.getRelatedService().toString() );\n        this.bucketInstrument.removeSiteNode( GUIDs.GUID128( siteNodeGuid ) );\n        return GenericResultResponse.success();\n    }\n\n    @PostMapping(\"/create\")\n    public GenericResultResponse<String> createSiteNode(@RequestBody SiteNodeDTO dto){\n        GenericSiteNode siteNode = new GenericSiteNode();\n        siteNode.setSiteGuid( GUIDs.GUID128( dto.getSiteGuid() ) );\n        siteNode.setNodeName( dto.getNodeName() );\n        siteNode.setRelatedService( GUIDs.GUID128( dto.getRelatedService() ) );\n        GUID guid = this.bucketInstrument.createSiteNode(siteNode);\n        return GenericResultResponse.success(guid.toString());\n    }\n\n    @PostMapping(\"/update\")\n    public 
GenericResultResponse<String> updateSiteNode(@RequestBody SiteNodeDTO dto ){\n        GenericSiteNode siteNode = new GenericSiteNode();\n        siteNode.setNodeName( dto.getNodeName() );\n        siteNode.setNodeGuid( GUIDs.GUID128( dto.getNodeGuid() ) );\n        siteNode.setState( dto.getState() );\n        siteNode.setIsEnabled( dto.getIsEnabled() );\n        siteNode.setSiteGuid( GUIDs.GUID128( dto.getSiteGuid() ) );\n        this.bucketInstrument.updateSiteNode( siteNode );\n        return GenericResultResponse.success();\n    }\n\n    @GetMapping(\"/fetch/allService\")\n    public String fetchAllService(){\n        List<ServiceElement> serviceElements = this.primaryService.fetchAllService();\n        return GenericResultResponse.success(serviceElements).toJSONString();\n    }\n\n    @PostMapping(\"/test/registerService\")\n    public GenericResultResponse<String> testRegisterService(@RequestBody RegisterServiceDTO dto ){\n        this.UCDNServiceManager.getLifecycleIface().registerService( dto );\n        Debug.trace( \"是否存在\" + this.UCDNServiceManager.getLifecycleIface().hasOwnedServiceByServiceId( dto.getServiceId() ) );\n        return GenericResultResponse.success();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/TransmitController.java",
    "content": "package com.walnut.sparta.ucdn.console.api.controller.v2;\n\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.source.SiteManipulator;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.external.ExternalFile;\nimport com.pinecone.hydra.storage.file.external.GenericNativeExternalFile;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.entity.GenericFileNode;\nimport com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;\nimport com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.io.TitanOutputStreamChanface;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.version.entity.TitanVersion;\nimport com.pinecone.hydra.storage.version.entity.TitanVersionMapping;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.archcraft.redstone.response.GenericResultResponse;\nimport com.walnut.sparta.ucdn.console.domain.service.NodeFileDistributionService;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNConsoleContents;\nimport com.walnut.sparta.ucdn.console.infrastructure.dto.ClusterFileSyncDTO;\n\nimport org.springframework.beans.factory.annotation.Value;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport 
org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestBody;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\nimport org.springframework.web.multipart.MultipartFile;\n\nimport javax.annotation.Resource;\nimport javax.servlet.ServletOutputStream;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\nimport java.util.Map;\nimport java.util.UUID;\n\n@RestController\n@RequestMapping( \"/api/v2/ucdn/transmit\" )\n@CrossOrigin\npublic class TransmitController {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager primaryVolume;\n\n    @Resource\n    private BucketInstrument bucketInstrument;\n\n    @Resource\n    private VersionManage primaryVersion;\n    @Resource\n    private VersionManage versionManage;\n\n    @Resource\n    private NodeFileDistributionService fileDistributionService;\n\n    @Value(\"${service.LocalUploadTemporaryWorkingDirectory}\")\n    private String majorTemporaryClusterFileDirectory;\n\n    @Value(\"${service.TemporaryFileExtends}\")\n    private String temporaryFileExtends;\n\n    @GetMapping(\"/download/guid\")\n    public void  getFile(HttpServletRequest request, HttpServletResponse response) throws IOException {\n        Map<String, String[]> parameterMap = request.getParameterMap();\n        String[] guids = parameterMap.get(\"guid\");\n        GUID storageObjectGuid = null;\n        if( guids != null ){\n            storageObjectGuid = GUIDs.GUID128( guids[0] );\n        }\n\n        ServletOutputStream outputStream = response.getOutputStream();\n        TitanOutputStreamChanface kChannel 
= new TitanOutputStreamChanface(outputStream);\n\n        FileNode storageObject = (FileNode) this.primaryFileSystem.get(storageObjectGuid);\n\n        TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, storageObject, kChannel);\n        this.primaryFileSystem.export( entity );\n\n    }\n\n    /**\n     * 上传文件\n     * @param filePath 目标路径\n     * @param version 版本号\n     * @param file 文件\n     * @param siteName 站点\n     * @return 返回操作结果\n     */\n    @PostMapping(\"/upload\")\n    public GenericResultResponse<String> CDNUpload(@RequestParam(\"siteName\") String siteName, @RequestParam(\"filePath\") String filePath, @RequestParam(\"version\") String version, @RequestParam(\"file\") MultipartFile file) throws IOException {\n        SiteManipulator siteManipulator = this.bucketInstrument.getSiteManipulator();\n        Site site = siteManipulator.querySiteByName(siteName);\n        if( site == null ){\n            return GenericResultResponse.error(\"站点不存在\");\n        }\n        int dotIndex = filePath.lastIndexOf(UCDNConsoleContents.PERIOD);\n        String baseName = filePath.substring(0, dotIndex);\n        String extension = filePath.substring(dotIndex + 1);\n        String realFilePath = this.primaryFileSystem.getPath(site.getMountPointGuid()) + UCDNConsoleContents.FORWARD_SLASH + baseName;\n\n        FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment();\n        Folder node = this.primaryFileSystem.affirmFolder(realFilePath);\n        String storageObjectPath = realFilePath + UCDNConsoleContents.VERSION_PREFIX+ UCDNConsoleContents.FORWARD_SLASH + version +UCDNConsoleContents.PERIOD+ extension;\n        File tempFile = new File(majorTemporaryClusterFileDirectory+ UUID.randomUUID()+temporaryFileExtends);\n        if( !tempFile.createNewFile() ){\n            throw new IOException( \"Creating file compromised, what :\" + tempFile.toPath() );\n        }\n        
file.transferTo(tempFile);\n\n        FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ);\n        TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );\n        FileNode fileNode = fsNodeAllotment.newFileNode();\n        fileNode.setDefinitionSize( tempFile.length() );\n        fileNode.setName( tempFile.getName() );\n        TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem,storageObjectPath, fileNode,titanFileChannelKChannel,this.primaryVolume );\n\n        this.primaryFileSystem.receive( receiveEntity );\n\n        FileTreeNode storageObject = this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(storageObjectPath));\n        TitanVersion titanVersion = new TitanVersion();\n        titanVersion.setVersion( version );\n        titanVersion.setFileGuid( node.getGuid() );\n        titanVersion.setTargetStorageObjectGuid( storageObject.getGuid() );\n        titanVersion.setVersionGuid( this.primaryFileSystem.getGuidAllocator().nextGUID() );\n        TitanVersionMapping versionMapping = new TitanVersionMapping();\n        versionMapping.setFileGuid(titanVersion.getFileGuid());\n        versionMapping.setEnableVersionGuid(titanVersion.getTargetStorageObjectGuid());\n        versionMapping.setVersionGuid((titanVersion.getVersionGuid()));\n        this.versionManage.insertVesionMapping(versionMapping);\n        this.primaryVersion.insert( titanVersion );\n        if( !tempFile.delete() ){\n            throw new IOException( \"Purging temporary file compromised, what :\" + tempFile.toPath() );\n        }\n\n        return GenericResultResponse.success();\n    }\n    /**\n     * 使用文件路径下载文件\n     */\n    @GetMapping(\"/download/path\")\n    public void getFileByPath(HttpServletRequest request, HttpServletResponse response) throws IOException {\n        Map<String, String[]> parameterMap = request.getParameterMap();\n        String[] paths = 
parameterMap.get(\"path\");\n        String path = null;\n\n        if(paths != null){\n            path = paths[0];\n        }\n\n        ServletOutputStream outputStream = response.getOutputStream();\n        TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(outputStream);\n\n        ElementNode elementNode = this.primaryFileSystem.queryElement(path);\n        if(elementNode instanceof GenericNativeExternalFile){\n            ExternalFile externalFile = (ExternalFile) elementNode;\n            File nativeFile = externalFile.getNativeFile();\n            try (FileInputStream fileInputStream = new FileInputStream(nativeFile)) {\n                byte[] buffer = new byte[1024];\n                int bytesRead;\n                while ((bytesRead = fileInputStream.read(buffer)) != -1) {\n                    outputStream.write(buffer, 0, bytesRead);\n                }\n                // 刷新输出流\n                outputStream.flush();\n                return;\n            } catch (IOException e) {\n                // 处理异常，比如记录日志等\n                e.printStackTrace();\n            }\n        }\n\n        if( elementNode instanceof GenericFileNode){\n            FileNode fileNode = (FileNode) elementNode;\n            TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, fileNode, kChannel);\n            this.primaryFileSystem.export( entity );\n        }\n    }\n\n    @PostMapping(\"/clusterFileSync\")\n    public void clusterFileSync(@RequestBody ClusterFileSyncDTO dto) throws IOException, InterruptedException {\n        this.fileDistributionService.clusterFileSync( dto );\n    }\n\n    private Chanface getKChannel( File file ) throws IOException {\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);\n        return new TitanFileChannelChanface( channel );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/v2/VersionController.java",
    "content": "package com.walnut.sparta.ucdn.console.api.controller.v2;\n\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.version.entity.TitanVersion;\nimport com.pinecone.hydra.storage.version.entity.TitanVersionMapping;\nimport com.pinecone.hydra.storage.version.entity.VersionMapping;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.archcraft.redstone.response.GenericResultResponse;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PutMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\n\n@RestController\n@RequestMapping( \"/api/v2/ucdn/version\" )\n@CrossOrigin\npublic class VersionController {\n    @Resource\n    private VersionManage versionManage;\n\n    @PutMapping(\"/create/VersionMapping\")\n    public GenericResultResponse<String> createVersionMapping(@RequestParam(\"fileGuid\") String fileGuid,\n                                                              @RequestParam(\"enableVersionGuid\") String enableVersionGuid\n    ) {\n        TitanVersion titanVersion = this.versionManage.queryByTargetStorageObjectGuid(GUIDs.GUID128(enableVersionGuid));\n        TitanVersionMapping versionMapping = new TitanVersionMapping();\n        versionMapping.setFileGuid(GUIDs.GUID128(fileGuid));\n        versionMapping.setEnableVersionGuid(GUIDs.GUID128(enableVersionGuid));\n        versionMapping.setVersionGuid((titanVersion.getVersionGuid()));\n        if (!this.versionManage.isExistEnableVersionMapping(versionMapping.getEnableVersionGuid())){\n            this.versionManage.insertVesionMapping(versionMapping);\n        }\n        else\n            this.versionManage.UpdateVesionMapping(versionMapping);\n        
return GenericResultResponse.success();\n    }\n    @GetMapping(\"/query/VersionMapping\")\n    public String queryVersionMapping(@RequestParam(\"fileGuid\") String fileGuid) {\n        VersionMapping versionMapping = this.versionManage.queryVersionMapping(GUIDs.GUID128(fileGuid));\n        TitanVersion version =new TitanVersion();\n        if (versionMapping != null){\n            version=this.versionManage.queryByTargetStorageObjectGuid(versionMapping.getEnableVersionGuid());\n        }\n        return GenericResultResponse.success(version).toJSONString();\n    }\n    @PutMapping(\"/update/VersionMapping\")\n    public GenericResultResponse<String> updateVersionMapping(\n            @RequestParam(\"fileGuid\") String fileGuid,\n            @RequestParam(\"enableVersionGuid\") String enableVersionGuid) {\n        VersionMapping versionMapping = this.versionManage.queryVersionMapping(GUIDs.GUID128(fileGuid));\n        versionMapping.setEnableVersionGuid(GUIDs.GUID128(enableVersionGuid));\n        this.versionManage.UpdateVesionMapping(versionMapping);\n        return GenericResultResponse.success();\n    }\n\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/api/controller/xx.java",
    "content": "package com.walnut.sparta.ucdn.console.api.controller;\n\npublic class xx {\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/config/AppCDNMyBatisConfig.java",
    "content": "package com.walnut.sparta.ucdn.console.config;\n\nimport org.apache.ibatis.session.SqlSessionFactory;\nimport org.mybatis.spring.SqlSessionFactoryBean;\nimport org.mybatis.spring.annotation.MapperScan;\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.core.io.support.PathMatchingResourcePatternResolver;\nimport org.springframework.core.io.support.ResourcePatternResolver;\n\nimport javax.sql.DataSource;\n\n@Configuration\n@MapperScan(\"com.walnut.sparta.ucdn.console.mapper\")\npublic class AppCDNMyBatisConfig {\n\n    @Bean\n    public SqlSessionFactory sqlSessionFactory( DataSource dataSource ) throws Exception {\n        SqlSessionFactoryBean sqlSessionFactoryBean = new SqlSessionFactoryBean();\n        sqlSessionFactoryBean.setDataSource(dataSource);\n\n        ResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();\n        sqlSessionFactoryBean.setTypeHandlersPackage( \"com.pinecone.hydra.entity.ibatis\" );\n        sqlSessionFactoryBean.setMapperLocations(resolver.getResources(\"classpath*:mapper/*.xml\"));\n\n        return sqlSessionFactoryBean.getObject();\n    }\n}"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/config/BeanConfig.java",
    "content": "package com.walnut.sparta.ucdn.console.config;\n\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.registry.dto.RegisterServiceDTO;\nimport com.pinecone.hydra.umb.kafka.WolfMCKafkaClient;\nimport com.pinecone.hydra.umb.rocket.WolfMCRocketClient;\nimport com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;\nimport com.pinecone.hydra.umb.wolf.WolfMCBClient;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNContentDelivery;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNConstants;\nimport com.walnut.sparta.ucdn.console.ufm.FileMultiDistributionIface;\nimport com.walnut.sparta.ucdn.console.ufm.SessionValidator;\nimport com.walnut.sparta.ucdn.console.infrastructure.service.UCDNCentralServiceManager;\nimport com.walnut.sparta.ucdn.console.infrastructure.service.UCDNServiceManager;\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\n\nimport javax.annotation.Resource;\n\n@Configuration\npublic class BeanConfig {\n    @Resource\n    private UCDNContentDelivery UCDNContentDelivery;\n\n    @Resource\n    private ServiceInstrument primaryService;\n\n    @Bean( name = \"kafkaFileServiceClient\")\n    public UlfBroadcastControlNode kafkaFileServiceClient(){\n        UlfBroadcastControlNode client = new WolfMCBClient(new WolfMCKafkaClient(UCDNConstants.KafkaServer), \"\", this.UCDNContentDelivery, WolfMCExpress.class);\n        client.compile( FileMultiDistributionIface.class,false );\n        return client;\n    }\n\n    @Bean( name = \"rocketFileServiceClient\")\n    public UlfBroadcastControlNode rocketFileServiceClient(){\n        UlfBroadcastControlNode client = new WolfMCBClient(new WolfMCRocketClient(UCDNConstants.RocketServer,UCDNConstants.UCDNFileServiceTransmitGroup), \"\", this.UCDNContentDelivery, WolfMCExpress.class);\n        client.compile( SessionValidator.class,false );\n      
  return client;\n    }\n\n    @Bean\n    public UCDNServiceManager ucdnServiceManager() throws Exception {\n        UCDNCentralServiceManager ucdnServiceManager = new UCDNCentralServiceManager(this.UCDNContentDelivery);\n        ucdnServiceManager.getLifecycleIface().registerService( new RegisterServiceDTO( UCDNConstants.clientId, UCDNConstants.serviceId, UCDNConstants.deployId ));\n        return ucdnServiceManager;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/config/WebConfig.java",
    "content": "package com.walnut.sparta.ucdn.console.config;\n\n\nimport com.walnut.sparta.ucdn.console.interceptor.JWTInterceptor;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.web.servlet.config.annotation.InterceptorRegistry;\nimport org.springframework.web.servlet.config.annotation.WebMvcConfigurer;\n\nimport javax.annotation.Resource;\n\n@Configuration\npublic class WebConfig implements WebMvcConfigurer {\n    @Resource\n    private JWTInterceptor jwtInterceptor;\n\n    @Override\n    public void addInterceptors(InterceptorRegistry registry) {\n        registry.addInterceptor(jwtInterceptor).addPathPatterns(\"/**\");\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/config/WebSocketConfig.java",
    "content": "package com.walnut.sparta.ucdn.console.config;\n\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.messaging.simp.config.MessageBrokerRegistry;\nimport org.springframework.web.socket.config.annotation.EnableWebSocketMessageBroker;\nimport org.springframework.web.socket.config.annotation.StompEndpointRegistry;\nimport org.springframework.web.socket.config.annotation.WebSocketMessageBrokerConfigurer;\nimport org.springframework.web.socket.server.standard.ServerEndpointExporter;\n\n@Configuration\npublic class WebSocketConfig {\n    @Bean\n    public ServerEndpointExporter serverEndpointExporter(){\n        return new ServerEndpointExporter();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/FileSystemService.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FileSystemService {\n    void remove( GUID fileGuid );\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/NodeFileDistributionService.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service;\n\nimport com.walnut.sparta.ucdn.console.infrastructure.dto.ClusterFileSyncDTO;\n\nimport java.io.File;\nimport java.io.IOException;\n\npublic interface NodeFileDistributionService {\n    void upload( String path, File file, String topic ) throws IOException, InterruptedException;\n\n    void testDistribution( String path, String topic ) throws IOException, InterruptedException;\n\n    void clusterFileSync( ClusterFileSyncDTO dto ) throws IOException, InterruptedException;\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileSyncTransaction.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.cluster;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ClusterFileSyncTransaction extends Pinenut {\n    int getClusterNodeCount();\n\n    void setClusterNodeCount( int clusterNodeCount );\n\n    int checkRemainingCount();\n\n    int decreaseRemainingCount();\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileSyncTransactionManager.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.cluster;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ConcurrentMap;\n\npublic class ClusterFileSyncTransactionManager implements ClusterFileTransactionManager {\n    private ConcurrentMap< GUID, ConcurrentMap<GUID, ClusterFileSyncTransaction>> transactionMap;\n\n    public ClusterFileSyncTransactionManager(){\n        this.transactionMap = new ConcurrentHashMap<>();\n    }\n\n    @Override\n    public void register(GUID fileGuid, ConcurrentMap<GUID, ClusterFileSyncTransaction> transactions) {\n        this.transactionMap.put( fileGuid, transactions );\n    }\n\n    @Override\n    public ConcurrentMap<GUID, ClusterFileSyncTransaction> getTransactions(GUID fileGuid ) {\n        return this.transactionMap.get( fileGuid );\n    }\n\n    @Override\n    public void removeTransactions( GUID fileGuid ) {\n        this.transactionMap.remove( fileGuid );\n    }\n\n    @Override\n    public boolean checkTransactionFinished( GUID fileGuid ) {\n        ConcurrentMap<GUID, ClusterFileSyncTransaction> transactions = this.getTransactions(fileGuid);\n        for( ClusterFileSyncTransaction clusterFileSyncTransaction : transactions.values() ){\n            if( clusterFileSyncTransaction.checkRemainingCount() != 0 ){\n                return false;\n            }\n        }\n        return true;\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileSynchronizationService.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.cluster;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface ClusterFileSynchronizationService extends Pinenut {\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileSynchronizationServiceImpl.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.cluster;\n\nimport javax.annotation.Resource;\n\npublic class ClusterFileSynchronizationServiceImpl implements ClusterFileSynchronizationService {\n    @Resource\n    private UFMTransactionSynchronizedNotifier webSocketService;\n\n    private ClusterFileTransactionManager      transactionManager;\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/ClusterFileTransactionManager.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.cluster;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.concurrent.ConcurrentMap;\n\npublic interface ClusterFileTransactionManager extends Manager {\n\n    void register( GUID fileGuid, ConcurrentMap<GUID, ClusterFileSyncTransaction> transactions );\n\n    ConcurrentMap<GUID, ClusterFileSyncTransaction> getTransactions(GUID fileGuid );\n\n    void removeTransactions( GUID fileGuid );\n\n    boolean checkTransactionFinished( GUID fileGuid );\n\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/FileSynchronizedEventSubscriber.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.cluster;\n\nimport java.io.IOException;\nimport java.util.concurrent.ConcurrentMap;\nimport javax.websocket.Session;\n\nimport com.pinecone.framework.system.ProvokeHandleException;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.walnut.sparta.ucdn.console.infrastructure.vo.SyncFinishedVO;\nimport com.walnut.sparta.ucdn.console.mapper.ClusterFileSyncMapper;\nimport com.walnut.sparta.ucdn.console.ufm.event.UFMEventSubscriber;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\npublic class FileSynchronizedEventSubscriber implements UFMEventSubscriber {\n    private Logger                             logger;\n\n    private VersionManage                      versionManage;\n\n    private ClusterFileTransactionManager      transactionManager;\n\n    private UFMTransactionSynchronizedNotifier transactionSynchronizedNotifier;\n\n    private ClusterFileSyncMapper              clusterFileSyncMapper;\n\n    public FileSynchronizedEventSubscriber(\n            VersionManage versionManage, ClusterFileTransactionManager transactionManager,\n            UFMTransactionSynchronizedNotifier transactionSynchronizedNotifier, ClusterFileSyncMapper clusterFileSyncMapper\n    ) {\n        this.logger                             = LoggerFactory.getLogger( this.getClass() );\n        this.versionManage                      = versionManage;\n        this.transactionManager                 = transactionManager;\n        this.transactionSynchronizedNotifier    = transactionSynchronizedNotifier;\n        this.clusterFileSyncMapper = clusterFileSyncMapper;\n    }\n\n    @Override\n    public void afterEventTriggered( String path, String serviceId, FileNode fileNode ) {\n        try {\n            GUID versionFileGuid = 
this.versionManage.getVersionFileByGuid(fileNode.getGuid());\n            ConcurrentMap<GUID, ClusterFileSyncTransaction> map = this.transactionManager.getTransactions(versionFileGuid);\n            ClusterFileSyncTransaction clusterFileSyncTransaction = map.get(fileNode.getGuid());\n            clusterFileSyncTransaction.decreaseRemainingCount();\n            Session session = this.transactionSynchronizedNotifier.getSession();\n            SyncFinishedVO finishedVO = new SyncFinishedVO(path, serviceId, 1);\n            session.getBasicRemote().sendText(finishedVO.toJSONString());\n            if( this.transactionManager.checkTransactionFinished( versionFileGuid ) ){\n                this.logger.info( \"File {} synchronized done.\", versionFileGuid );\n                this.clusterFileSyncMapper.insert( versionFileGuid, 1,null );\n                session.close();\n                this.transactionManager.removeTransactions( versionFileGuid );\n            }\n        }\n        catch ( IOException e ) {\n            throw new ProvokeHandleException( e );\n        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/MultiClusterFileSyncTransaction.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.cluster;\n\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic class MultiClusterFileSyncTransaction implements ClusterFileSyncTransaction {\n    private int             clusterNodeCount;\n\n    private AtomicInteger   remainingCount;\n\n    public MultiClusterFileSyncTransaction( int clusterNodeCount ) {\n        this.clusterNodeCount = clusterNodeCount;\n        this.remainingCount = new AtomicInteger( clusterNodeCount );\n    }\n\n    public MultiClusterFileSyncTransaction() {\n    }\n\n    @Override\n    public int getClusterNodeCount() {\n        return clusterNodeCount;\n    }\n\n    @Override\n    public void setClusterNodeCount( int clusterNodeCount ) {\n        this.clusterNodeCount = clusterNodeCount;\n    }\n\n    @Override\n    public int checkRemainingCount() {\n        return this.remainingCount.getAcquire();\n    }\n\n    @Override\n    public int decreaseRemainingCount() {\n        return this.remainingCount.decrementAndGet();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/cluster/UFMTransactionSynchronizedNotifier.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.cluster;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.stereotype.Component;\n\nimport javax.websocket.OnClose;\nimport javax.websocket.OnError;\nimport javax.websocket.OnMessage;\nimport javax.websocket.OnOpen;\nimport javax.websocket.Session;\nimport javax.websocket.server.ServerEndpoint;\n\n@ServerEndpoint( value = \"/websocket/ucdn/monitor/nodes/transactionSynchronized\" )\n@Component\npublic class UFMTransactionSynchronizedNotifier {\n    private Logger log = LoggerFactory.getLogger( this.getClass() );\n\n    private static Session session;\n\n    @OnOpen\n    public void onOpen( Session session ){\n        UFMTransactionSynchronizedNotifier.session = session;\n    }\n\n    @OnMessage\n    public void onMessage( String msg, Session session ){\n        this.log.info(msg);\n    }\n\n    @OnClose\n    public void onClose(){\n        this.log.info( \"TransactionSynchronized notifier has been successfully shutdown.\" );\n    }\n\n    @OnError\n    public void onError( Session session, Throwable error ){\n        this.log.error( \"TransactionSynchronized error: \", error );\n    }\n\n    public Session getSession(){\n        return session;\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/impl/FileSystemServiceImpl.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.impl;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ClusterPage;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.walnut.sparta.ucdn.console.domain.service.FileSystemService;\nimport org.springframework.stereotype.Service;\n\nimport javax.annotation.Resource;\nimport java.sql.SQLException;\nimport java.util.List;\n\n@Service\npublic class FileSystemServiceImpl implements FileSystemService {\n\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager primaryVolume;\n\n    @Override\n    public void remove( GUID fileGuid ){\n        FileTreeNode fileTreeNode = this.primaryFileSystem.get(fileGuid);\n        if( fileTreeNode instanceof Folder){\n            Folder folder = (Folder) fileTreeNode;\n            List<TreeNode> children = this.primaryFileSystem.getChildren(folder.getGuid());\n            for( TreeNode treeNode : children ){\n                this.remove( treeNode.getGuid() );\n            }\n        }\n        else if( fileTreeNode instanceof FileNode){\n            FileNode fileNode = (FileNode) fileTreeNode;\n            ClusterPage clusterPage = this.primaryFileSystem.fetchClustersByFileGuid( fileNode.getGuid() );\n            long fileClusterNum = clusterPage.getClusters();\n            for( long i = 0; i < fileClusterNum; i++ ){\n                LocalCluster frame = clusterPage.getLocalCluster( i );\n                try {\n                    this.primaryVolume.removeStorageObject( frame );\n                } catch 
(SQLException e) {\n                    throw new RuntimeException(e);\n                }\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/domain/service/impl/NodeFileDistributionServiceImpl.java",
    "content": "package com.walnut.sparta.ucdn.console.domain.service.impl;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.sparta.ucdn.console.domain.service.cluster.ClusterFileSyncTransaction;\nimport com.walnut.sparta.ucdn.console.domain.service.cluster.ClusterFileSyncTransactionManager;\nimport com.walnut.sparta.ucdn.console.domain.service.cluster.ClusterFileTransactionManager;\nimport com.walnut.sparta.ucdn.console.domain.service.cluster.FileSynchronizedEventSubscriber;\nimport com.walnut.sparta.ucdn.console.domain.service.cluster.UFMTransactionSynchronizedNotifier;\nimport com.walnut.sparta.ucdn.console.domain.service.cluster.MultiClusterFileSyncTransaction;\nimport com.walnut.sparta.ucdn.console.infrastructure.dto.ClusterFileSyncDTO;\nimport com.walnut.sparta.ucdn.console.mapper.ClusterFileSyncMapper;\nimport com.walnut.sparta.ucdn.console.ufm.FileMultiDistributionService;\nimport com.walnut.sparta.ucdn.console.domain.service.NodeFileDistributionService;\nimport com.walnut.sparta.ucdn.console.ufm.UOFSFileMultiDistributionService;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNContentDelivery;\nimport com.walnut.sparta.ucdn.console.infrastructure.service.UCDNServiceManager;\n\nimport org.springframework.stereotype.Service;\n\nimport javax.annotation.PostConstruct;\nimport 
javax.annotation.Resource;\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\nimport java.util.List;\nimport java.util.concurrent.ConcurrentHashMap;\n\n@Service\npublic class NodeFileDistributionServiceImpl implements NodeFileDistributionService {\n\n    @Resource\n    private KOMFileSystem                               primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager                        primaryVolume;\n\n    private FileMultiDistributionService                fileMultiDistributionService;\n\n    @Resource\n    private UCDNContentDelivery UCDNContentDelivery;\n\n    @Resource\n    private UCDNServiceManager                          ucdnServiceManager;\n\n    @Resource\n    private VersionManage                               primaryVersion;\n\n    private ClusterFileTransactionManager               clusterFileTransactionManager;\n\n    @Resource\n    private UFMTransactionSynchronizedNotifier          ufmTransactionSynchronizedNotifier;\n\n    @Resource\n    private ClusterFileSyncMapper                       clusterFileSyncMapper;\n\n\n\n    @PostConstruct\n    private void init() throws UMBServiceException {\n        this.clusterFileTransactionManager = new ClusterFileSyncTransactionManager();\n        this.fileMultiDistributionService = new UOFSFileMultiDistributionService( this.UCDNContentDelivery.getSpartaUCDNService() );\n        this.fileMultiDistributionService.registerFileTransmitCompleteEventSubscriber( new FileSynchronizedEventSubscriber(\n                this.primaryVersion, this.clusterFileTransactionManager,this.ufmTransactionSynchronizedNotifier, this.clusterFileSyncMapper )\n        );\n        this.fileMultiDistributionService.start();\n    }\n\n    @Override\n    public void upload( String path, File file, String topic ) throws IOException, InterruptedException {\n        FSNodeAllotment fsNodeAllotment = 
this.primaryFileSystem.getFSNodeAllotment();\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);\n        TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );\n        FileNode fileNode = fsNodeAllotment.newFileNode();\n        fileNode.setDefinitionSize( file.length() );\n        fileNode.setName( file.getName() );\n        TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem,path, fileNode,titanFileChannelKChannel,this.primaryVolume );\n\n        this.primaryFileSystem.receive( receiveEntity );\n\n        if( !topic.isBlank() ){\n            this.fileMultiDistributionService.fileDistribution( fileNode, topic );\n        }\n\n    }\n\n    @Override\n    public void testDistribution(String path, String topic) throws IOException, InterruptedException {\n        FileNode fileNode = (FileNode)this.primaryFileSystem.queryElement(path);\n\n        this.fileMultiDistributionService.fileDistribution( fileNode, topic );\n    }\n\n    @Override\n    public void clusterFileSync( ClusterFileSyncDTO dto ) throws IOException, InterruptedException {\n        Folder folder = this.primaryFileSystem.getFolder( GUIDs.GUID128(dto.getFileGuid()) );\n        List<GUID> guids = this.primaryVersion.fetchVersions(folder.getGuid());\n        ServiceLifecycleIface lifecycleIface = this.ucdnServiceManager.getLifecycleIface();\n        int serviceNum = lifecycleIface.countRegisteredService();\n\n        ConcurrentHashMap<GUID, ClusterFileSyncTransaction> map = new ConcurrentHashMap<>();\n        for ( GUID guid : guids ){\n            MultiClusterFileSyncTransaction transaction = new MultiClusterFileSyncTransaction(serviceNum);\n            map.put( guid, transaction );\n        }\n\n        this.clusterFileTransactionManager.register( folder.getGuid(), map );\n        for( GUID guid : guids ){\n            FileNode fileNode = this.primaryFileSystem.getFileNode(guid);\n        
    this.fileMultiDistributionService.fileDistribution(\n                    fileNode, this.fileMultiDistributionService.getConfig().getFileCloudDistributeTransmitTopic()\n            );\n        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/ClusterLock.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\nimport java.util.concurrent.atomic.AtomicInteger;\n\npublic class ClusterLock implements Pinenut {\n    private AtomicInteger waitThreatNum;\n\n    public ClusterLock(){\n        this.waitThreatNum = new AtomicInteger(0);\n    }\n\n    public AtomicInteger getWaitThreatNum(){\n        return this.waitThreatNum;\n    }\n\n    public void increment(){\n        this.waitThreatNum.getAndIncrement();\n    }\n\n    public void decrement(){\n        this.waitThreatNum.getAndDecrement();\n    }\n\n }\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/FSContentDeliveryService.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface FSContentDeliveryService extends Pinenut {\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/SpartaUCDNService.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver;\nimport com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.UniformServiceInstrument;\nimport com.pinecone.hydra.service.registry.server.UniformServiceManager;\nimport com.pinecone.hydra.service.registry.ulf.HuskyServiceAppointServer;\nimport com.pinecone.hydra.storage.bucket.TitanBucketInstrument;\nimport com.pinecone.hydra.storage.file.FileSystemConfig;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.KernelFileSystemConfig;\nimport com.pinecone.hydra.storage.file.UniformObjectFileSystem;\nimport com.pinecone.hydra.storage.version.TitanVersionManage;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.volume.KernelVolumeConfig;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.system.component.ComponentInitializationException;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.version.ibatis.hydranium.VersionMappingDriver;\nimport com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.summer.spring.Springron;\nimport com.walnut.archcraft.redstone.messge.PrimaryMessageWareStone;\nimport 
com.walnut.sparta.ucdn.console.SpartaBoot;\nimport com.walnut.sparta.ucdn.console.ufm.UCFMConfig;\nimport com.walnut.sparta.ucdn.console.ufm.UFMConfig;\n\nimport org.springframework.context.ApplicationContextInitializer;\nimport org.springframework.context.ConfigurableApplicationContext;\nimport org.springframework.context.support.GenericApplicationContext;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\n\npublic class SpartaUCDNService extends Springron implements UCDNService {\n    protected KOIMappingDriver koiMappingDriver;\n\n    protected KOIMappingDriver koiFileMappingDriver;\n\n    protected KOIMappingDriver koiBucketMappingDriver;\n\n    protected KOIMappingDriver koiVersionMappingDriver;\n\n    protected KOIMappingDriver koiServiceMappingDriver;\n\n\n    protected KOMFileSystem fileSystem;\n\n    protected UniformVolumeManager volumeTree;\n\n    protected TitanBucketInstrument bucketInstrument;\n\n    protected TitanVersionManage versionManage;\n\n    protected ServiceInstrument serviceInstrument;\n\n\n    protected PrimaryMessageWareStone  primaryMessageWareStone;\n\n    protected UniformServiceManager    serviceManager;\n\n    protected UFMConfig                clusterFileSynchronizationConfig;\n\n    protected void initKOMSubsystem() throws ComponentInitializationException {\n        this.koiMappingDriver = new VolumeMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiFileMappingDriver = new FileMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiBucketMappingDriver = new BucketMappingDriver(\n                this, 
(IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiVersionMappingDriver = new VersionMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiServiceMappingDriver = new ServiceMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n\n        JSONConfig selfConfig = (JSONConfig) this.getConfig();\n        FileSystemConfig fileSystemConfig = new KernelFileSystemConfig( selfConfig.queryJSONObject( \"service.PrimaryUniformFileSystem\" ) );\n        this.fileSystem         = new UniformObjectFileSystem( this.koiFileMappingDriver, fileSystemConfig );\n\n        VolumeConfig volumeConfig = new KernelVolumeConfig( selfConfig.queryJSONObject( \"service.PrimaryUniformVolumeManager\" ) );\n        this.volumeTree         = new UniformVolumeManager( this.koiMappingDriver, volumeConfig );\n        this.bucketInstrument   = new TitanBucketInstrument( this.koiBucketMappingDriver );\n        this.versionManage      = new TitanVersionManage( this.koiVersionMappingDriver );\n        this.serviceInstrument = new UniformServiceInstrument( this.koiServiceMappingDriver );\n    }\n\n    protected void initMessageWares() throws ComponentInitializationException {\n        this.primaryMessageWareStone = new WolfKingMessageWareStone( this );\n    }\n\n    protected void initModules() throws ComponentInitializationException {\n        this.serviceManager = new UniformServiceManager( this.serviceInstrument );\n        this.serviceManager.hookAppointServer( new HuskyServiceAppointServer( 
this.primaryMessageWareStone.getWolfKingAppointServer() ) );\n\n        JSONConfig selfConfig = (JSONConfig) this.getConfig();\n        this.clusterFileSynchronizationConfig = new UCFMConfig( selfConfig.queryJSONObject( \"service.ClusterFileSynchronizationConfig\" ) );\n    }\n\n    protected void startGlobalMiddlewares() throws ComponentInitializationException {\n        try {\n            this.getPrimaryMessageMiddlewareDirector().getWolfKingAppointServer().execute();\n            Debug.sleep( 500 );\n            this.getPrimaryMessageMiddlewareDirector().getWolfAppointClient().execute();\n        }\n        catch ( Exception e ) {\n            throw new ComponentInitializationException( e );\n        }\n    }\n\n    protected void initSpringBeanFactorySubsystem() throws ComponentInitializationException {\n        this.setPrimarySources( SpartaBoot.class );\n        this.setInitializer(new Executor() {\n            @Override\n            public void execute() throws Exception {\n                SpartaUCDNService.this.getSpringApplication().addInitializers(new ApplicationContextInitializer<ConfigurableApplicationContext>() {\n                    @Override\n                    public void initialize( ConfigurableApplicationContext applicationContext ) {\n                        GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext;\n                        genericApplicationContext.registerBean(\"primaryFileSystem\", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem) fileSystem);\n                        genericApplicationContext.registerBean(\"primaryVolume\", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree);\n                        genericApplicationContext.registerBean(\"primaryBucket\", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument);\n                        genericApplicationContext.registerBean(\"primaryVersion\", VersionManage.class, () -> 
(VersionManage) versionManage);\n                        genericApplicationContext.registerBean(\"primaryService\", ServiceInstrument.class, () -> serviceInstrument);\n                        genericApplicationContext.registerBean(\"primaryWolfDuplexAppointClient\", DuplexAppointClient.class, () ->  primaryMessageWareStone.getWolfAppointClient());\n                        genericApplicationContext.registerBean(\"uofsContentDelivery\", UCDNContentDelivery.class, () -> (UCDNContentDelivery) SpartaUCDNService.this.parentSystem());\n                    }\n                });\n            }\n        });\n    }\n\n    protected void initSubsystem() throws ComponentInitializationException {\n        this.initKOMSubsystem();\n        this.initMessageWares();\n        this.initModules();\n        this.startGlobalMiddlewares();\n        this.initSpringBeanFactorySubsystem();\n    }\n\n    public SpartaUCDNService( String szName, Processum parent, String[] springbootArgs ) throws ComponentInitializationException {\n        super( szName, parent, springbootArgs );\n        this.mSpringKernel.setPrimarySources( SpartaBoot.class );\n\n        this.initSubsystem();\n    }\n\n    public SpartaUCDNService( String szName, Processum parent ) throws ComponentInitializationException {\n        this( szName, parent, new String[0] );\n    }\n\n    @Override\n    protected void loadConfig() {\n        this.mServgramList     = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey );\n        Object dyServgramConf  = this.mServgramList.get( this.gramName() );\n        if( dyServgramConf instanceof String ) {\n            try{\n                this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) );\n            }\n            catch ( IOException ignore ) {\n                this.getLogger().info( \"[Notice] Spring will use the default config `application.yaml`.\" );\n            }\n        }\n        else {\n            
this.mServgramConf = this.mServgramList.getChild( this.gramName() );\n        }\n    }\n\n    @Override\n    public Tritium parentSystem() {\n        return (Tritium)super.parentSystem();\n    }\n\n    @Override\n    public KOMFileSystem getKOMFileSystem() {\n        return this.fileSystem;\n    }\n\n    @Override\n    public UniformVolumeManager getUniformVolumeManager() {\n        return this.volumeTree;\n    }\n\n    @Override\n    public TitanBucketInstrument getTitanBucketInstrument() {\n        return this.bucketInstrument;\n    }\n\n    @Override\n    public TitanVersionManage getTitanVersionManage() {\n        return this.versionManage;\n    }\n\n    @Override\n    public ServiceInstrument getServiceInstrument() {\n        return this.serviceInstrument;\n    }\n\n    @Override\n    public PrimaryMessageWareStone getPrimaryMessageMiddlewareDirector() {\n        return this.primaryMessageWareStone;\n    }\n\n    @Override\n    public UniformServiceManager getUniformServiceManager() {\n        return this.serviceManager;\n    }\n\n    @Override\n    public UFMConfig getClusterFileSynchronizationConfig() {\n        return this.clusterFileSynchronizationConfig;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/UCDNConsoleContents.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UCDNConsoleContents implements Pinenut {\n    public static String VOLUME_TYPE_PHYSICAL = \"PhysicalVolume\";\n\n    public static String VOLUME_TYPE_SIMPLE = \"SimpleVolume\";\n\n    public static String VOLUME_TYPE_SPANNED = \"SpannedVolume\";\n\n    public static String VOLUME_TYPE_STRIPED = \"StripedVolume\";\n\n    public static String VERSION_PREFIX = \"/$version\";\n\n    public static String FORWARD_SLASH = \"/\";\n\n    public static String PERIOD = \".\";\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/UCDNConstants.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure;\n\npublic class UCDNConstants {\n    public static String KafkaServer = \"localhost:9092\";\n\n    public static  String RocketServer = \"localhost:9876\";\n\n    public static  String UCDNFileServiceTransmitGroup = \"UCDNFileServiceTransmitGroup\";\n\n    public static String serviceId = \"1769872-0002d2-0003-cc\";\n\n    public static String deployId = \"1769872-0002d2-0003-cc\";\n\n    public static long clientId = 1;\n\n    public static String period = \".\";\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/UCDNContentDelivery.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure;\n\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.tritium.Tritium;\n\npublic class UCDNContentDelivery extends Tritium implements FSContentDeliveryService {\n    protected SpartaUCDNService spartaUCDNService;\n\n    public UCDNContentDelivery(String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public UCDNContentDelivery(String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    public SpartaUCDNService getSpartaUCDNService(){\n        return this.spartaUCDNService;\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        this.spartaUCDNService = new SpartaUCDNService( \"SpartaUCDNService\", this );\n        this.spartaUCDNService.execute();\n\n\n        this.getTaskManager().add(this.spartaUCDNService);\n        this.getTaskManager().syncWaitingTerminated();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/UCDNService.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure;\n\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.registry.server.UniformServiceManager;\nimport com.pinecone.hydra.storage.bucket.TitanBucketInstrument;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.version.TitanVersionManage;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.walnut.archcraft.redstone.messge.PrimaryMessageWareStone;\nimport com.walnut.sparta.ucdn.console.ufm.UFMConfig;\n\npublic interface UCDNService extends Slf4jTraceable {\n    KOMFileSystem getKOMFileSystem();\n\n    UniformVolumeManager getUniformVolumeManager();\n\n    TitanBucketInstrument getTitanBucketInstrument();\n\n    TitanVersionManage getTitanVersionManage();\n\n    ServiceInstrument getServiceInstrument();\n\n    // TODO, For next, that will to systemically integrate the Primary-Middleware-Stone into the uniform-director.\n    PrimaryMessageWareStone getPrimaryMessageMiddlewareDirector();\n\n    UniformServiceManager getUniformServiceManager();\n\n    UFMConfig getClusterFileSynchronizationConfig();\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/WolfKingMessageWareStone.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.util.json.JSONMaptron;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;\nimport com.pinecone.hydra.system.component.ComponentInitializationException;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.pinecone.hydra.uma.HuskyDuplexExpress;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointClient;\nimport com.pinecone.hydra.uma.wolf.WolvesAppointServer;\nimport com.pinecone.hydra.umb.kafka.WolfMCKafkaClient;\nimport com.pinecone.hydra.umb.rocket.WolfMCRocketClient;\nimport com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;\nimport com.pinecone.hydra.umb.wolf.WolfMCBClient;\nimport com.pinecone.hydra.umc.wolf.client.UlfClient;\nimport com.pinecone.hydra.umc.wolf.client.WolfMCClient;\nimport com.pinecone.hydra.umc.wolf.server.UlfServer;\nimport com.pinecone.hydra.umc.wolf.server.WolfMCServer;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.pinecone.tritium.Tritium;\nimport com.walnut.archcraft.redstone.messge.PrimaryMessageWareStone;\nimport com.walnut.sparta.ucdn.console.ufm.FileMultiDistributionIface;\nimport com.walnut.sparta.ucdn.console.ufm.SessionValidator;\n\npublic class WolfKingMessageWareStone implements PrimaryMessageWareStone {\n    protected DuplexAppointServer      wolfKingAppointServer;\n\n    protected DuplexAppointClient      wolfAppointClient;\n\n    protected UlfBroadcastControlNode  primaryKafkaClient;\n\n    protected UlfBroadcastControlNode  primaryRocketClient;\n\n    protected Processum                parentProcess;\n\n    public WolfKingMessageWareStone( Processum parentProcess ) throws ComponentInitializationException {\n        this.parentProcess = parentProcess;\n\n        this.initSelf();\n    }\n\n   
 private void initPrimaryAppointClientSegment() throws Exception {\n        UlfClient embedRPCClient = new WolfMCClient( 2048, \"PrimaryWolfMCClient\", this.parentSystem(), this.parentSystem().getMiddlewareDirector().getMiddlewareConfig().queryJSONObject( \"Messagers.Messagers.WolfMCKingpin\" ) );\n        this.wolfAppointClient = new WolvesAppointClient( embedRPCClient );\n\n        this.wolfAppointClient.compile( ServiceLifecycleIface.class, false );\n        this.wolfAppointClient.compile( ServiceMetaManipulationIface.class, false );\n    }\n\n    private void initPrimaryAppointServerSegment() throws Exception {\n        UlfServer embedRPCServer = new WolfMCServer( \"WolfKingMCServer\", this.parentSystem(), new JSONMaptron(\"{host: \\\"0.0.0.0\\\",\\n\" +\n                \"port: 5777, SocketTimeout: 800, KeepAliveTimeout: 3600, MaximumConnections: 1e6}\") );\n        this.wolfKingAppointServer = new WolvesAppointServer( embedRPCServer, HuskyDuplexExpress.class );\n        //this.serviceManager = new UniformServiceManager( serviceInstrument, wolfServer );\n    }\n\n    private void initPrimaryBroadcastSegment() throws Exception {\n        this.primaryKafkaClient = new WolfMCBClient(new WolfMCKafkaClient(UCDNConstants.KafkaServer), \"\", this.parentSystem(), WolfMCExpress.class);\n        this.primaryKafkaClient.compile( FileMultiDistributionIface.class,false );\n\n        this.primaryRocketClient = new WolfMCBClient(new WolfMCRocketClient(UCDNConstants.RocketServer,UCDNConstants.UCDNFileServiceTransmitGroup), \"\", this.parentSystem(), WolfMCExpress.class);\n        this.primaryRocketClient.compile(SessionValidator.class,false);\n    }\n\n    private void initSelf() throws ComponentInitializationException {\n        try {\n            this.initPrimaryAppointServerSegment();\n            this.initPrimaryAppointClientSegment();\n            this.initPrimaryBroadcastSegment();\n        }\n        catch ( Exception e ) {\n            throw new 
ComponentInitializationException( e );\n        }\n    }\n\n    @Override\n    public Processum getParentProcess() {\n        return this.parentProcess;\n    }\n\n    @Override\n    public DuplexAppointServer getWolfKingAppointServer() {\n        return this.wolfKingAppointServer;\n    }\n\n    @Override\n    public DuplexAppointClient getWolfAppointClient() {\n        return this.wolfAppointClient;\n    }\n\n    @Override\n    public UlfBroadcastControlNode getPrimaryKafkaClient() {\n        return this.primaryKafkaClient;\n    }\n\n    @Override\n    public UlfBroadcastControlNode getPrimaryRocketClient() {\n        return this.primaryRocketClient;\n    }\n\n    @Override\n    public Tritium parentSystem() {\n        return (Tritium)this.parentProcess.parentSystem();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/ClusterFileSyncDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\npublic class ClusterFileSyncDTO {\n    private String fileGuid;\n\n    public String getFileGuid() {\n        return fileGuid;\n    }\n\n    public void setFileGuid(String fileGuid) {\n        this.fileGuid = fileGuid;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/DownloadObjectByChannelDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\npublic class DownloadObjectByChannelDTO {\n    private String destDirPath;\n    private String targetPath;\n\n    public DownloadObjectByChannelDTO() {\n    }\n\n    public DownloadObjectByChannelDTO(String destDirPath, String targetPath) {\n        this.destDirPath = destDirPath;\n        this.targetPath = targetPath;\n    }\n\n\n    public String getDestDirPath() {\n        return destDirPath;\n    }\n\n\n    public void setDestDirPath(String destDirPath) {\n        this.destDirPath = destDirPath;\n    }\n\n\n    public String getTargetPath() {\n        return targetPath;\n    }\n\n\n    public void setTargetPath(String targetPath) {\n        this.targetPath = targetPath;\n    }\n\n    public String toString() {\n        return \"downloadObjectByChannelDto{destDirPath = \" + destDirPath + \", targetPath = \" + targetPath + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/LogicVolumeDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class LogicVolumeDTO implements Pinenut {\n    private String name;\n\n    private long definitionCapacity;\n\n    private String extConfig;\n\n\n    public LogicVolumeDTO() {\n    }\n\n    public LogicVolumeDTO(String name, long definitionCapacity, String extConfig) {\n        this.name = name;\n        this.definitionCapacity = definitionCapacity;\n        this.extConfig = extConfig;\n    }\n\n    public String getName() {\n        return name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n\n    public long getDefinitionCapacity() {\n        return definitionCapacity;\n    }\n\n\n    public void setDefinitionCapacity(long definitionCapacity) {\n        this.definitionCapacity = definitionCapacity;\n    }\n\n\n    public String getExtConfig() {\n        return extConfig;\n    }\n\n\n    public void setExtConfig(String extConfig) {\n        this.extConfig = extConfig;\n    }\n\n    public String toString() {\n        return \"SimpleVolumeDTO{name = \" + name + \", definitionCapacity = \" + definitionCapacity + \", extConfig = \" + extConfig + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/PhysicalVolumeDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class PhysicalVolumeDTO implements Pinenut {\n    private String name;\n\n    private long definitionCapacity;\n\n    private String extConfig;\n\n    private String mountPoint;\n\n\n    public PhysicalVolumeDTO() {\n    }\n\n    public PhysicalVolumeDTO(String name, long definitionCapacity, String extConfig, String mountPoint) {\n        this.name = name;\n        this.definitionCapacity = definitionCapacity;\n        this.extConfig = extConfig;\n        this.mountPoint = mountPoint;\n    }\n\n\n    public String getName() {\n        return name;\n    }\n\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n\n    public long getDefinitionCapacity() {\n        return definitionCapacity;\n    }\n\n\n    public void setDefinitionCapacity(long definitionCapacity) {\n        this.definitionCapacity = definitionCapacity;\n    }\n\n\n    public String getExtConfig() {\n        return extConfig;\n    }\n\n\n    public void setExtConfig(String extConfig) {\n        this.extConfig = extConfig;\n    }\n\n\n    public String getMountPoint() {\n        return mountPoint;\n    }\n\n\n    public void setMountPoint(String mountPoint) {\n        this.mountPoint = mountPoint;\n    }\n\n    public String toString() {\n        return \"PhysicalVolumeDTO{name = \" + name + \", definitionCapacity = \" + definitionCapacity + \", extConfig = \" + extConfig + \", mountPoint = \" + mountPoint + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/RenameDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\npublic class RenameDTO {\n    private String path;\n\n    private String newName;\n\n    public String getPath() {\n        return path;\n    }\n\n    public void setPath(String path) {\n        this.path = path;\n    }\n\n    public String getNewName() {\n        return newName;\n    }\n\n    public void setNewName(String newName) {\n        this.newName = newName;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/SiteNodeDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic class SiteNodeDTO {\n    protected long enumId;\n\n    protected String nodeName;\n\n    protected String nodeGuid;\n\n    protected String siteGuid;\n\n    protected int state;\n\n    protected int isEnabled;\n\n    protected String relatedService;\n\n    public long getEnumId() {\n        return enumId;\n    }\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n    public String getNodeName() {\n        return nodeName;\n    }\n\n    public void setNodeName(String nodeName) {\n        this.nodeName = nodeName;\n    }\n\n    public String getNodeGuid() {\n        return nodeGuid;\n    }\n\n    public void setNodeGuid(String nodeGuid) {\n        this.nodeGuid = nodeGuid;\n    }\n\n    public String getSiteGuid() {\n        return siteGuid;\n    }\n\n    public void setSiteGuid(String siteGuid) {\n        this.siteGuid = siteGuid;\n    }\n\n    public int getState() {\n        return state;\n    }\n\n    public void setState(int state) {\n        this.state = state;\n    }\n\n    public int getIsEnabled() {\n        return isEnabled;\n    }\n\n    public void setIsEnabled(int isEnabled) {\n        this.isEnabled = isEnabled;\n    }\n\n    public String getRelatedService() {\n        return relatedService;\n    }\n\n    public void setRelatedService(String relatedService) {\n        this.relatedService = relatedService;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/StorageExpansionDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class StorageExpansionDTO implements Pinenut {\n    public String logicGuid;\n\n    public String childGuid;\n\n\n    public StorageExpansionDTO() {\n    }\n\n    public StorageExpansionDTO(String logicGuid, String childGuid) {\n        this.logicGuid = logicGuid;\n        this.childGuid = childGuid;\n    }\n\n\n    public String getLogicGuid() {\n        return logicGuid;\n    }\n\n\n    public void setLogicGuid(String logicGuid) {\n        this.logicGuid = logicGuid;\n    }\n\n\n    public String getChildGuid() {\n        return childGuid;\n    }\n\n\n    public void setChildGuid(String childGuid) {\n        this.childGuid = childGuid;\n    }\n\n    public String toString() {\n        return \"StorageExpansionDTO{logicGuid = \" + logicGuid + \", physicalGuid = \" + childGuid + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/UpdateFileNameDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\npublic class UpdateFileNameDTO {\n    private String filePath;\n\n    private String newFileName;\n\n    public String getFilePath() {\n        return this.filePath;\n    }\n\n    public void setFilePath(String filePath) {\n        this.filePath = filePath;\n    }\n\n    public String getNewFileName() {\n        return this.newFileName;\n    }\n\n    public void setNewFileName(String newFileName) {\n        this.newFileName = newFileName;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/dto/UpdateObjectByChannelDTO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.dto;\n\nimport org.springframework.web.multipart.MultipartFile;\n\npublic class UpdateObjectByChannelDTO {\n    private String          volumeGuid;\n    private String          destDirPath;\n    private MultipartFile   object;\n\n\n    public UpdateObjectByChannelDTO() {\n    }\n\n    public UpdateObjectByChannelDTO(String volumeGuid, String destDirPath, MultipartFile object) {\n        this.volumeGuid = volumeGuid;\n        this.destDirPath = destDirPath;\n        this.object = object;\n    }\n\n    public String getVolumeGuid() {\n        return volumeGuid;\n    }\n\n\n    public void setVolumeGuid(String volumeGuid) {\n        this.volumeGuid = volumeGuid;\n    }\n\n\n    public String getDestDirPath() {\n        return destDirPath;\n    }\n\n\n    public void setDestDirPath(String destDirPath) {\n        this.destDirPath = destDirPath;\n    }\n\n\n    public MultipartFile getObject() {\n        return object;\n    }\n\n\n    public void setObject(MultipartFile object) {\n        this.object = object;\n    }\n\n    public String toString() {\n        return \"updateObjectDto{volumeGuid = \" + volumeGuid + \", destDirPath = \" + destDirPath + \", object = \" + object + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/service/UCDNCentralServiceManager.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.service;\n\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;\nimport com.pinecone.hydra.service.registry.server.UniformServiceManager;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNService;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNContentDelivery;\n\npublic class UCDNCentralServiceManager implements UCDNServiceManager {\n    protected DuplexAppointServer              serviceControlAppointServer;\n\n    protected DuplexAppointClient              serviceRecallAppointClient;\n\n    protected UniformServiceManager            serviceManager;\n\n    protected ServiceInstrument                serviceInstrument;\n\n    protected ServiceLifecycleIface            serviceRegistryLifecycleIface;\n\n    protected ServiceMetaManipulationIface     serviceMateIface;\n\n    protected UCDNService                      ucdnService;\n\n    public UCDNCentralServiceManager( UCDNContentDelivery UCDNContentDelivery) {\n        this.ucdnService                   = UCDNContentDelivery.getSpartaUCDNService();\n        this.serviceInstrument = this.ucdnService.getServiceInstrument();\n        this.serviceManager                = ucdnService.getUniformServiceManager();\n\n        this.serviceControlAppointServer   = this.ucdnService.getPrimaryMessageMiddlewareDirector().getWolfKingAppointServer();\n        this.serviceRecallAppointClient    = this.ucdnService.getPrimaryMessageMiddlewareDirector().getWolfAppointClient();\n\n        this.serviceRegistryLifecycleIface = this.serviceRecallAppointClient.getIface( ServiceLifecycleIface.class );\n        this.serviceMateIface              = this.serviceRecallAppointClient.getIface( 
ServiceMetaManipulationIface.class );\n    }\n\n    @Override\n    public DuplexAppointServer getWolfServer() {\n        return this.serviceControlAppointServer;\n    }\n\n    @Override\n    public ServiceInstrument getServiceInstrument() {\n        return this.serviceInstrument;\n    }\n\n    @Override\n    public DuplexAppointClient getDuplexAppointClient() {\n        return this.serviceRecallAppointClient;\n    }\n\n    @Override\n    public ServiceLifecycleIface getLifecycleIface() {\n        return this.serviceRegistryLifecycleIface;\n    }\n\n    @Override\n    public ServiceMetaManipulationIface getMateIface() {\n        return this.serviceMateIface;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/service/UCDNServiceManager.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.service;\n\nimport com.pinecone.framework.system.regime.arch.Manager;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.registry.server.ServiceLifecycleIface;\nimport com.pinecone.hydra.service.registry.server.ServiceMetaManipulationIface;\nimport com.pinecone.hydra.uma.DuplexAppointClient;\nimport com.pinecone.hydra.uma.DuplexAppointServer;\n\npublic interface UCDNServiceManager extends Manager {\n    DuplexAppointServer getWolfServer();\n\n    ServiceInstrument getServiceInstrument();\n\n    DuplexAppointClient getDuplexAppointClient();\n\n    ServiceLifecycleIface getLifecycleIface();\n\n    ServiceMetaManipulationIface getMateIface();\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/vo/FolderContentVo.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.vo;\n\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\n\nimport java.util.List;\n\npublic class FolderContentVo {\n    private List< FileTreeNode > fileTreeNodes;\n\n\n    public FolderContentVo() {\n    }\n\n    public FolderContentVo(List<FileTreeNode> fileTreeNodes) {\n        this.fileTreeNodes = fileTreeNodes;\n    }\n\n    public List<FileTreeNode> getFileTreeNodes() {\n        return fileTreeNodes;\n    }\n\n\n    public void setFileTreeNodes(List<FileTreeNode> fileTreeNodes) {\n        this.fileTreeNodes = fileTreeNodes;\n    }\n\n    public String toString() {\n        return \"FolderContentVo{fileTreeNodes = \" + fileTreeNodes + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/vo/SiteNodeVO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.vo;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\nimport com.pinecone.hydra.storage.bucket.entity.SiteNode;\n\npublic class SiteNodeVO implements Pinenut {\n    protected long enumId;\n\n    protected String nodeName;\n\n    protected GUID nodeGuid;\n\n    protected GUID siteGuid;\n\n    protected int state;\n\n    protected int isEnabled;\n\n    protected GUID relatedService;\n\n    protected String relatedServicePath;\n\n    public SiteNodeVO(){}\n\n    public SiteNodeVO(SiteNode siteNode){\n        this.enumId = siteNode.getEnumId();\n        this.nodeGuid = siteNode.getNodeGuid();\n        this.nodeName = siteNode.getNodeName();\n        this.relatedService = siteNode.getRelatedService();\n        this.isEnabled = siteNode.getIsEnabled();\n        this.siteGuid = siteNode.getSiteGuid();\n        this.state = siteNode.getState();\n    }\n\n\n    public long getEnumId() {\n        return this.enumId;\n    }\n\n\n    public void setEnumId(long enumId) {\n        this.enumId = enumId;\n    }\n\n\n    public String getNodeName() {\n        return this.nodeName;\n    }\n\n\n    public void setNodeName(String nodeName) {\n        this.nodeName = nodeName;\n    }\n\n\n    public GUID getNodeGuid() {\n        return this.nodeGuid;\n    }\n\n\n    public void setNodeGuid(GUID nodeGuid) {\n        this.nodeGuid = nodeGuid;\n    }\n\n\n    public GUID getSiteGuid() {\n        return this.siteGuid;\n    }\n\n\n    public void setSiteGuid(GUID siteGuid) {\n        this.siteGuid = siteGuid;\n    }\n\n\n    public int getState() {\n        return this.state;\n    }\n\n\n    public void setState(int state) {\n        this.state = state;\n    }\n\n\n    public int getIsEnabled() {\n        return this.isEnabled;\n    }\n\n\n    public void setIsEnabled(int isEnabled) {\n        
this.isEnabled = isEnabled;\n    }\n\n\n    public GUID getRelatedService() {\n        return this.relatedService;\n    }\n\n\n    public void setRelatedService(GUID relatedService) {\n        this.relatedService = relatedService;\n    }\n\n    public String getRelatedServicePath(){\n        return this.relatedServicePath;\n    }\n\n    public void setRelatedServicePath(String relatedServicePath){\n        this.relatedServicePath = relatedServicePath;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/infrastructure/vo/SyncFinishedVO.java",
    "content": "package com.walnut.sparta.ucdn.console.infrastructure.vo;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.json.homotype.BeanJSONEncoder;\n\npublic class SyncFinishedVO implements Pinenut {\n    private String path;\n\n    private String serviceId;\n\n    private int syncState;\n\n    public SyncFinishedVO(){}\n\n    public SyncFinishedVO( String path, String serviceId, int syncState ){\n        this.path = path;\n        this.serviceId = serviceId;\n        this.syncState = syncState;\n    }\n\n\n    public String getPath() {\n        return this.path;\n    }\n\n    public void setPath(String path) {\n        this.path = path;\n    }\n\n    public String getServiceId() {\n        return this.serviceId;\n    }\n\n    public void setServiceId(String serviceId) {\n        this.serviceId = serviceId;\n    }\n\n    public int getSyncState() {\n        return this.syncState;\n    }\n\n    public void setSyncState(int syncState) {\n        this.syncState = syncState;\n    }\n\n    @Override\n    public String toJSONString() {\n        return BeanJSONEncoder.BasicEncoder.encode( this );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/interceptor/JWTInterceptor.java",
    "content": "package com.walnut.sparta.ucdn.console.interceptor;\n\nimport com.alibaba.fastjson.JSONObject;\nimport com.walnut.archcraft.redstone.response.GenericResultResponse;\nimport com.walnut.sparta.ucdn.console.util.JWTUtil;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.http.HttpMethod;\nimport org.springframework.stereotype.Component;\nimport org.springframework.util.StringUtils;\nimport org.springframework.web.servlet.HandlerInterceptor;\n\n@Component\npublic class JWTInterceptor implements HandlerInterceptor {\n    private Logger log = LoggerFactory.getLogger( this.getClass() );\n    @Override\n    public boolean preHandle(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response, Object handler) throws Exception {\n        String url=request.getRequestURI();\n        if (request.getMethod().equals(HttpMethod.OPTIONS.name())) {\n            return true;\n        }\n        //log.info(\"请求的路径是：\"+ url);\n        if (url.contains(\"login\")||url.contains(\"register\")||url.contains(\"send_code\")||url.contains(\"download\")){\n            log.info(\"Allow login or registration operations\");\n            return true;\n        }\n        String jwt=request.getHeader(\"Token\");\n        if (!StringUtils.hasLength(jwt)){\n            log.info(\"The request header Token is empty\");\n            GenericResultResponse error = GenericResultResponse.error(\"not login\");\n            String jsonString = JSONObject.toJSONString(error);\n            response.getWriter().write(jsonString);\n            return false;\n        }\n        try {\n            JWTUtil.ParseJWt(jwt);\n        } catch (Exception e){\n            log.info(\"Token parsing failed\");\n            GenericResultResponse error = GenericResultResponse.error(\"Not logged in\");\n            String jsonString = JSONObject.toJSONString(error);\n            response.getWriter().write(jsonString);\n            return 
false;\n        }\n        return true;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/mapper/ClusterFileSyncMapper.java",
    "content": "package com.walnut.sparta.ucdn.console.mapper;\n\nimport com.pinecone.framework.util.id.GUID;\nimport org.apache.ibatis.annotations.Delete;\nimport org.apache.ibatis.annotations.Insert;\nimport org.apache.ibatis.annotations.Mapper;\nimport org.apache.ibatis.annotations.Param;\nimport org.apache.ibatis.annotations.Select;\nimport org.apache.ibatis.annotations.Update;\n\n@Mapper\npublic interface ClusterFileSyncMapper {\n    @Insert(\"INSERT INTO `hydra_ucdn_fmd_sync_status` (`file_guid`, `state`, `site_guid`) VALUES ( #{fileGuid},#{state},#{siteGuid})\")\n    void insert(@Param(\"fileGuid\") GUID fileGuid, @Param(\"state\") int state, @Param(\"siteGuid\") GUID siteGuid);\n\n    @Delete(\"DELETE FROM `hydra_ucdn_fmd_sync_status` WHERE `file_guid` = #{fileGuid}\")\n    void remove( GUID fileGuid );\n\n    @Update(\"UPDATE `hydra_ucdn_fmd_sync_status` SET `state` = #{state} WHERE `file_guid` = #{fileGuid}\")\n    void updateState(@Param(\"fileGuid\") GUID fileGuid, @Param(\"state\") int state );\n\n    @Select(\"SELECT `state` FROM `hydra_ucdn_fmd_sync_status` WHERE `file_guid` = #{fileGuid}\")\n    Integer queryState( @Param(\"fileGuid\") GUID fileGuid );\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/FMDTransactionBlock.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\npublic class FMDTransactionBlock {\n    private Object clusterLock;\n\n    private Long clusterCompletedCount;\n\n    private Long consumerCompletedCount;\n\n    public FMDTransactionBlock(){}\n\n    public FMDTransactionBlock(Object clusterLock, Long clusterCompletedCount, Long consumerCompletedCount) {\n        this.clusterLock = clusterLock;\n        this.clusterCompletedCount = clusterCompletedCount;\n        this.consumerCompletedCount = consumerCompletedCount;\n    }\n\n    public Object getClusterLock() {\n        return this.clusterLock;\n    }\n\n    public void setClusterLock(Object clusterLock) {\n        this.clusterLock = clusterLock;\n    }\n\n    public Long getClusterCompletedCount() {\n        return this.clusterCompletedCount;\n    }\n\n    public void setClusterCompletedCount(Long clusterCompletedCount) {\n        this.clusterCompletedCount = clusterCompletedCount;\n    }\n\n    public Long getConsumerCompletedCount() {\n        return this.consumerCompletedCount;\n    }\n\n    public void setConsumerCompletedCount(Long consumerCompletedCount) {\n        this.consumerCompletedCount = consumerCompletedCount;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/FileMultiDistributionController.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ClusterPage;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.Cluster;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\nimport com.walnut.sparta.ucdn.console.infrastructure.ClusterLock;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNConstants;\nimport com.walnut.sparta.ucdn.console.ufm.protocol.RequestHead;\nimport com.walnut.sparta.ucdn.console.ufm.session.UFMTransaction;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.RandomAccessFile;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.Path;\nimport java.nio.file.StandardOpenOption;\n\n@Controller\n@AddressMapping( \"com.pinecone.hydra.uofs.ufm.FileMultiDistributionIface.\" )\npublic class FileMultiDistributionController implements Pinenut {\n    private Logger                            logger;\n\n    protected KOMFileSystem                   primaryFileSystem;\n\n    protected UniformVolumeManager            primaryVolume;\n\n    protected SessionPhaser                   sessionPhaser;\n\n    protected SessionValidator                fileSessionValidator;\n\n    protected UFMConfig                       config;\n\n    public 
FileMultiDistributionController( UOFSFileMultiDistributionService distributionService ) throws UMBServiceException {\n        this.logger                 = LoggerFactory.getLogger( this.getClass() );\n        this.primaryFileSystem      = distributionService.primaryFileSystem;\n        this.primaryVolume          = distributionService.primaryVolume;\n        this.sessionPhaser          = distributionService.sessionPhaser;\n        this.fileSessionValidator   = distributionService.fileSessionValidator;\n        this.config                 = distributionService.config;\n    }\n\n    @AddressMapping(\"startDistribution\")\n    public void setFileMate( RequestHead head, String path, long definitionSize ) {\n        if( this.sessionPhaser.getSessionTransaction( head.getSessionId() ) != null ){\n            this.logger.warn( \"[Warning] UCDNService `startDistribution` session assertion compromised.\" );\n            this.sessionPhaser.removeSessionTransaction( head.getSessionId() );\n            return;\n        }\n\n        this.logger.info( \"UCDNService invoked `startDistribution`. <Start>\" );\n\n        long sessionId = head.getSessionId();\n        FileNode fileNode = this.primaryFileSystem.affirmFileNode( path );\n        fileNode.setDefinitionSize( definitionSize );\n        this.primaryFileSystem.update( fileNode );\n        this.sessionPhaser.registerClusterCount( fileNode.getGuid(),0 );\n\n        UFMTransaction ufmTransaction = new UFMTransaction( fileNode.getGuid() );\n        ufmTransaction.setLastEventArrivedMills( System.currentTimeMillis() );\n        this.sessionPhaser.registerSessionTransaction( sessionId, ufmTransaction );\n        this.sessionPhaser.getSessionTransaction( sessionId ).finishStartTransmit();\n\n        this.logger.info( \"UCDNService invoked `startDistribution`. 
<Done>\" );\n    }\n\n    @AddressMapping(\"setFrameMeta\")\n    public void setFrameMeta( RequestHead head, UFMDClusterDO frameMeta ) throws IOException {\n        long sessionId = head.getSessionId();\n        if ( this.assertTransmitTransaction ( frameMeta.getFilePath(), head) ) {\n            this.logger.warn( \"[Warning] UCDNService `setFrameMeta` session assertion compromised.\" );\n            return;\n        }\n\n        this.logger.info( \"UCDNService invoked `setFrameMeta`. <Start>\" );\n        FSNodeAllotment allotment = this.primaryFileSystem.getFSNodeAllotment();\n        String filePath = frameMeta.getFilePath();\n        ElementNode elementNode = this.primaryFileSystem.queryElement(filePath);\n        LocalCluster localCluster = allotment.newLocalCluster();\n\n        localCluster.setSegId(frameMeta.getSegId() );\n        localCluster.setSourceName( frameMeta.getSourceName() );\n        localCluster.setSize(frameMeta.getSize() );\n        localCluster.setFileGuid( elementNode.getGuid() );\n\n        localCluster.save();\n        this.sessionPhaser.getSessionTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() );\n\n        this.logger.info( \"UCDNService invoked `setFrameMeta`. <Done>\" );\n    }\n\n    @AddressMapping(\"transmitClusterFrame\")\n    public void transmitClusterFrame( RequestHead head, UFMDClusterFrame ufmdClusterFrame ) throws IOException, InterruptedException {\n        long sessionId = head.getSessionId();\n        if ( this.assertTransmitTransaction ( ufmdClusterFrame.getPath(), head) ) {\n            this.logger.warn( \"[Warning] UCDNService `transmitClusterFrame` session assertion compromised.\" );\n            return;\n        }\n\n        this.logger.info( \"UCDNService invoked `transmitClusterFrame`. 
<Start>\" );\n        ElementNode elementNode = this.primaryFileSystem.queryElement(ufmdClusterFrame.getPath());\n        Cluster cluster = this.primaryFileSystem.getClusterByFileWithId(elementNode.getGuid(), ufmdClusterFrame.getSegId());\n\n        if( this.sessionPhaser.getClusterLock(cluster.getSegGuid()) == null ){\n            this.sessionPhaser.registerClusterLock( cluster.getSegGuid(), new ClusterLock());\n        }\n        else {\n            synchronized (this.sessionPhaser.getClusterLock(cluster.getSegGuid())){\n                this.sessionPhaser.getClusterLock(cluster.getSegGuid()).increment();\n                this.sessionPhaser.getClusterLock(cluster.getSegGuid()).wait();\n            }\n        }\n\n        RandomAccessFile fos = this.sessionPhaser.getClusterOutputStream( cluster.getSegGuid() );\n\n        Path     temporaryPath = this.config.formatTemporaryPath( cluster.getSegGuid().toString() );\n        String szTemporaryPath = temporaryPath.toString();\n        File tempFile = new File( szTemporaryPath );\n        if( fos == null ){\n            fos =  new RandomAccessFile( tempFile,\"rw\" );\n            this.sessionPhaser.registerClusterOutputStream( cluster.getSegGuid(), fos );\n        }\n\n        fos.seek(ufmdClusterFrame.getOffset() );\n        fos.write( ufmdClusterFrame.getBytes() );\n\n        this.sessionPhaser.getSessionTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() );\n\n        if( cluster.getSize() == tempFile.length() ) {\n            this.sessionPhaser.removeClusterCount( cluster.getSegGuid() );\n            this.frameTerminate( head, ufmdClusterFrame.getPath(), ufmdClusterFrame.getSegId(), ufmdClusterFrame.getTotalSegNum() );\n        }\n\n        if( this.sessionPhaser.getClusterLock(cluster.getSegGuid()) != null ){\n            if( this.sessionPhaser.getClusterLock(cluster.getSegGuid()).getWaitThreatNum().get() == 0 ){\n                this.sessionPhaser.removeClusterLock( cluster.getSegGuid() 
);\n                this.logger.info( \"UCDNService invoked `transmitClusterFrame`. <Done>\" );\n                return;\n            }\n            synchronized ( this.sessionPhaser.getClusterLock(cluster.getSegGuid()) ){\n                this.sessionPhaser.getClusterLock( cluster.getSegGuid() ).decrement();\n                this.sessionPhaser.getClusterLock(cluster.getSegGuid()).notify();\n            }\n        }\n\n        this.logger.info( \"UCDNService invoked `transmitClusterFrame`. <Done>\" );\n    }\n\n    //todo 添加写完后向主节点发送完成指令\n    @AddressMapping(\"frameTerminate\")\n    public void frameTerminate( RequestHead head, String path, long segId, long totalSegNum ) throws IOException {\n        long sessionId = head.getSessionId();\n        if ( this.assertTransmitTransaction ( path, head) ) {\n            this.logger.warn( \"[Warning] UCDNService `frameTerminate` session assertion compromised.\" );\n            return;\n        }\n\n        this.logger.info( \"UCDNService invoked `frameTerminate`. 
<Start>\" );\n        FileNode fileNode = (FileNode) this.primaryFileSystem.queryElement(path);\n        LocalCluster frame = (LocalCluster)this.primaryFileSystem.getClusterByFileWithId(fileNode.getGuid(), segId);\n\n        Path     temporaryPath = this.config.formatTemporaryPath( frame.getSegGuid().toString() );\n        String szTemporaryPath = temporaryPath.toString();\n        File          tempFile = new File( szTemporaryPath );\n        try {\n            if ( !tempFile.exists() ){\n                throw new IOException( \"Creating file compromised, what :\" + szTemporaryPath );\n            }\n\n            FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ);\n            TitanFileChannelChanface chanface = new TitanFileChannelChanface( channel );\n\n            TitanFileReceiveEntity64 receiveEntity64 = new TitanFileReceiveEntity64(this.primaryFileSystem, path, fileNode, chanface, this.primaryVolume);\n            receiveEntity64.receive( segId );\n\n            this.sessionPhaser.incrementClusterCount( fileNode.getGuid() );\n            this.logger.info(\"`frameTerminate` Currently finished transition cluster：\" + this.sessionPhaser.getClusterCount( fileNode.getGuid() ));\n            if( this.sessionPhaser.getClusterCount( fileNode.getGuid() ) == this.config.getBatchTransmitMemberThreshold() ){\n                this.sessionPhaser.resetClusterCount( fileNode.getGuid() );\n                this.fileSessionValidator.stageClusterGroupComplete( path );\n            }\n        }\n        finally {\n            RandomAccessFile outputStream = this.sessionPhaser.getClusterOutputStream(frame.getSegGuid());\n            outputStream.close();\n            this.sessionPhaser.removeClusterOutputStream( frame.getSegGuid() );\n            if ( !tempFile.delete() ) {\n                throw new IOException( \"Temporary file has been purged failed.\" );\n            }\n            if( segId == totalSegNum - 1 ){\n                
this.sessionPhaser.getSessionTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() );\n                this.sessionPhaser.getSessionTransaction( sessionId ).finishTransmitFileContent();\n                this.sessionPhaser.getSessionTransaction( sessionId ).finishFileDistributionComplete();\n                this.sessionPhaser.removeClusterCount( fileNode.getGuid() );\n                this.sessionPhaser.removeFileLock( fileNode.getGuid() );\n                this.sessionPhaser.removeConsumerCount( fileNode.getGuid() );\n                this.sessionPhaser.removeSessionTransaction( sessionId );\n                this.fileSessionValidator.fileTransmitComplete( path, UCDNConstants.serviceId );\n            }\n            else {\n                this.sessionPhaser.getSessionTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() );\n            }\n        }\n\n        this.logger.info( \"UCDNService invoked `frameTerminate`. <Done>\" );\n    }\n\n    protected boolean assertTransmitTransaction( String filePath, RequestHead head ) throws IOException {\n        long sessionId = head.getSessionId();\n        UFMTransaction transaction = this.sessionPhaser.getSessionTransaction(sessionId);\n        if( transaction == null ){\n            this.logger.warn( \"[Warning] UCDNService `assertTransmitTransaction` session doesn`t existed. <Pass>\" );\n            return true;\n        }\n        long currentTimeMillis = System.currentTimeMillis();\n        if( currentTimeMillis - transaction.getLastEventArrivedMills() > this.config.getSessionExpiredTimeMillis() ){\n            this.logger.warn( \"[Warning] UCDNService `assertTransmitTransaction` session has expired. 
<Pass>\" );\n            this.sessionPhaser.removeSessionTransaction( sessionId );\n            this.transmitRollBack( filePath, sessionId );\n            return true;\n        }\n        if( !transaction.isStartTransmit() ){\n            this.logger.warn( \"[Warning] UCDNService `assertTransmitTransaction` illegal transaction stage, which should never has started yet. <Pass>\" );\n            this.sessionPhaser.removeSessionTransaction( sessionId );\n            this.transmitRollBack( filePath, sessionId );\n            return true;\n        }\n        return false;\n    }\n\n    private void transmitRollBack( String filePath, long sessionId ) throws IOException {\n        this.logger.warn( \"[Warning] UCDNService `transmitRollBack`. <Start>\" );\n\n        FileNode fileNode = (FileNode) this.primaryFileSystem.queryElement(filePath);\n        ClusterPage clusterPage = this.primaryFileSystem.fetchClustersByFileGuid( fileNode.getGuid() );\n\n        long fileClusterNum = clusterPage.getClusters();\n\n        for( long i = 0; i < fileClusterNum; ++i ){\n            LocalCluster frame = clusterPage.getLocalCluster( i );\n            RandomAccessFile clusterOutputStream = this.sessionPhaser.getClusterOutputStream(frame.getSegGuid());\n            clusterOutputStream.close();\n            this.sessionPhaser.removeClusterOutputStream( frame.getSegGuid() );\n        }\n        this.sessionPhaser.removeClusterCount( fileNode.getGuid() );\n        this.sessionPhaser.removeFileLock( fileNode.getGuid() );\n        this.sessionPhaser.removeClusterCount( fileNode.getGuid() );\n        //this.primaryFileSystem.remove( fileNode.getGuid() );\n        this.sessionPhaser.removeSessionTransaction( sessionId );\n\n        this.logger.warn( \"[Warning] UCDNService `transmitRollBack`. <Done>\" );\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/FileMultiDistributionIface.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.hydra.umct.stereotype.Iface;\nimport com.walnut.sparta.ucdn.console.ufm.protocol.RequestHead;\n\n@Iface ( \"com.pinecone.hydra.uofs.ufm.FileMultiDistributionIface\" )\npublic interface FileMultiDistributionIface {\n\n    void startDistribution  ( RequestHead head, String path, long definitionSize );\n\n    void setFrameMeta ( RequestHead head, UFMDClusterDO frameMeta );\n\n    void transmitClusterFrame ( RequestHead head, UFMDClusterFrame contentVO );\n\n    void frameTerminate( RequestHead head, String path, long segId );\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/FileMultiDistributionService.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.walnut.sparta.ucdn.console.ufm.event.UFMEventSubscriber;\n\nimport java.io.IOException;\nimport java.util.Collection;\n\npublic interface FileMultiDistributionService extends Pinenut {\n    void fileDistribution( FileNode fileNode, String topic ) throws IOException, InterruptedException;\n\n    void test() throws UMBServiceException;\n\n    BroadcastControlConsumer getTransmitConsumer( String topic,String group );\n\n    BroadcastControlProducer getTransmitProducer();\n\n    FileMultiDistributionService registerFileTransmitCompleteEventSubscriber( UFMEventSubscriber subscriber ) ;\n\n    FileMultiDistributionService deregisterFileTransmitCompleteEventSubscriber( UFMEventSubscriber subscriber ) ;\n\n    Collection<UFMEventSubscriber> fetchFileTransmitCompleteEventSubscribers();\n\n    boolean hasStarted();\n\n    void start() throws UMBServiceException ;\n\n    void shutdown();\n\n    UFMConfig getConfig();\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/SessionPhaser.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.id.GUID;\nimport com.walnut.sparta.ucdn.console.infrastructure.ClusterLock;\nimport com.walnut.sparta.ucdn.console.ufm.session.UFMTransaction;\n\nimport java.io.RandomAccessFile;\n\npublic interface SessionPhaser extends Pinenut {\n    void registerFileLock(GUID guid, Object object );\n\n    Object getFileLock(GUID guid );\n\n    void removeFileLock( GUID guid );\n\n    void registerClusterLock(GUID guid, ClusterLock clusterLock);\n\n    ClusterLock getClusterLock( GUID guid );\n\n    void removeClusterLock( GUID guid );\n\n    void registerClusterCount(GUID guid, long count );\n\n    long getClusterCount(GUID guid );\n\n    void removeClusterCount( GUID guid );\n\n    void incrementClusterCount(GUID guid );\n\n    void resetClusterCount(GUID guid );\n\n    void registerConsumerCount(GUID guid, Long count );\n\n    long getConsumerCount(GUID guid );\n\n    void removeConsumerCount( GUID guid );\n\n    void incrementConsumerCount(GUID guid );\n\n    void resetConsumerCount(GUID guid );\n\n    void registerSessionTransaction(Long sessionId, UFMTransaction ufmTransaction);\n\n    UFMTransaction getSessionTransaction( Long sessionId );\n\n    void removeSessionTransaction( Long sessionId );\n\n    void registerClusterOutputStream(GUID guid, RandomAccessFile fileOutputStream);\n\n    RandomAccessFile getClusterOutputStream(GUID guid );\n\n    void removeClusterOutputStream( GUID guid );\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/SessionValidator.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umct.stereotype.Iface;\n\nimport java.io.IOException;\n\n@Iface\npublic interface SessionValidator extends Pinenut {\n\n    void stageClusterGroupComplete( String path ) throws IOException;\n\n    void stageFileTransmitComplete( String path ) throws IOException;\n\n    void fileTransmitComplete( String path, String serviceId ) throws IOException;\n\n    void start() throws UMBServiceException;\n\n    void shutdown();\n\n    boolean hasStarted();\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UCFMConfig.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport java.nio.file.Path;\nimport java.util.Map;\n\npublic class UCFMConfig implements UFMConfig {\n\n    protected int      mnFileFrameSize;\n\n    protected int      mnBatchTransmitMemberThreshold;\n\n    protected long     mnSessionExpiredTimeMillis;\n\n    protected String   mszFileCloudDistributeTransmitTopic;\n\n    protected String   mszFileCloudDistributeEventTopic;\n\n    protected String   mszFileServiceTransmitGroup;\n\n    protected String   mszTemporaryFileExtends;\n\n    protected String   mszMajorTemporaryClusterFileDirectory;\n\n    protected String   mszLocalMasterTemporaryClusterFileDirectory;\n\n    public UCFMConfig ( Map<String, Object > configMap ) {\n        this.mnFileFrameSize = ( (Number)configMap.get(\"fileFrameSize\") ).intValue();\n        this.mnBatchTransmitMemberThreshold = ( (Number)configMap.get(\"batchTransmitMemberThreshold\") ).intValue();\n        this.mnSessionExpiredTimeMillis = ( (Number)configMap.get(\"sessionExpiredTimeMillis\") ).longValue();\n        this.mszFileCloudDistributeTransmitTopic = (String) configMap.get(\"fileCloudDistributeTransmitTopic\");\n        this.mszFileCloudDistributeEventTopic = (String) configMap.get(\"fileCloudDistributeEventTopic\");\n        this.mszFileServiceTransmitGroup = (String) configMap.get(\"fileServiceTransmitGroup\");\n        this.mszTemporaryFileExtends = (String) configMap.get(\"temporaryFileExtends\");\n        this.mszMajorTemporaryClusterFileDirectory = (String) configMap.get(\"majorTemporaryClusterFileDirectory\");\n        this.mszLocalMasterTemporaryClusterFileDirectory = (String) configMap.get(\"localMasterTemporaryClusterFileDirectory\");\n    }\n\n    @Override\n    public int getFileFrameSize() {\n        return this.mnFileFrameSize;\n    }\n\n    @Override\n    public String getFileCloudDistributeTransmitTopic() {\n        return this.mszFileCloudDistributeTransmitTopic;\n    }\n\n    @Override\n    public 
String getFileCloudDistributeEventTopic() {\n        return this.mszFileCloudDistributeEventTopic;\n    }\n\n    @Override\n    public long getSessionExpiredTimeMillis() {\n        return this.mnSessionExpiredTimeMillis;\n    }\n\n    @Override\n    public String getFileServiceTransmitGroup() {\n        return this.mszFileServiceTransmitGroup;\n    }\n\n    @Override\n    public String getTemporaryFileExtends() {\n        return this.mszTemporaryFileExtends;\n    }\n\n    @Override\n    public String getMajorTemporaryClusterFileDirectory() {\n        return this.mszMajorTemporaryClusterFileDirectory;\n    }\n\n    @Override\n    public Path formatTemporaryPath( String segName ) {\n        return Path.of( this.getMajorTemporaryClusterFileDirectory(), segName + this.getTemporaryFileExtends() );\n    }\n\n    @Override\n    public Path formatMasterTemporaryPath( String segName ) {\n        return Path.of( this.mszLocalMasterTemporaryClusterFileDirectory, segName + this.getTemporaryFileExtends() );\n    }\n\n    @Override\n    public int getBatchTransmitMemberThreshold() {\n        return this.mnBatchTransmitMemberThreshold;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMConfig.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport java.nio.file.Path;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UFMConfig extends Pinenut {\n\n    int getFileFrameSize();\n\n    String getFileCloudDistributeTransmitTopic();\n\n    String getFileCloudDistributeEventTopic();\n\n    String getFileServiceTransmitGroup();\n\n    String getTemporaryFileExtends();\n\n    String getMajorTemporaryClusterFileDirectory();\n\n    Path formatTemporaryPath( String segName );\n\n    Path formatMasterTemporaryPath( String segName );\n\n    long getSessionExpiredTimeMillis();\n\n    int getBatchTransmitMemberThreshold();\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMDClusterDO.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.walnut.sparta.ucdn.console.ufm.protocol.FileMeta64;\n\npublic class UFMDClusterDO extends FileMeta64 {\n    protected String filePath;\n\n    protected long segId;\n\n\n    public UFMDClusterDO() {\n        super();\n    }\n\n    public UFMDClusterDO( String sourceName, long size, long validateVal, String filePath, long segId ) {\n        super( sourceName, size, validateVal );\n        this.filePath   = filePath;\n        this.segId      = segId;\n    }\n\n\n    public String getFilePath() {\n        return filePath;\n    }\n\n    public void setFilePath( String filePath ) {\n        this.filePath = filePath;\n    }\n\n    public long getSegId() {\n        return segId;\n    }\n\n    public void setSegId(long segId) {\n        this.segId = segId;\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMDClusterFrame.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UFMDClusterFrame implements Pinenut {\n    private byte[] bytes;\n\n    private String path;\n\n    private long segId;\n\n    private long totalSegNum;\n\n    private long offset;\n\n    public UFMDClusterFrame( byte[] bytes, String path, long segId, long totalSegNum, long offset ) {\n        this.bytes = bytes;\n        this.path = path;\n        this.segId = segId;\n        this.totalSegNum = totalSegNum;\n        this.offset = offset;\n    }\n\n    public UFMDClusterFrame(){}\n\n    public byte[] getBytes() {\n        return bytes;\n    }\n\n    public void setBytes(byte[] bytes) {\n        this.bytes = bytes;\n    }\n\n    public String getPath() {\n        return path;\n    }\n\n    public void setPath(String path) {\n        this.path = path;\n    }\n\n    public long getSegId() {\n        return segId;\n    }\n\n    public void setSegId(long segId) {\n        this.segId = segId;\n    }\n\n    public long getTotalSegNum(){\n        return this.totalSegNum;\n    }\n\n    public void setTotalSegNum( long totalSegNum ){\n        this.totalSegNum = totalSegNum;\n    }\n\n    public long getOffset(){\n        return this.offset;\n    }\n\n    public void setOffset( long offset ){\n        this.offset = offset;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMSessionPhaser.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.walnut.sparta.ucdn.console.infrastructure.ClusterLock;\nimport com.walnut.sparta.ucdn.console.ufm.session.UFMTransaction;\n\nimport java.io.RandomAccessFile;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ConcurrentMap;\n\n//@Component\npublic class UFMSessionPhaser implements SessionPhaser {\n    private ConcurrentMap<Long, UFMTransaction >    sessionTransactions; //用sessionId记录事件是否存在\n\n    // File.Guid => Lock\n    private ConcurrentMap<GUID, Object>             fileLocksMap;//大文件的Guid，用来保存阶段锁\n\n    private ConcurrentMap<GUID, ClusterLock>        ClusterLocksMap;//簇锁，保证同时只有一个线程在写入临时文件\n\n    // File.Guid => Cluster.count (N)\n    private ConcurrentMap<GUID, Long>               clusterComplatedPhaserMap;//大文件Guid，用来记录已经传输了多少簇\n\n    // File.Guid => Consumer.count (N)\n    private ConcurrentMap<GUID, Long>               consumerComplatedPhaserMap;//大文件Guid，用来记录目前有多少线程完成任务\n\n    private ConcurrentMap<GUID, RandomAccessFile>   clusterOutputStreamMap; // 簇GUID，用来记录当前簇的IO操作通道\n\n    public UFMSessionPhaser() {\n        this.sessionTransactions          = new ConcurrentHashMap<>();\n        this.fileLocksMap                 = new ConcurrentHashMap<>();\n        this.clusterComplatedPhaserMap    = new ConcurrentHashMap<>();\n        this.consumerComplatedPhaserMap   = new ConcurrentHashMap<>();\n        this.clusterOutputStreamMap       = new ConcurrentHashMap<>();\n        this.ClusterLocksMap              = new ConcurrentHashMap<>();\n    }\n\n\n    @Override\n    public void registerFileLock( GUID guid, Object object ) {\n        this.fileLocksMap.put( guid, object );\n    }\n\n    @Override\n    public Object getFileLock( GUID guid ) {\n        return this.fileLocksMap.get( guid );\n    }\n\n    @Override\n    public void removeFileLock(GUID guid) {\n        this.fileLocksMap.remove( guid );\n    }\n\n    @Override\n  
  public void registerClusterLock(GUID guid, ClusterLock clusterLock) {\n        this.ClusterLocksMap.put( guid, clusterLock );\n    }\n\n    @Override\n    public ClusterLock getClusterLock(GUID guid) {\n        return this.ClusterLocksMap.get( guid );\n    }\n\n    @Override\n    public void removeClusterLock(GUID guid) {\n        this.ClusterLocksMap.remove( guid );\n    }\n\n    @Override\n    public void removeClusterCount(GUID guid) {\n        this.clusterComplatedPhaserMap.remove( guid );\n    }\n\n    @Override\n    public void removeConsumerCount(GUID guid) {\n        this.consumerComplatedPhaserMap.remove( guid );\n    }\n\n    @Override\n    public void registerClusterCount( GUID guid, long count ) {\n        this.clusterComplatedPhaserMap.put( guid,count );\n    }\n\n    @Override\n    public long getClusterCount( GUID guid ) {\n        return this.clusterComplatedPhaserMap.get( guid );\n    }\n\n    @Override\n    public void incrementClusterCount( GUID guid ) {\n        Long l = this.clusterComplatedPhaserMap.get(guid);\n        this.clusterComplatedPhaserMap.put( guid, l+1 );\n    }\n\n    @Override\n    public void resetClusterCount( GUID guid ) {\n        this.clusterComplatedPhaserMap.put( guid, 0L );\n    }\n\n    @Override\n    public void registerConsumerCount( GUID guid, Long count ) {\n        this.consumerComplatedPhaserMap.put( guid, count );\n    }\n\n    @Override\n    public long getConsumerCount( GUID guid ) {\n        return this.consumerComplatedPhaserMap.get( guid );\n    }\n\n    @Override\n    public void incrementConsumerCount( GUID guid ) {\n        Long l = this.consumerComplatedPhaserMap.get(guid);\n        this.consumerComplatedPhaserMap.put( guid, l+1 );\n    }\n\n    @Override\n    public void resetConsumerCount( GUID guid ) {\n        this.consumerComplatedPhaserMap.put( guid, 0L );\n    }\n\n    @Override\n    public void registerSessionTransaction(Long sessionId, UFMTransaction ufmTransaction) {\n        
this.sessionTransactions.put( sessionId, ufmTransaction );\n    }\n\n    @Override\n    public UFMTransaction getSessionTransaction(Long sessionId) {\n        return this.sessionTransactions.get( sessionId );\n    }\n\n    @Override\n    public void removeSessionTransaction(Long sessionId) {\n        this.sessionTransactions.remove( sessionId );\n    }\n\n    @Override\n    public void registerClusterOutputStream(GUID guid, RandomAccessFile fileOutputStream) {\n        this.clusterOutputStreamMap.put( guid, fileOutputStream );\n    }\n\n    @Override\n    public RandomAccessFile getClusterOutputStream(GUID guid) {\n        return this.clusterOutputStreamMap.get( guid );\n    }\n\n    @Override\n    public void removeClusterOutputStream(GUID guid) {\n        this.clusterOutputStreamMap.remove( guid );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMSessionValidator.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;\n\nimport java.io.IOException;\n\npublic class UFMSessionValidator implements SessionValidator {\n\n    protected UlfBroadcastControlNode          eventMQClient;\n\n    protected BroadcastControlProducer         eventProducer;\n\n    protected BroadcastControlConsumer         eventConsumer;\n\n    protected UFMConfig                        config;\n\n    protected UOFSFileMultiDistributionService distributionService;\n\n\n    public UFMSessionValidator( UOFSFileMultiDistributionService distributionService ) {\n        this.config              = distributionService.config;\n        this.distributionService = distributionService;\n        this.eventMQClient       = distributionService.ucdnService.getPrimaryMessageMiddlewareDirector().getPrimaryRocketClient();\n    }\n\n    @Override\n    public boolean hasStarted() {\n        return this.eventProducer != null;\n    }\n\n    @Override\n    public void start() throws UMBServiceException {\n        if ( !this.hasStarted() ) {\n            this.eventProducer     = this.eventMQClient.createBroadcastControlProducer();\n            this.eventConsumer     = this.eventMQClient.createBroadcastControlConsumer( this.config.getFileCloudDistributeEventTopic() );\n            this.eventConsumer.registerController( new UFMSessionValidatorController( this.distributionService ) );\n            this.eventConsumer.start();\n            this.eventProducer.start();\n        }\n    }\n\n    @Override\n    public void shutdown() {\n        if ( this.hasStarted() ) {\n            this.eventProducer.close();\n            this.eventConsumer.close();\n            this.eventProducer = null;\n            this.eventConsumer = null;\n        }\n 
   }\n\n    @Override\n    public void stageClusterGroupComplete( String path ) throws IOException {\n        this.eventProducer.issueInform(\n                this.config.getFileCloudDistributeEventTopic(), \"com.walnut.sparta.ucdn.console.ufm.SessionValidator.stageClusterGroupComplete\", path\n        );\n    }\n\n    @Override\n    public void stageFileTransmitComplete( String path ) throws IOException {\n        this.eventProducer.issueInform(\n                this.config.getFileCloudDistributeEventTopic(), \"com.walnut.sparta.ucdn.console.ufm.SessionValidator.stageFileTransmitComplete\", path\n        );\n    }\n\n    @Override\n    public void fileTransmitComplete( String path, String serviceId ) throws IOException {\n        this.eventProducer.issueInform(\n                this.config.getFileCloudDistributeEventTopic(), \"com.walnut.sparta.ucdn.console.ufm.SessionValidator.fileTransmitComplete\", path,serviceId\n        );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UFMSessionValidatorController.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\nimport com.walnut.sparta.ucdn.console.ufm.event.UFMEventSubscriber;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.IOException;\nimport java.util.Collection;\n\n@Controller\n@AddressMapping( \"com.walnut.sparta.ucdn.console.ufm.SessionValidator.\" )\npublic class UFMSessionValidatorController implements Pinenut {\n    private Logger                        logger;\n\n    private SessionPhaser                 sessionPhaser;\n\n    private KOMFileSystem                 primaryFileSystem;\n\n    private FileMultiDistributionService  distributionService;\n\n    public UFMSessionValidatorController( UOFSFileMultiDistributionService distributionService ){\n        this.logger               = LoggerFactory.getLogger( this.getClass() );\n        this.distributionService  = distributionService;\n        this.primaryFileSystem    = distributionService.primaryFileSystem;\n        this.sessionPhaser        = distributionService.sessionPhaser;\n    }\n\n    @AddressMapping( \"stageClusterGroupComplete\" )\n    public void stageClusterGroupComplete( String path ){\n        this.logger.info( \"UFMService invoked stageClusterGroupComplete.\" );\n\n        ElementNode elementNode = this.primaryFileSystem.queryElement(path);\n        this.sessionPhaser.incrementConsumerCount( elementNode.getGuid() );\n\n        if( this.sessionPhaser.getConsumerCount( elementNode.getGuid() ) == 1 ){\n            final Object lock = this.sessionPhaser.getFileLock( elementNode.getGuid() );\n            synchronized ( lock ){\n                lock.notify();\n            }\n            
this.sessionPhaser.resetConsumerCount( elementNode.getGuid() );\n        }\n    }\n\n    @AddressMapping( \"stageFileTransmitComplete\" )\n    public void stageFileTransmitComplete( String path ){\n        this.logger.info( \"SlaveNode {}, file receive complete.\", path );\n    }\n\n    @AddressMapping( \"fileTransmitComplete\" )\n    public void fileTransmitComplete( String path, String serviceId ) throws IOException {\n        FileNode fileNode = (FileNode)this.primaryFileSystem.queryElement(path);\n        //GUID versionFileGuid = this.versionManage.getVersionFileByGuid(fileNode.getGuid());\n\n        Collection<UFMEventSubscriber> subscribers = this.distributionService.fetchFileTransmitCompleteEventSubscribers();\n        for ( UFMEventSubscriber subscriber : subscribers ) {\n            subscriber.afterEventTriggered( path, serviceId, fileNode );\n        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/UOFSFileMultiDistributionService.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm;\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ClusterPage;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.sparta.ucdn.console.infrastructure.UCDNService;\nimport com.walnut.sparta.ucdn.console.ufm.event.UFMEventSubscriber;\nimport com.walnut.sparta.ucdn.console.ufm.protocol.RequestHead;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.Path;\nimport java.nio.file.StandardOpenOption;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.List;\n\npublic class UOFSFileMultiDistributionService implements FileMultiDistributionService {\n    protected KOMFileSystem                       primaryFileSystem;\n\n    protected UniformVolumeManager                primaryVolume;\n\n    protected SessionPhaser                       sessionPhaser;\n\n    protected UlfBroadcastControlNode             transmitClient;\n\n    protected BroadcastControlProducer            transmitProducer;\n\n    protected BroadcastControlConsumer            transmitConsumer;\n\n    protected List<UFMEventSubscriber>            fileTransmitCompleteEventSubscribers;\n\n    protected SessionValidator    
                fileSessionValidator;\n\n    protected UCDNService                         ucdnService;\n\n    protected UFMConfig                           config;\n\n    public UOFSFileMultiDistributionService( UCDNService ucdnService ) {\n        this.ucdnService            = ucdnService;\n        this.primaryFileSystem      = ucdnService.getKOMFileSystem();\n        this.primaryVolume          = ucdnService.getUniformVolumeManager();\n        this.sessionPhaser          = new UFMSessionPhaser();\n        this.transmitClient         = ucdnService.getPrimaryMessageMiddlewareDirector().getPrimaryKafkaClient();\n        this.config                 = ucdnService.getClusterFileSynchronizationConfig();\n        this.fileSessionValidator   = new UFMSessionValidator( this );\n\n        this.fileTransmitCompleteEventSubscribers = new ArrayList<>();\n    }\n\n    @Override\n    public FileMultiDistributionService registerFileTransmitCompleteEventSubscriber( UFMEventSubscriber subscriber ) {\n        if ( this.hasStarted() ) {\n            throw new IllegalStateException( \"FileMultiDistributionService has already started.\" );\n        }\n\n        this.fileTransmitCompleteEventSubscribers.add( subscriber );\n        return this;\n    }\n\n    @Override\n    public FileMultiDistributionService deregisterFileTransmitCompleteEventSubscriber( UFMEventSubscriber subscriber ) {\n        if ( this.hasStarted() ) {\n            throw new IllegalStateException( \"FileMultiDistributionService has already started.\" );\n        }\n\n        this.fileTransmitCompleteEventSubscribers.remove( subscriber );\n        return this;\n    }\n\n\n    @Override\n    public boolean hasStarted() {\n        return this.transmitProducer != null;\n    }\n\n    @Override\n    public void start() throws UMBServiceException {\n        if ( !this.hasStarted() ) {\n            this.transmitProducer = this.transmitClient.createBroadcastControlProducer();\n            this.transmitConsumer = 
this.transmitClient.createBroadcastControlConsumer( this.config.getFileCloudDistributeTransmitTopic(), this.config.getFileServiceTransmitGroup() );\n            this.transmitConsumer.registerController( new FileMultiDistributionController( this ) );\n            this.transmitConsumer.start();\n            this.transmitProducer.start();\n\n            if ( !this.fileSessionValidator.hasStarted() ) {\n                this.fileSessionValidator.start();\n            }\n        }\n    }\n\n    @Override\n    public void shutdown() {\n        if ( this.hasStarted() ) {\n            this.transmitConsumer.close();\n            this.transmitProducer.close();\n            this.transmitConsumer = null;\n            this.transmitProducer = null;\n\n            if ( this.fileSessionValidator.hasStarted() ) {\n                this.fileSessionValidator.shutdown();\n            }\n        }\n    }\n\n    @Override\n    public UFMConfig getConfig() {\n        return this.config;\n    }\n\n    @Override\n    public Collection<UFMEventSubscriber> fetchFileTransmitCompleteEventSubscribers() {\n        return this.fileTransmitCompleteEventSubscribers;\n    }\n\n    @Override\n    public void fileDistribution( FileNode fileNode, String topic ) throws IOException, InterruptedException {\n        this.sessionPhaser.registerFileLock( fileNode.getGuid(), new Object() );\n        FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment();\n        FileMultiDistributionIface fileDistribution = this.transmitProducer.getIface(FileMultiDistributionIface.class, topic);\n        String path = this.primaryFileSystem.getPath(fileNode.getGuid());\n\n\n        long requestId = 0;\n        RequestHead head = RequestHead.newRequest().setSessionId( System.currentTimeMillis() );\n        fileDistribution.startDistribution( head, path, fileNode.getPhysicalSize() );\n        //this.producer.issueInform( topic, 
\"com.walnut.sparta.ucdn.console.umc.FileDistribution.startDistribution\",path,fileNode.getPhysicalSize() );\n\n        ClusterPage clusterPage = this.primaryFileSystem.fetchClustersByFileGuid( fileNode.getGuid() );\n\n        long fileClusterNum = clusterPage.getClusters();\n\n        int distributionFrameNum = 0;\n        this.sessionPhaser.registerConsumerCount( fileNode.getGuid(),0L );\n\n        for( long i = 0; i < fileClusterNum; ++i ){\n            LocalCluster frame = clusterPage.getLocalCluster( i );\n            // TODO, Remote\n            UFMDClusterDO UFMDClusterDO = new UFMDClusterDO(\n                    frame.getSourceName(), frame.getSize(), frame.getCrc32(),\n                    path, i\n            );\n\n            fileDistribution.setFrameMeta( head, UFMDClusterDO );\n\n            Path tempFilePath     = this.config.formatMasterTemporaryPath( frame.getSegGuid().toString() );\n            String szTempFilePath = tempFilePath.toString();\n            File tempFile = new File( szTempFilePath );\n\n            if ( !tempFile.createNewFile() ){\n                throw new IOException( \"Creating file compromised, what :\" + szTempFilePath );\n            }\n            FileChannel channel = FileChannel.open( tempFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND );\n            TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );\n            FileNode newFileNode = fsNodeAllotment.newFileNode();\n            newFileNode.setPath( frame.getSourceName() );\n            newFileNode.setDefinitionSize( frame.getSize() );\n            TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64( this.primaryFileSystem, this.primaryVolume, newFileNode, kChannel );\n            exportEntity.export( frame );\n\n            FileInputStream fileInputStream = new FileInputStream(tempFile);\n\n//            int bufferSize = 950 * 1024;\n//            byte[] buffer = new byte[ bufferSize 
];\n//            int bytesRead;\n//\n//            while( ( bytesRead = fileInputStream.read( buffer ) )!=-1 ) {\n//                if ( bytesRead < bufferSize ) {\n//                    byte[] validData = Arrays.copyOfRange(buffer, 0, bytesRead);\n//                    buffer = validData;\n//                }\n//\n//                fileDistribution.transmitClusterFrame( head, new UFMDClusterFrame( buffer, path, i, fileClusterNum ) );\n//                //this.producer.issueInform( topic, \"com.walnut.sparta.ucdn.console.umc.FileDistribution.transmitClusterFrame\",new UFMDClusterFrame(buffer,path,i));\n//            }\n\n            int bufferSize = 2 * 1024 * 1024; // 2MB\n            byte[] buffer = new byte[bufferSize];\n            int bytesRead;\n            int chunkSize = this.config.getFileFrameSize();\n            long currentPosition = 0;\n\n            try {\n                while ( (bytesRead = fileInputStream.read(buffer)) != -1 ) {\n                    int chunksToProcess = (bytesRead + chunkSize - 1) / chunkSize; // 计算需要拆分的块数\n\n                    for ( int j = 0; j < chunksToProcess; ++j ) {\n                        // 计算当前块的起始和结束位置\n                        int start = j * chunkSize;\n                        int end = Math.min(start + chunkSize, bytesRead);\n\n                        byte[] chunkData = Arrays.copyOfRange( buffer, start, end ); // 拆分出当前块\n\n                        // 发送当前块的数据\n                        fileDistribution.transmitClusterFrame(\n                                head,\n                                new UFMDClusterFrame(chunkData, path, i, fileClusterNum, currentPosition)\n                        );\n                        currentPosition = currentPosition + (end - start);\n                    }\n                }\n\n                fileInputStream.close();\n            }\n            finally {\n                fileInputStream.close();\n                tempFile.delete();\n            }\n\n\n            
++distributionFrameNum;\n            if( distributionFrameNum == this.config.getBatchTransmitMemberThreshold() ){\n                synchronized( this.sessionPhaser.getFileLock( fileNode.getGuid() ) ){\n                    this.sessionPhaser.getFileLock( fileNode.getGuid() ).wait();\n                }\n                distributionFrameNum = 0;\n            }\n        }\n    }\n\n    @Override\n    public void test() throws UMBServiceException {\n        FileMultiDistributionIface fileDistribution = this.transmitProducer.getIface(FileMultiDistributionIface.class,\"testTopic\");\n        FileNode fileNode = this.primaryFileSystem.getFileNode(GUIDs.GUID128(\"1214792-000373-0003-00\"));\n        String path = this.primaryFileSystem.getPath(fileNode.getGuid());\n        //fileDistribution.startDistribution( path );\n        BroadcastControlConsumer consumer = this.getTransmitConsumer(\"testTopic\", \"testGroup\");\n        consumer.start();\n    }\n\n    @Override\n    public BroadcastControlConsumer getTransmitConsumer( String topic,String group ) {\n        return this.transmitConsumer;\n    }\n\n    @Override\n    public BroadcastControlProducer getTransmitProducer() {\n        return this.transmitProducer;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/event/UFMEventSubscriber.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm.event;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\n\nimport java.io.IOException;\n\npublic interface UFMEventSubscriber extends Pinenut {\n    void afterEventTriggered( String path, String serviceId, FileNode fileNode ) ;\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/protocol/FileMeta64.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm.protocol;\n\npublic class FileMeta64 {\n\n    protected String sourceName;\n\n    protected long size;\n\n    protected long validateVal;\n\n    public FileMeta64() {}\n\n    public FileMeta64( String sourceName, long size, long validateVal ) {\n        this.size        = size;\n        this.sourceName  = sourceName;\n        this.validateVal = validateVal;\n    }\n\n\n    public String getSourceName() {\n        return this.sourceName;\n    }\n\n    public void setSourceName( String sourceName ) {\n        this.sourceName = sourceName;\n    }\n\n    public long getSize() {\n        return this.size;\n    }\n\n    public void setSize( long size ) {\n        this.size = size;\n    }\n\n    public long getValidateVal() {\n        return this.validateVal;\n    }\n\n    public void setValidateVal( long validateVal ) {\n        this.validateVal = validateVal;\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/protocol/RequestHead.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm.protocol;\n\npublic class RequestHead {\n    protected long sessionId;\n\n    public RequestHead setSessionId(long sessionId ) {\n        this.sessionId = sessionId;\n        return this;\n    }\n\n    public long getSessionId() {\n        return this.sessionId;\n    }\n\n\n    public static RequestHead newRequest() {\n        return new RequestHead();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/ufm/session/UFMTransaction.java",
    "content": "package com.walnut.sparta.ucdn.console.ufm.session;\n\nimport com.pinecone.framework.util.id.GUID;\n\nimport java.util.List;\nimport java.util.concurrent.atomic.AtomicBoolean;\n\npublic class UFMTransaction {\n    protected GUID          localFileGUID;\n\n    protected AtomicBoolean startTransmit;\n\n    protected AtomicBoolean transmitFileContent;\n\n    protected AtomicBoolean fileDistributionComplete;\n\n    protected long          lastEventArrivedMills;\n\n    public UFMTransaction( GUID localFileGUID ) {\n        this.localFileGUID = localFileGUID;\n        this.startTransmit = new AtomicBoolean(false);\n        this.transmitFileContent = new AtomicBoolean(false);\n        this.fileDistributionComplete = new AtomicBoolean(false);\n    }\n\n    public GUID getLocalFileGUID() {\n        return this.localFileGUID;\n    }\n\n    public long getLastEventArrivedMills() {\n        return this.lastEventArrivedMills;\n    }\n\n    public void setLastEventArrivedMills( long lastEventArrivedMills ) {\n        this.lastEventArrivedMills = lastEventArrivedMills;\n    }\n\n    public boolean finishStartTransmit() {\n        return this.startTransmit.compareAndSet(false, true);\n    }\n\n    public boolean finishTransmitFileContent() {\n        return this.transmitFileContent.compareAndSet(false, true);\n    }\n\n    public boolean finishFileDistributionComplete() {\n        return this.fileDistributionComplete.compareAndSet(false, true);\n    }\n\n\n    public boolean isStartTransmit() {\n        return this.startTransmit.get();\n    }\n\n    public boolean isTransmitFileContent() {\n        return this.transmitFileContent.get();\n    }\n\n    public boolean isFileDistributionComplete() {\n        return this.fileDistributionComplete.get();\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/main/java/com/walnut/sparta/ucdn/console/util/JWTUtil.java",
    "content": "package com.walnut.sparta.ucdn.console.util;\n\nimport com.auth0.jwt.JWT;\nimport com.auth0.jwt.algorithms.Algorithm;\nimport com.auth0.jwt.interfaces.DecodedJWT;\n\nimport java.util.Date;\nimport java.util.HashMap;\n\npublic class JWTUtil {\n    private static final String SIGN = \"!^&%&*!@$*%!!@(&%2ar^2t\";\n    //学生登录生成JWT令牌\n    public static String createJWT(){\n        HashMap<String, Object> map = new HashMap<>();\n        String token = JWT.create()\n                .withHeader(map) //设置头信息\n                .withExpiresAt(new Date(System.currentTimeMillis() + 3600 * 1000)) //设置失效时间\n                .sign(Algorithm.HMAC256(SIGN)); //设置签名以及签名方式 这里使用HMAC256加密方式\n        return token;\n    }\n\n    public static DecodedJWT ParseJWt(String jwt){\n        return JWT.require(Algorithm.HMAC256(SIGN)).build().verify(jwt);\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-console/src/test/java/cn/ken/AppTest.java",
    "content": "package cn.ken;\n\nimport junit.framework.Test;\nimport junit.framework.TestCase;\nimport junit.framework.TestSuite;\n\n/**\n * Unit test for simple App.\n */\npublic class AppTest \n    extends TestCase\n{\n    /**\n     * Create the test case\n     *\n     * @param testName name of the test case\n     */\n    public AppTest( String testName )\n    {\n        super( testName );\n    }\n\n    /**\n     * @return the suite of tests being tested\n     */\n    public static Test suite()\n    {\n        return new TestSuite( AppTest.class );\n    }\n\n    /**\n     * Rigourous Test :-)\n     */\n    public void testApp()\n    {\n        assertTrue( true );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sparta</artifactId>\n        <groupId>com.walnuts.sparta</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.sparta.ucdn.service</groupId>\n    <artifactId>sparta-ucdn-service</artifactId>\n    <version>2.1.0</version>\n\n    <dependencies>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-test</artifactId>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n            <version>2.6.13</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            
<version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n</project>\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/SpartaBoot.java",
    "content": "package com.walnut.sparta.ucdn.service;\n\nimport org.springframework.boot.autoconfigure.SpringBootApplication;\n\n@SpringBootApplication\npublic class SpartaBoot {\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/UCDNBoot.java",
    "content": "package com.walnut.sparta.ucdn.service;\n\nimport com.pinecone.Pinecone;\nimport com.walnut.sparta.ucdn.service.infrastructure.UOFSContentDelivery;\n\n\npublic class UCDNBoot {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            UOFSContentDelivery ucdn = (UOFSContentDelivery) Pinecone.sys().getTaskManager().add(\n                    new UOFSContentDelivery( args, Pinecone.sys() )\n            );\n            ucdn.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/api/controller/v2/ClientController.java",
    "content": "package com.walnut.sparta.ucdn.service.api.controller.v2;\n\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.io.TitanOutputStreamChanface;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.source.SiteManipulator;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umb.kafka.WolfMCKafkaClient;\nimport com.pinecone.hydra.umb.wolf.WolfMCBClient;\nimport com.pinecone.hydra.umct.WolfMCExpress;\nimport com.walnut.sparta.ucdn.service.api.iface.v2.FileSyncDistributionController;\nimport com.walnut.sparta.ucdn.service.infrastructure.UOFSContentDelivery;\nimport com.walnut.sparta.ucdn.service.infrastructure.constants.PolicyConstants;\nimport com.walnut.sparta.ucdn.service.infrastructure.exception.IllegalPathException;\nimport com.walnut.sparta.ucdn.service.umct.FileSyncDistribution;\nimport org.apache.kafka.common.serialization.StringDeserializer;\nimport org.apache.kafka.common.serialization.StringSerializer;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.PostConstruct;\nimport javax.annotation.Resource;\nimport javax.servlet.ServletOutputStream;\nimport javax.servlet.http.HttpServletRequest;\nimport 
javax.servlet.http.HttpServletResponse;\nimport java.io.IOException;\nimport java.util.Map;\n\n@RestController\n@CrossOrigin\npublic class ClientController {\n\n    @Resource\n    private KOMFileSystem               primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager        primaryVolume;\n\n    @Resource\n    private VersionManage               primaryVersion;\n\n    @Resource\n    private BucketInstrument            bucketInstrument;\n\n    @GetMapping(\"/titan/version\")\n    public String queryVersion(HttpServletRequest request, HttpServletResponse response){\n        return \"undefined\";\n    }\n\n    @GetMapping(\"/**\")\n    public void  getFile(HttpServletRequest request, HttpServletResponse response) throws IOException,IllegalPathException {\n        String servletPath = request.getServletPath();\n        String[] pathPart = servletPath.split(PolicyConstants.FORWARD_SLASH);\n\n        if( pathPart.length < 3 ){\n            //todo 路径500问题\n            throw new IllegalPathException();\n        }\n\n        String siteName = pathPart[1];\n\n        //todo 统一路径解析\n        StringBuilder filePath = new StringBuilder();\n        for( int i = 2; i < pathPart.length; i++ ){\n            if( i == pathPart.length - 1 ){\n                int dotIndex = pathPart[i].lastIndexOf(PolicyConstants.PERIOD);\n                String baseName = pathPart[i].substring(0, dotIndex);\n                filePath.append(baseName);\n            }else {\n                filePath.append(pathPart[i]).append(PolicyConstants.FORWARD_SLASH);\n            }\n\n        }\n\n\n        SiteManipulator siteManipulator = this.bucketInstrument.getSiteManipulator();\n        Site site = siteManipulator.querySiteByName(siteName);\n\n        String realFilePath = this.primaryFileSystem.getPath(site.getMountPointGuid()) + PolicyConstants.FORWARD_SLASH + filePath;\n\n        //后续升级成责任链获得更好的扩展性\n        Map<String, String[]> parameterMap = request.getParameterMap();\n\n        String 
version = \"\";\n        if( parameterMap.get(\"version\") != null ){\n            version = parameterMap.get(\"version\")[0];\n        }\n\n        ServletOutputStream outputStream = response.getOutputStream();\n        TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(outputStream);\n\n        Folder folder = (Folder) this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(realFilePath));\n        GUID storageObjectGuid = this.primaryVersion.queryObjectGuid(version, folder.getGuid());\n        FileNode storageObject = (FileNode) this.primaryFileSystem.get(storageObjectGuid);\n\n        TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, storageObject, kChannel);\n        this.primaryFileSystem.export( entity );\n\n    }\n\n\n    @Resource\n    protected UOFSContentDelivery uofsContentDelivery;\n\n    @Resource\n    protected FileSyncDistributionController fileSyncDistributionController;\n\n    @PostConstruct\n    private void init() throws Exception {\n        String server = \"localhost:9092\";\n        String keySerializer = StringSerializer.class.getName();\n        String valueSerializer = StringSerializer.class.getName();\n        String topic = \"testTopic\";\n        String group = \"testGroup\";\n        String keyDeserializer = StringDeserializer.class.getName();\n        String valueDeserializer = StringDeserializer.class.getName();\n        String autoOffsetReset = \"earliest\";\n\n        WolfMCBClient client = new WolfMCBClient(new WolfMCKafkaClient(server), \"\", this.uofsContentDelivery, WolfMCExpress.class);\n\n\n        client.compile( FileSyncDistribution.class, false );\n        BroadcastControlProducer producer = client.createBroadcastControlProducer();\n        producer.start();\n\n\n        FileSyncDistribution raccoon = producer.getIface( FileSyncDistribution.class, topic );\n\n\n\n        BroadcastControlConsumer consumer = 
client.createBroadcastControlConsumer(topic,group);\n        consumer.registerController( this.fileSyncDistributionController );\n        Thread thread = new Thread(()->{\n            try {\n                consumer.start();\n            } catch (UMBServiceException e) {\n                throw new RuntimeException(e);\n            }\n        });\n        thread.start();\n\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/api/controller/v2/ConsoleController.java",
    "content": "package com.walnut.sparta.ucdn.service.api.controller.v2;\n\n\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.source.SiteManipulator;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.version.entity.TitanVersion;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.walnut.sparta.ucdn.service.api.response.BasicResultResponse;\nimport com.walnut.sparta.ucdn.service.infrastructure.constants.PolicyConstants;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\nimport org.springframework.web.multipart.MultipartFile;\n\nimport javax.annotation.Resource;\nimport java.io.File;\nimport java.io.IOException;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\n\n@RestController\n@RequestMapping( \"/api/v2/ucdn/console\" )\n@CrossOrigin\npublic class ConsoleController {\n    @Resource\n    private KOMFileSystem           primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager    primaryVolume;\n\n    @Resource\n    private VersionManage           primaryVersion;\n\n    @Resource\n    private BucketInstrument        bucketInstrument;\n\n    /**\n     * 上传文件\n    
 * @param filePath 目标路径\n     * @param version 版本号\n     * @param file 文件\n     * @param siteName 站点\n     * @return 返回操作结果\n     */\n    @PostMapping(\"/upload\")\n    public BasicResultResponse<String> upload(@RequestParam(\"siteName\") String siteName, @RequestParam(\"filePath\") String filePath, @RequestParam(\"version\") String version, @RequestParam(\"file\") MultipartFile file) throws IOException {\n        SiteManipulator siteManipulator = this.bucketInstrument.getSiteManipulator();\n        Site site = siteManipulator.querySiteByName(siteName);\n        if( site == null ){\n            return BasicResultResponse.error(\"站点不存在\");\n        }\n        int dotIndex = filePath.lastIndexOf(PolicyConstants.PERIOD);\n        String baseName = filePath.substring(0, dotIndex);\n        String extension = filePath.substring(dotIndex + 1);\n        String realFilePath = this.primaryFileSystem.getPath(site.getMountPointGuid()) + PolicyConstants.FORWARD_SLASH + baseName;\n\n        FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment();\n        Folder node = this.primaryFileSystem.affirmFolder(realFilePath);\n        String storageObjectPath = realFilePath + PolicyConstants.VERSION_PREFIX+ PolicyConstants.FORWARD_SLASH + version +PolicyConstants.PERIOD+ extension;\n        File tempFile = File.createTempFile(\"upload\",\".temp\");\n        if(!tempFile.exists()){\n            throw new IOException( \"Creating file compromised, what :\" + tempFile.toPath() );\n        }\n        file.transferTo(tempFile);\n\n        FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ);\n        TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );\n        FileNode fileNode = fsNodeAllotment.newFileNode();\n        fileNode.setDefinitionSize( tempFile.length() );\n        fileNode.setName( tempFile.getName() );\n        TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( 
this.primaryFileSystem,storageObjectPath, fileNode,titanFileChannelKChannel,this.primaryVolume );\n\n        this.primaryFileSystem.receive( receiveEntity );\n\n        FileTreeNode storageObject = this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(storageObjectPath));\n        TitanVersion titanVersion = new TitanVersion();\n        titanVersion.setVersion( version );\n        titanVersion.setFileGuid( node.getGuid() );\n        titanVersion.setTargetStorageObjectGuid( storageObject.getGuid() );\n\n        this.primaryVersion.insert( titanVersion );\n\n        if( !tempFile.delete() ){\n            throw new IOException( \"Purging temporary file compromised, what :\" + tempFile.toPath() );\n        }\n        return BasicResultResponse.success();\n    }\n\n    \n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/api/iface/v2/FileSyncDistributionController.java",
    "content": "package com.walnut.sparta.ucdn.service.api.iface.v2;\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\nimport com.walnut.sparta.ucdn.service.umct.FileSyncDistribution;\n\nimport org.springframework.stereotype.Service;\n\nimport javax.annotation.Resource;\nimport java.io.IOException;\n\n@Controller\n@AddressMapping( \"com.walnut.sparta.ucdn.service.umct.FileSyncDistribution.\" )\n@Service\npublic class FileSyncDistributionController {\n\n    @Resource\n    private FileSyncDistribution fileSyncDistribution;\n\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @AddressMapping( \"dino\" )\n    public void dino( String name ) {\n\n    }\n\n    @AddressMapping( \"fileDistribution\" )\n    public void fileDistribution( String path, String topic, String server, long startSegId, long endSegId ) throws IOException {\n        ElementNode elementNode = this.primaryFileSystem.queryElement(path);\n        if( elementNode instanceof FileNode){\n            this.fileSyncDistribution.fileDistribution( (FileNode) elementNode, topic, server, startSegId, endSegId );\n        }\n\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/api/response/BasicResultResponse.java",
    "content": "package com.walnut.sparta.ucdn.service.api.response;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport org.springframework.http.HttpStatus;\n\nimport java.io.Serializable;\n\npublic class BasicResultResponse<T> implements Pinenut, Serializable {\n    private Integer    code = HttpStatus.OK.value();\n    private String     msg; //错误信息\n    private T          data; //数据\n\n    public static <T> BasicResultResponse<T > success() {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > successMsg( String msg  ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > success( T object ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.data = object;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > error( String msg ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.INTERNAL_SERVER_ERROR.value();\n        return result;\n    }\n\n\n    /**\n     * 获取\n     * @return code\n     */\n    public Integer getCode() {\n        return this.code;\n    }\n\n    /**\n     * 设置\n     * @param code\n     */\n    public void setCode(Integer code) {\n        this.code = code;\n    }\n\n    /**\n     * 获取\n     * @return msg\n     */\n    public String getMsg() {\n        return this.msg;\n    }\n\n    /**\n     * 设置\n     * @param msg\n     */\n    public void setMsg(String msg) {\n        this.msg = msg;\n    }\n\n    /**\n     * 获取\n     * @return 
data\n     */\n    public T getData() {\n        return this.data;\n    }\n\n    /**\n     * 设置\n     * @param data\n     */\n    public void setData(T data) {\n        this.data = data;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"code\" , this.code ),\n                new KeyValue<>( \"msg\"  , this.msg ),\n                new KeyValue<>( \"data\" , this.data )\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/FSContentDeliveryService.java",
    "content": "package com.walnut.sparta.ucdn.service.infrastructure;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface FSContentDeliveryService extends Pinenut {\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/SpartaUCDNService.java",
    "content": "package com.walnut.sparta.ucdn.service.infrastructure;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver;\nimport com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.hydra.storage.bucket.TitanBucketInstrument;\nimport com.pinecone.hydra.storage.file.FileSystemConfig;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.KernelFileSystemConfig;\nimport com.pinecone.hydra.storage.file.UniformObjectFileSystem;\nimport com.pinecone.hydra.storage.version.TitanVersionManage;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.volume.KernelVolumeConfig;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.version.ibatis.hydranium.VersionMappingDriver;\nimport com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.summer.spring.Springron;\nimport com.pinecone.tritium.Tritium;\nimport com.walnut.sparta.ucdn.service.SpartaBoot;\n\nimport org.springframework.context.ApplicationContextInitializer;\nimport org.springframework.context.ConfigurableApplicationContext;\nimport org.springframework.context.support.GenericApplicationContext;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\n\npublic class SpartaUCDNService extends Springron implements UCDNService {\n    protected KOIMappingDriver koiMappingDriver;\n\n    protected KOIMappingDriver koiFileMappingDriver;\n\n    protected KOIMappingDriver koiBucketMappingDriver;\n\n    protected KOIMappingDriver 
koiVersionMappingDriver;\n\n\n    protected KOMFileSystem fileSystem;\n\n    protected UniformVolumeManager volumeTree;\n\n    protected TitanBucketInstrument bucketInstrument;\n\n    protected TitanVersionManage versionManage;\n\n\n    protected void initSubsystem() {\n        this.koiMappingDriver = new VolumeMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiFileMappingDriver = new FileMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiBucketMappingDriver = new BucketMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiVersionMappingDriver = new VersionMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n\n\n        JSONConfig selfConfig = (JSONConfig) this.getConfig();\n        FileSystemConfig fileSystemConfig = new KernelFileSystemConfig( selfConfig.queryJSONObject( \"service.PrimaryUniformFileSystem\" ) );\n        this.fileSystem = new UniformObjectFileSystem( this.koiFileMappingDriver, fileSystemConfig );\n\n        VolumeConfig volumeConfig = new KernelVolumeConfig( selfConfig.queryJSONObject( \"service.PrimaryUniformVolumeManager\" ) );\n        this.volumeTree = new UniformVolumeManager( this.koiMappingDriver, volumeConfig );\n        this.bucketInstrument = new TitanBucketInstrument( this.koiBucketMappingDriver );\n        this.versionManage = new TitanVersionManage( 
this.koiVersionMappingDriver );\n\n\n        this.setPrimarySources( SpartaBoot.class );\n\n        this.setInitializer(new Executor() {\n            @Override\n            public void execute() throws Exception {\n                SpartaUCDNService.this.getSpringApplication().addInitializers(new ApplicationContextInitializer<ConfigurableApplicationContext>() {\n                    @Override\n                    public void initialize( ConfigurableApplicationContext applicationContext ) {\n                        GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext;\n                        genericApplicationContext.registerBean(\"primaryFileSystem\", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem)fileSystem);\n                        genericApplicationContext.registerBean(\"primaryVolume\", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree);\n                        genericApplicationContext.registerBean(\"primaryBucket\", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument);\n                        genericApplicationContext.registerBean(\"primaryVersion\", VersionManage.class, () -> (VersionManage) versionManage);\n\n                        genericApplicationContext.registerBean(\"uofsContentDelivery\", UOFSContentDelivery.class, () -> (UOFSContentDelivery) SpartaUCDNService.this.parentSystem());\n                    }\n                });\n            }\n        });\n    }\n\n    public SpartaUCDNService( String szName, Processum parent, String[] springbootArgs ) {\n        super( szName, parent, springbootArgs );\n        this.mSpringKernel.setPrimarySources( SpartaBoot.class );\n\n        this.initSubsystem();\n    }\n\n    public SpartaUCDNService( String szName, Processum parent ) {\n        this( szName, parent, new String[0] );\n    }\n\n    @Override\n    protected void loadConfig() {\n        this.mServgramList     = 
this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey );\n        Object dyServgramConf  = this.mServgramList.get( this.gramName() );\n        if( dyServgramConf instanceof String ) {\n            try{\n                this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) );\n            }\n            catch ( IOException ignore ) {\n                this.getLogger().info( \"[Notice] Spring will use the default config `application.yaml`.\" );\n            }\n        }\n        else {\n            this.mServgramConf = this.mServgramList.getChild( this.gramName() );\n        }\n    }\n\n    @Override\n    public Tritium parentSystem() {\n        return (Tritium)super.parentSystem();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/UCDNBeans.java",
    "content": "package com.walnut.sparta.ucdn.service.infrastructure;\n\nimport com.pinecone.hydra.umct.WolfMCExpress;\n\npublic class UCDNBeans {\n//    @Bean\n//    public UlfKafkaClient ulfKafkaClient() {\n//        return new WolfMCBClient(new WolfMCKafkaClient(server), \"\", this, WolfMCExpress.class);\n//    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/UCDNService.java",
    "content": "package com.walnut.sparta.ucdn.service.infrastructure;\n\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\n\npublic interface UCDNService extends Slf4jTraceable {\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/UOFSContentDelivery.java",
    "content": "package com.walnut.sparta.ucdn.service.infrastructure;\n\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.tritium.Tritium;\n\npublic class UOFSContentDelivery extends Tritium implements FSContentDeliveryService {\n    protected SpartaUCDNService spartaUCDNService;\n\n    public UOFSContentDelivery( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public UOFSContentDelivery( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        this.spartaUCDNService = new SpartaUCDNService( \"SpartaUCDNService\", this );\n        this.spartaUCDNService.execute();\n\n\n        this.getTaskManager().add(this.spartaUCDNService);\n        this.getTaskManager().syncWaitingTerminated();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/constants/PolicyConstants.java",
    "content": "package com.walnut.sparta.ucdn.service.infrastructure.constants;\n\npublic class PolicyConstants {\n    public static String VERSION_PREFIX = \"/$version\";\n\n    public static String FORWARD_SLASH = \"/\";\n\n    public static String PERIOD = \".\";\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/dto/UploadDTO.java",
    "content": "package com.walnut.sparta.ucdn.service.infrastructure.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UploadDTO implements Pinenut {\n\n    private String          version;\n\n    private String          filePath;\n\n\n    public UploadDTO() {\n    }\n\n    public UploadDTO(String version, String filePath) {\n        this.version = version;\n        this.filePath = filePath;\n    }\n\n\n    public String getVersion() {\n        return version;\n    }\n\n\n    public void setVersion(String version) {\n        this.version = version;\n    }\n\n\n    public String getFilePath() {\n        return filePath;\n    }\n\n\n    public void setFilePath(String filePath) {\n        this.filePath = filePath;\n    }\n\n\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/infrastructure/exception/IllegalPathException.java",
    "content": "package com.walnut.sparta.ucdn.service.infrastructure.exception;\n\npublic class IllegalPathException extends Exception{\n\n    @Override\n    public String toString() {\n        return \"illegal path !!!\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/umct/FileSyncDistribution.java",
    "content": "package com.walnut.sparta.ucdn.service.umct;\n\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.umct.stereotype.Iface;\n\nimport java.io.IOException;\n\n@Iface\npublic interface FileSyncDistribution {\n    void fileDistribution(FileNode fileNode, String topic, String server, long startSegId, long endSegId ) throws IOException;\n}\n"
  },
  {
    "path": "Sparta/sparta-ucdn-service/src/main/java/com/walnut/sparta/ucdn/service/umct/FileSyncDistributionImpl.java",
    "content": "package com.walnut.sparta.ucdn.service.umct;\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.umb.kafka.UlfKafkaClient;\n\nimport org.springframework.stereotype.Service;\n\nimport javax.annotation.Resource;\n\n@Service\npublic class FileSyncDistributionImpl implements FileSyncDistribution {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager primaryVolume;\n\n    @Resource\n    UlfKafkaClient ulfKafkaClient;\n\n    @Override\n    public void fileDistribution(FileNode fileNode, String topic, String server,long startSegId, long endSegId)   {\n//        FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment();\n//        WolfMCBClient client = new WolfMCBClient(new WolfMCKafkaClient(server), \"\", this, WolfMCExpress.class);\n//        client.compile(  );\n//        BroadcastControlProducer producer = client.createBroadcastControlProducer();\n//        producer.start();\n//        TreeMap<Long, Frame> frames = fileNode.getClusters();\n//        for( long i = startSegId; i < endSegId; i++ ){\n//            LocalCluster frame = (LocalCluster)frames.get(i);\n//            File tempFile = File.createTempFile(\"temp\", frame.getSegGuid().toString());\n//            FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);\n//            TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );\n//            FileNode newFileNode = fsNodeAllotment.newFileNode();\n//\n//            newFileNode.setPath( frame.getSourceName() );\n//            newFileNode.setDefinitionSize( frame.getSize() );\n//            TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64( this.primaryFileSystem, this.primaryVolume, newFileNode, 
kChannel );\n//\n//            exportEntity.export( frame );\n//        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sparta</artifactId>\n        <groupId>com.walnuts.sparta</groupId>\n        <version>2.5.1</version>\n    </parent>\n<!--    <build>-->\n<!--        <plugins>-->\n<!--            <plugin>-->\n<!--                <groupId>org.apache.maven.plugins</groupId>-->\n<!--                <artifactId>maven-compiler-plugin</artifactId>-->\n<!--                <configuration>-->\n<!--                    <source>11</source>-->\n<!--                    <target>11</target>-->\n<!--                </configuration>-->\n<!--            </plugin>-->\n<!--        </plugins>-->\n<!--    </build>-->\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.springframework.boot</groupId>\n                <artifactId>spring-boot-maven-plugin</artifactId>\n\n                <executions>\n                    <execution>\n                        <phase>package</phase>\n                        <goals>\n                            <goal>repackage</goal>\n                        </goals>\n                    </execution>\n                </executions>\n\n                <configuration>\n                    <includeSystemScope>true</includeSystemScope>\n                    <mainClass>com.walnut.sparta.uofs.console.UOFSBoot</mainClass>\n                </configuration>\n            </plugin>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    
<groupId>com.walnut.sparta.uofs.console</groupId>\n    <artifactId>sparta-uofs-console</artifactId>\n    <version>2.1.0</version>\n\n\n    <dependencies>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n            <version>2.6.13</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.apache.thrift</groupId>\n            
<artifactId>libthrift</artifactId>\n            <version>0.18.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.sdk.thrift</groupId>\n            <artifactId>hydra-lib-thrift-sdk</artifactId>\n            <version>1.2.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.sparta.api.uac</groupId>\n            <artifactId>sparta-api-uac</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>commons-fileupload</groupId>\n            <artifactId>commons-fileupload</artifactId>\n            <version>1.3.1</version>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.auth0</groupId>\n            <artifactId>java-jwt</artifactId>\n            <version>4.4.0</version>\n        </dependency>\n    </dependencies>\n\n</project>\n\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/SpartaBoot.java",
    "content": "package com.walnut.sparta.uofs.console;\n\nimport org.springframework.boot.autoconfigure.SpringBootApplication;\n\n@SpringBootApplication\npublic class SpartaBoot {\n\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/UOFSBoot.java",
    "content": "package com.walnut.sparta.uofs.console;\n\nimport com.pinecone.Pinecone;\nimport com.walnut.sparta.uofs.console.infrastructure.UOFSContentDelivery;\n\npublic class UOFSBoot {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            UOFSContentDelivery uofs = (UOFSContentDelivery) Pinecone.sys().getTaskManager().add(\n                    new UOFSContentDelivery( args, Pinecone.sys() )\n            );\n            uofs.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/BucketController.java",
    "content": "//package com.walnuts.sparta.uofs.console.api.controller.v2;\n//\n//import com.pinecone.hydra.storage.io.TitanFileChannelChanface;\n//import com.pinecone.hydra.storage.bucket.BucketInstrument;\n//import com.pinecone.hydra.storage.bucket.entity.Bucket;\n//import com.pinecone.hydra.storage.bucket.entity.GenericBucket;\n//import com.pinecone.hydra.storage.file.KOMFileSystem;\n//import com.pinecone.hydra.storage.file.entity.FileNode;\n//import com.pinecone.hydra.storage.file.entity.Folder;\n//import com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;\n//import com.pinecone.hydra.storage.io.UIOException;\n//import com.pinecone.hydra.storage.volume.UniformVolumeManager;\n//import com.pinecone.ulf.util.guid.GUIDs;\n//import com.walnuts.sparta.uofs.console.api.response.BasicResultResponse;\n//import org.springframework.web.bind.annotation.CrossOrigin;\n//import org.springframework.web.bind.annotation.DeleteMapping;\n//import org.springframework.web.bind.annotation.GetMapping;\n//import org.springframework.web.bind.annotation.PathVariable;\n//import org.springframework.web.bind.annotation.PutMapping;\n//import org.springframework.web.bind.annotation.RequestBody;\n//import org.springframework.web.bind.annotation.RequestMapping;\n//import org.springframework.web.bind.annotation.RequestParam;\n//import org.springframework.web.bind.annotation.RestController;\n//import org.springframework.web.multipart.MultipartFile;\n//\n//import javax.annotation.Resource;\n//import javax.servlet.http.HttpServletRequest;\n//import java.io.File;\n//import java.io.IOException;\n//import java.nio.channels.FileChannel;\n//import java.nio.file.StandardOpenOption;\n//import java.time.LocalDateTime;\n//import java.util.List;\n//\n//@RestController\n//@RequestMapping( \"/api/v2/uofs/bucket\" )\n//@CrossOrigin\n//public class BucketController {\n//    @Resource\n//    private KOMFileSystem primaryFileSystem;\n//\n//    @Resource\n//    private BucketInstrument 
bucketInstrument;\n//    @Resource\n//    private TransmitController transmitController;\n//\n//    @Resource\n//    private UniformVolumeManager primaryVolume;\n//\n//\n//    /**\n//     * 创建bucket\n//     * @param bucketName 桶名\n//     * @param accountGuid 用户Guid\n//     * @return 返回bucketGuid\n//     */\n//    @PutMapping(\"/{bucketName}\")\n//    public BasicResultResponse<String> createBucket(@PathVariable String bucketName, @RequestBody String accountGuid ){\n//        Folder folder = this.primaryFileSystem.affirmFolder(bucketName);\n//        GenericBucket bucket = new GenericBucket();\n//        bucket.setBucketName( bucketName );\n//        bucket.setCreateTime(LocalDateTime.now());\n//        bucket.setMountPoint( folder.getGuid() );\n//        bucket.setUserGuid( GUIDs.GUID72( accountGuid ) );\n//        this.bucketInstrument.createBucket( bucket );\n//        return BasicResultResponse.success( bucket.getBucketGuid().toString() );\n//    }\n//\n//    /**\n//     * 获取账号下的所有桶\n//     * @param accountGuid 用户账号guid\n//     * @return 返回所有桶信息\n//     */\n//    @GetMapping(\"/\")\n//    public String listBuckets(@RequestParam(\"accountGuid\") String accountGuid ){\n//        List<Bucket> buckets = this.bucketInstrument.queryBucketsByUserGuid(GUIDs.GUID72(accountGuid));\n//        return BasicResultResponse.success(buckets).toJSONString();\n//    }\n//\n//    /**\n//     * 删除桶\n//     * @param bucketName 桶名\n//     * @param accountGuid 账号Guid\n//     * @return  返回操作结果\n//     */\n//    @DeleteMapping(\"/{bucketName}\")\n//    public BasicResultResponse<String> deleteBucket( @PathVariable String bucketName, @RequestBody String accountGuid ){\n//        this.bucketInstrument.removeBucketByAccountAndBucketName( GUIDs.GUID72(accountGuid), bucketName  );\n//        this.primaryFileSystem.remove( bucketName );\n//        return BasicResultResponse.success();\n//    }\n//\n//    /**\n//     *  获取存储对象\n//     * @param bucketName 桶名\n//     * @param objectName 对象名\n//   
  * @param targetPath 目标地址\n//     * @return 操作结果\n//     */\n//    @GetMapping(\"/{bucketName}/{objectName}\")\n//    public BasicResultResponse<String> getObject(@PathVariable String bucketName, @PathVariable String objectName, @RequestBody String targetPath) throws IOException {\n//        FileNode fileNode = (FileNode) this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(bucketName + \"/\" + objectName));\n//        File file = new File(targetPath);\n//        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);\n//        TitanFileChannelChanface kChannel = new TitanFileChannelChanface( channel );\n//        TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume , fileNode, kChannel );\n//        this.primaryFileSystem.export( exportEntity );\n//        return BasicResultResponse.success();\n//    }\n//\n//    @GetMapping(\"/*\")\n//    public void test(HttpServletRequest httpRequest){\n//\n//    }\n//    /**\n//     * 上传储存对象\n//     * @param bucketName 桶名\n//     * @param objectName 对象名\n//     * @return\n//     */\n//    @PutMapping(\"/{bucketName}/{objectName}\")\n//    public BasicResultResponse<String> putObject(@PathVariable String bucketName, @PathVariable String objectName, @RequestBody MultipartFile file){\n//        return BasicResultResponse.success();\n//    }\n//\n//    /**\n//     * 删除存储对象\n//     * @param bucketName 桶名\n//     * @param objectName 存储对象名\n//     * @return 返回操作信息\n//     */\n//    @DeleteMapping(\"/{bucketName}/{objectName}\")\n//    public BasicResultResponse<String> deleteObject( @PathVariable String bucketName, @PathVariable String objectName ){\n//        return BasicResultResponse.success();\n//    }\n//\n//    /**\n//     * 列出储存桶中的对象\n//     * @param bucketName 桶名\n//     * @return 返回对象列表\n//     */\n//    public BasicResultResponse<List<FileNode>> listObjects( @PathVariable 
String bucketName ){\n//        return BasicResultResponse.success();\n//    }\n//}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/ExternalSymbolicController.java",
    "content": "package com.walnut.sparta.uofs.console.api.controller.v2;\n\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.external.ExternalFile;\nimport com.pinecone.hydra.storage.file.external.ExternalFolder;\nimport com.pinecone.hydra.storage.file.external.GenericNativeExternalFolder;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.ExternalSymbolic;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.walnut.sparta.uofs.console.api.response.BasicResultResponse;\nimport com.walnut.sparta.uofs.console.domain.dto.CreateExternalDTO;\nimport com.walnut.sparta.uofs.console.domain.dto.CreateExternalSymbolicDTO;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.DeleteMapping;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.PutMapping;\nimport org.springframework.web.bind.annotation.RequestBody;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\nimport org.springframework.web.multipart.MultipartFile;\n\nimport javax.annotation.Resource;\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.nio.file.Files;\nimport java.nio.file.Path;\nimport java.nio.file.Paths;\nimport java.nio.file.StandardCopyOption;\n\n@RestController\n@RequestMapping( \"/api/v2/uofs/externalSymbolic\" )\n@CrossOrigin\npublic class ExternalSymbolicController {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    /**\n     * 获取外部目录所有内容\n     * @param path 路径\n     * @return 返回内容\n     */\n    @GetMapping(\"/listItem\")\n    public String listItem(@RequestParam(\"path\") String path){\n        ElementNode 
elementNode = this.primaryFileSystem.queryElement(path);\n        if(elementNode != null){\n            ExternalSymbolic externalSymbolic = (ExternalSymbolic) elementNode;\n            GenericNativeExternalFolder externalFolder = new GenericNativeExternalFolder(new File(externalSymbolic.getReparsedPoint()));\n            return BasicResultResponse.success(externalFolder.listItem()).toJSONString();\n        }\n        return BasicResultResponse.success().toJSONString();\n    }\n\n    /**\n     * 获取外部文件夹的所有内容\n     * @param path 路径\n     * @return 返回内容信息\n     */\n    @GetMapping(\"/listItem/externalFoldr\")\n    public String externalFolderListItem(@RequestParam(\"path\") String path){\n        File file = new File(path);\n        GenericNativeExternalFolder externalFolder = new GenericNativeExternalFolder(file);\n        return BasicResultResponse.success(externalFolder.listItem()).toJSONString();\n    }\n\n    /**\n     * 外部复制\n     * @param sourcePath 源路径\n     * @param destinationPath 目标路径\n     */\n    @PutMapping(\"/copy\")\n    public BasicResultResponse<String> directCopy( @RequestParam(\"sourcePath\") String sourcePath, @RequestParam(\"destinationPath\") String destinationPath ) throws IOException {\n        ElementNode elementNode = this.primaryFileSystem.queryElement(sourcePath);\n        if(elementNode instanceof ExternalFolder){\n            ExternalFolder externalFolder = (ExternalFolder) elementNode;\n            this.primaryFileSystem.directCopy( externalFolder.getPath(),destinationPath );\n        }\n\n        return BasicResultResponse.success();\n    }\n\n    public BasicResultResponse<String> createExternalSymbolic(@RequestBody CreateExternalSymbolicDTO dto){\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 直接上传文件\n     * @param file 文件\n     * @param sourcePath 目标文件夹\n     */\n    @PostMapping(\"/directUpload\")\n    public BasicResultResponse<String> directUpload( @RequestParam(\"file\") MultipartFile file, 
@RequestParam(\"sourcePath\") String sourcePath ) throws IOException {\n        ElementNode elementNode = this.primaryFileSystem.queryElement(sourcePath);\n        if( elementNode instanceof ExternalFolder){\n            ExternalFolder externalFolder = (ExternalFolder) elementNode;\n            String path = externalFolder.getPath();\n\n            // 确保目标文件夹存在\n            Path targetDir = Paths.get(path);\n            if (!Files.exists(targetDir)) {\n                Files.createDirectories(targetDir);\n            }\n\n            // 构建目标文件路径\n            Path targetFile = targetDir.resolve(file.getOriginalFilename());\n\n            // 将 MultipartFile 写入目标文件\n            try (InputStream inputStream = file.getInputStream()) {\n                Files.copy(inputStream, targetFile, StandardCopyOption.REPLACE_EXISTING);\n            }\n        }\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 删除外部文件\n     * @param path 文件路径\n     * @return 返回操作结果\n     */\n    @DeleteMapping(\"/remove\")\n    public BasicResultResponse<String> remove( @RequestParam(\"path\") String path ){\n        ElementNode elementNode = this.primaryFileSystem.queryElement(path);\n        if( elementNode instanceof ExternalFolder ){\n            ExternalFolder externalFolder = (ExternalFolder) elementNode;\n            externalFolder.delete();\n        }else if( elementNode instanceof ExternalFile ){\n            ExternalFile externalFile = (ExternalFile) elementNode;\n            externalFile.delete();\n        }\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 创建外接文件\n     * @param dto 创建外接文件信息\n     * @return 返回操作结果\n     */\n    @PostMapping(\"/createExternalFile\")\n    public BasicResultResponse<String> createExternalFile(@RequestBody CreateExternalDTO dto){\n        ElementNode elementNode = this.primaryFileSystem.queryElement(dto.getFolderPath());\n\n        Folder folder = elementNode.evinceFolder();\n        folder.createExternalSymbolic( 
dto.getExternalSymbolicName(), dto.getReparsedPoint());\n        return BasicResultResponse.success();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/FileController.java",
    "content": "package com.walnut.sparta.uofs.console.api.controller.v2;\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.sparta.uofs.console.api.response.BasicResultResponse;\nimport com.walnut.sparta.uofs.console.domain.dto.RenameDTO;\nimport com.walnut.sparta.uofs.console.domain.dto.UpdateFileNameDTO;\nimport com.walnut.sparta.uofs.console.service.FileService;\n\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.DeleteMapping;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestBody;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\n\n@RestController\n@RequestMapping( \"/api/v2/uofs/file\" )\n@CrossOrigin\npublic class FileController {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private FileService fileService;\n\n    @Resource\n    private VersionManage primaryVersion;\n\n    /**\n     * 创建文件\n     * @param filePath 文件路径\n     * @return 返回操作状态\n     */\n    @GetMapping(\"/creat/file\")\n    public BasicResultResponse<String> createFile(@RequestParam String filePath ){\n        this.primaryFileSystem.affirmFileNode( filePath );\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 获取文件或文件夹属性\n     * @param nodeGuid 文件或文件夹guid\n     * @return 返回属性信息\n     */\n    @GetMapping(\"/attribute\")\n    public BasicResultResponse<FileTreeNode> attribute(@RequestParam(\"nodeGuid\") String nodeGuid ){\n        FileTreeNode fileTreeNode = 
this.primaryFileSystem.get(GUIDs.GUID128(nodeGuid));\n        return BasicResultResponse.success( fileTreeNode );\n    }\n\n    /**\n     * 移除文件夹或者文件\n     * @param fileGuid 文件夹或者文件guid\n     * @return 返回操作结果\n     */\n    @DeleteMapping(\"/remove/file\")\n    public BasicResultResponse<String> removeFile( String fileGuid ){\n        this.fileService.remove( GUIDs.GUID128( fileGuid ) );\n        this.primaryFileSystem.remove( GUIDs.GUID128( fileGuid ) );\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 重命名文件或文件夹\n     * @param dto 信息\n     * @return 返回操作信息\n     */\n    @PostMapping(\"/rename\")\n    public BasicResultResponse<String> renameFile(@RequestBody RenameDTO dto){\n        this.primaryFileSystem.renameFile( dto.getPath(), dto.getNewName() );\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 重命名接口\n     * @param dto 重命名数据\n     * @return\n     */\n    @PostMapping(\"/updateFileName\")\n    public BasicResultResponse<String> updateFileName(@RequestBody UpdateFileNameDTO dto){\n        this.primaryFileSystem.renameFile( dto.getFilePath(), dto.getNewFileName() );\n        return BasicResultResponse.success();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/FolderController.java",
    "content": "package com.walnut.sparta.uofs.console.api.controller.v2;\n\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.sparta.uofs.console.api.response.BasicResultResponse;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.DeleteMapping;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PutMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\nimport java.io.IOException;\nimport java.util.List;\n\n@RestController\n@RequestMapping( \"/api/v2/uofs/folder\" )\n@CrossOrigin\npublic class FolderController {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager primaryVolume;\n\n    /**\n     * 获取文件夹下所有内容\n     * @param folderGuid 文件夹guid\n     * @returnS\n     */\n    @GetMapping(\"/folder/listItem\")\n    public String listItem(@RequestParam String folderGuid ){\n        Folder folder = this.primaryFileSystem.getFolder(GUIDs.GUID128(folderGuid));\n        List<FileTreeNode> fileTreeNodes = folder.listItem();\n        return  BasicResultResponse.success(fileTreeNodes).toJSONString() ;\n    }\n\n\n    public String listItemByPath( @RequestParam String folderPath ){\n        return null;\n    }\n\n\n    /**\n     * 创建文件夹\n     * @param destDirPath 文件夹路径\n     * @return 返回操作状态\n     */\n    @GetMapping(\"/creat/folder\")\n    public BasicResultResponse<String> createFolder( @RequestParam(\"destDirPath\") String destDirPath ){\n        this.primaryFileSystem.affirmFolder( 
destDirPath );\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 获取文件或文件夹属性\n     * @param nodeGuid 文件或文件夹guid\n     * @return 返回属性信息\n     */\n    @GetMapping(\"/attribute\")\n    public             BasicResultResponse< FileTreeNode > attribute( @RequestParam(\"nodeGuid\") String nodeGuid ){\n        FileTreeNode fileTreeNode = this.primaryFileSystem.get(GUIDs.GUID128(nodeGuid));\n        return BasicResultResponse.success( fileTreeNode );\n    }\n\n    /**\n     * 获取所有根文件夹\n     * @return 返回根信息\n     */\n    @GetMapping(\"/list/root\")\n    public String listRoot(){\n        List<FileTreeNode> roots = this.primaryFileSystem.fetchRoot();\n        return BasicResultResponse.success( roots ).toJSONString();\n    }\n\n    /**\n     * 移除文件夹或者文件\n     * @param fileGuid 文件夹或者文件guid\n     * @return 返回操作结果\n     */\n    @DeleteMapping(\"/remove/file\")\n    public BasicResultResponse<String> removeFile( @RequestParam(\"fileGuid\") String fileGuid ){\n        this.primaryFileSystem.remove( GUIDs.GUID128( fileGuid ) );\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 文件复制接口\n     * @param sourcePath 目标路径\n     * @param destinationPath 源路径\n     */\n    @PutMapping(\"/copy\")\n    public BasicResultResponse<String> copy( @RequestParam(\"sourcePath\") String sourcePath, @RequestParam(\"destinationPath\") String destinationPath ) throws IOException {\n        this.primaryFileSystem.copy( sourcePath, destinationPath, primaryVolume );\n        return BasicResultResponse.success(\"复制成功\");\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/SiteController.java",
    "content": "package com.walnut.sparta.uofs.console.api.controller.v2;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.bucket.entity.GenericSite;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.walnut.sparta.uofs.console.api.response.BasicResultResponse;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.DeleteMapping;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PutMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\nimport java.util.List;\n\n@RestController\n@RequestMapping( \"/api/v2/uofs/site\" )\n@CrossOrigin\npublic class SiteController implements Pinenut {\n    @Resource\n    private BucketInstrument bucketInstrument;\n\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    /**\n     * 创建站点\n     * @param siteName 站点名\n     * @return 返回操作结果\n     */\n    @PutMapping(\"/createSite\")\n    public BasicResultResponse<String> createSite(@RequestParam(\"siteName\") String siteName){\n        Folder folder = this.primaryFileSystem.affirmFolder(siteName);\n        GenericSite site = new GenericSite();\n        site.setSiteName( siteName );\n        site.setMountPointGuid( folder.getGuid() );\n        this.bucketInstrument.createSite( site );\n\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 删除站点\n     * @param siteName 站点名\n     * @return 操作结果\n     */\n    @DeleteMapping(\"/deleteSite\")\n    public BasicResultResponse<String> removeSite( @RequestParam(\"siteName\") String siteName ){\n        
this.bucketInstrument.removeSite(siteName);\n\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 获取全部站点\n     * @return 返回全部站点\n     */\n    @GetMapping(\"/listSite\")\n    public String listSite(){\n        List<Site> sites = this.bucketInstrument.listSite();\n        return BasicResultResponse.success(sites).toJSONString();\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/TransmitController.java",
    "content": "package com.walnut.sparta.uofs.console.api.controller.v2;\n\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.external.ExternalFile;\nimport com.pinecone.hydra.storage.file.external.GenericNativeExternalFile;\nimport com.pinecone.hydra.storage.file.entity.ElementNode;\nimport com.pinecone.hydra.storage.file.entity.GenericFileNode;\nimport com.pinecone.hydra.storage.io.Chanface;\nimport com.pinecone.hydra.storage.io.TitanFileChannelChanface;\nimport com.pinecone.hydra.storage.io.TitanOutputStreamChanface;\nimport com.pinecone.hydra.storage.bucket.BucketInstrument;\nimport com.pinecone.hydra.storage.bucket.entity.Site;\nimport com.pinecone.hydra.storage.bucket.source.SiteManipulator;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.FSNodeAllotment;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.transmit.exporter.TitanFileExportEntity64;\nimport com.pinecone.hydra.storage.file.transmit.receiver.TitanFileReceiveEntity64;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.version.entity.TitanVersion;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.sparta.uofs.console.api.response.BasicResultResponse;\nimport com.walnut.sparta.uofs.console.domain.dto.DownloadObjectByChannelDTO;\nimport com.walnut.sparta.uofs.console.domain.dto.UpdateObjectByChannelDTO;\nimport com.walnut.sparta.uofs.console.infrastructure.UOFSConsoleContents;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport 
org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\nimport org.springframework.web.multipart.MultipartFile;\n\nimport javax.annotation.Resource;\nimport javax.servlet.ServletOutputStream;\nimport javax.servlet.http.HttpServletRequest;\nimport javax.servlet.http.HttpServletResponse;\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.nio.channels.FileChannel;\nimport java.nio.file.StandardOpenOption;\nimport java.sql.SQLException;\nimport java.util.Map;\n\nimport static org.apache.commons.io.FilenameUtils.getExtension;\n\n@RestController\n@RequestMapping( \"/api/v2/uofs/transmit\" )\n@CrossOrigin\npublic class TransmitController {\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager primaryVolume;\n\n    @Resource\n    private BucketInstrument bucketInstrument;\n\n    @Resource\n    private VersionManage primaryVersion;\n\n//    @Resource\n//    private UOFSConfig uofsConfig;\n\n    /**\n     * 使用channel上传对象\n     * @param dto 上传所需数据\n     * @return 返回操作结果\n     * @throws IOException\n     * @throws SQLException\n     */\n    @PostMapping(\"/channel/update\")\n    public  BasicResultResponse<String> updateObjectByChannel(UpdateObjectByChannelDTO dto ) throws IOException {\n        MultipartFile object = dto.getObject();\n        File file = File.createTempFile( \"uofs\",\".\"+ getExtension(object.getOriginalFilename()) );\n        if( !file.exists() ){\n            throw new IOException( \"Creating file compromised, what :\" + file.toPath() );\n        }\n        object.transferTo( file );\n        Chanface chanface = this.getKChannel(file);\n\n        FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment();\n        FileNode fileNode = fsNodeAllotment.newFileNode();\n        fileNode.setDefinitionSize( file.length() );\n        fileNode.setName( 
file.getName() );\n\n        TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64(\n                this.primaryFileSystem, dto.getDestDirPath(), fileNode, chanface, this.primaryVolume\n        );\n\n        this.primaryFileSystem.receive( receiveEntity );\n        if(!file.delete()){\n            throw new IOException( \"Purging temporary file compromised, what :\" + file.toPath() );\n        }\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 使用channel将对象下载到本地\n     * @param dto 下载所需的数据\n     * @return 返回操作结果\n     * @throws IOException\n     * @throws SQLException\n     */\n    @PostMapping(\"/channel/download\")\n    public BasicResultResponse<String> downloadObjectByChannel( DownloadObjectByChannelDTO dto ) throws IOException {\n        File file = new File( dto.getTargetPath());\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.APPEND);\n        TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );\n\n        FileNode fileNode = (FileNode) this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(dto.getDestDirPath()));\n        TitanFileExportEntity64 exportEntity = new TitanFileExportEntity64( this.primaryFileSystem, this.primaryVolume, fileNode, titanFileChannelKChannel );\n        primaryFileSystem.export( exportEntity );\n        return BasicResultResponse.success();\n    }\n\n    @GetMapping(\"/download/guid\")\n    public void  getFile(HttpServletRequest request, HttpServletResponse response) throws IOException {\n        Map<String, String[]> parameterMap = request.getParameterMap();\n        String[] guids = parameterMap.get(\"guid\");\n        GUID storageObjectGuid = null;\n        if( guids != null ){\n            storageObjectGuid = GUIDs.GUID128( guids[0] );\n        }\n\n        ServletOutputStream outputStream = response.getOutputStream();\n        TitanOutputStreamChanface 
kChannel = new TitanOutputStreamChanface(outputStream);\n\n        FileNode storageObject = (FileNode) this.primaryFileSystem.get(storageObjectGuid);\n\n        TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, storageObject, kChannel);\n        this.primaryFileSystem.export( entity );\n\n    }\n\n    /**\n     * 使用文件路径下载文件\n     */\n    @GetMapping(\"/download/path\")\n    public void getFileByPath(HttpServletRequest request, HttpServletResponse response) throws IOException {\n        Map<String, String[]> parameterMap = request.getParameterMap();\n        String[] paths = parameterMap.get(\"path\");\n        String path = null;\n\n        if(paths != null){\n            path = paths[0];\n        }\n\n        ServletOutputStream outputStream = response.getOutputStream();\n        TitanOutputStreamChanface kChannel = new TitanOutputStreamChanface(outputStream);\n\n        ElementNode elementNode = this.primaryFileSystem.queryElement(path);\n        if(elementNode instanceof GenericNativeExternalFile){\n            ExternalFile externalFile = (ExternalFile) elementNode;\n            File nativeFile = externalFile.getNativeFile();\n            try (FileInputStream fileInputStream = new FileInputStream(nativeFile)) {\n                byte[] buffer = new byte[1024];\n                int bytesRead;\n                while ((bytesRead = fileInputStream.read(buffer)) != -1) {\n                    outputStream.write(buffer, 0, bytesRead);\n                }\n                // 刷新输出流\n                outputStream.flush();\n                return;\n            } catch (IOException e) {\n                // 处理异常，比如记录日志等\n                e.printStackTrace();\n            }\n        }\n\n        if( elementNode instanceof GenericFileNode){\n            FileNode fileNode = (FileNode) elementNode;\n            TitanFileExportEntity64 entity = new TitanFileExportEntity64(this.primaryFileSystem, this.primaryVolume, fileNode, 
kChannel);\n            this.primaryFileSystem.export( entity );\n        }\n    }\n\n    /**\n     * 上传文件\n     * @param filePath 目标路径\n     * @param version 版本号\n     * @param file 文件\n     * @param siteName 站点\n     * @return 返回操作结果\n     */\n    @PostMapping(\"/CDNUpload\")\n    public BasicResultResponse<String> CDNUpload(@RequestParam(\"siteName\") String siteName, @RequestParam(\"filePath\") String filePath, @RequestParam(\"version\") String version, @RequestParam(\"file\") MultipartFile file) throws IOException {\n        SiteManipulator siteManipulator = this.bucketInstrument.getSiteManipulator();\n        Site site = siteManipulator.querySiteByName(siteName);\n        if( site == null ){\n            return BasicResultResponse.error(\"站点不存在\");\n        }\n        int dotIndex = filePath.lastIndexOf(UOFSConsoleContents.PERIOD);\n        String baseName = filePath.substring(0, dotIndex);\n        String extension = filePath.substring(dotIndex + 1);\n        String realFilePath = this.primaryFileSystem.getPath(site.getMountPointGuid()) + UOFSConsoleContents.FORWARD_SLASH + baseName;\n\n        FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment();\n        Folder node = this.primaryFileSystem.affirmFolder(realFilePath);\n        String storageObjectPath = realFilePath +UOFSConsoleContents.VERSION_PREFIX+ UOFSConsoleContents.FORWARD_SLASH + version +UOFSConsoleContents.PERIOD+ extension;\n        File tempFile = File.createTempFile(\"upload\",\".temp\");\n        if( !tempFile.exists() ){\n            throw new IOException( \"Creating file compromised, what :\" + tempFile.toPath() );\n        }\n        file.transferTo(tempFile);\n\n        FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ);\n        TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );\n        FileNode fileNode = fsNodeAllotment.newFileNode();\n        fileNode.setDefinitionSize( 
tempFile.length() );\n        fileNode.setName( tempFile.getName() );\n        TitanFileReceiveEntity64 receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem,storageObjectPath, fileNode,titanFileChannelKChannel,this.primaryVolume );\n\n        this.primaryFileSystem.receive( receiveEntity );\n\n        FileTreeNode storageObject = this.primaryFileSystem.get(this.primaryFileSystem.queryGUIDByPath(storageObjectPath));\n        TitanVersion titanVersion = new TitanVersion();\n        titanVersion.setVersion( version );\n        titanVersion.setFileGuid( node.getGuid() );\n        titanVersion.setTargetStorageObjectGuid( storageObject.getGuid() );\n\n        this.primaryVersion.insert( titanVersion );\n        if( !tempFile.delete() ){\n            throw new IOException( \"Purging temporary file compromised, what :\" + tempFile.toPath() );\n        }\n\n        return BasicResultResponse.success();\n    }\n\n    /**\n     *\n     * @param filePath 文件要上传的路径\n     * @param file 文件本体\n     * @return\n     */\n    @PostMapping(\"/upload\")\n    public BasicResultResponse<String> upload(@RequestParam(\"filePath\") String filePath, @RequestParam(\"file\") MultipartFile file ) throws IOException {\n            File tempFile = File.createTempFile(\"upload\",\".temp\");\n            if(!tempFile.exists()){\n                throw new IOException( \"Creating file compromised, what :\" + tempFile.toPath() );\n            }\n            file.transferTo(tempFile);\n\n            FSNodeAllotment fsNodeAllotment = this.primaryFileSystem.getFSNodeAllotment();\n            FileChannel channel = FileChannel.open(tempFile.toPath(), StandardOpenOption.READ);\n            TitanFileChannelChanface titanFileChannelKChannel = new TitanFileChannelChanface( channel );\n            FileNode fileNode = fsNodeAllotment.newFileNode();\n            fileNode.setDefinitionSize( tempFile.length() );\n            fileNode.setName( tempFile.getName() );\n            TitanFileReceiveEntity64 
receiveEntity = new TitanFileReceiveEntity64( this.primaryFileSystem,filePath, fileNode,titanFileChannelKChannel,this.primaryVolume );\n\n            this.primaryFileSystem.receive( receiveEntity );\n            if(!tempFile.delete()){\n                throw new IOException( \"Temporary file has been purged failed.\" );\n            }\n            return BasicResultResponse.success();\n    }\n\n    @PostMapping(\"/stream\")\n    public String handleStreamUpload(HttpServletRequest request) throws IOException {\n        try (InputStream inputStream = request.getInputStream()) {\n            // 处理输入流\n            return \"File stream processed.\";\n        }\n    }\n\n    private Chanface getKChannel(File file ) throws IOException {\n        FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.READ);\n        return new TitanFileChannelChanface( channel );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/UserController.java",
    "content": "package com.walnut.sparta.uofs.console.api.controller.v2;\n\n\nimport com.walnut.sparta.uofs.console.util.JWTUtil;\nimport com.walnut.sparta.uofs.console.api.response.BasicResultResponse;\nimport com.walnut.sparta.uofs.console.domain.dto.UserLoginDTO;\n\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestBody;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RestController;\n\n@RestController\n@RequestMapping( \"/api/v2/user\" )\n@CrossOrigin\npublic class UserController {\n\n    @PostMapping(\"/login\")\n    public BasicResultResponse<String> login(@RequestBody UserLoginDTO dto){\n       if( dto.getPassword().equals(\"11122233\") ){\n           return BasicResultResponse.success(JWTUtil.createJWT());\n       }\n       return BasicResultResponse.error(\"Permission code error\");\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/v2/VolumeController.java",
    "content": "package com.walnut.sparta.uofs.console.api.controller.v2;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.storage.volume.entity.LogicVolume;\nimport com.pinecone.hydra.storage.volume.entity.MountPoint;\nimport com.pinecone.hydra.storage.volume.entity.PhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.SimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.Volume;\nimport com.pinecone.hydra.storage.volume.entity.VolumeAllotment;\nimport com.pinecone.hydra.storage.volume.entity.VolumeCapacity64;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalPhysicalVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSimpleVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalSpannedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.LocalStripedVolume;\nimport com.pinecone.hydra.storage.volume.entity.local.simple.TitanLocalSimpleVolume;\nimport com.pinecone.ulf.util.guid.GUIDs;\nimport com.walnut.sparta.uofs.console.api.response.BasicResultResponse;\nimport com.walnut.sparta.uofs.console.domain.dto.PhysicalVolumeDTO;\nimport com.walnut.sparta.uofs.console.domain.dto.LogicVolumeDTO;\nimport com.walnut.sparta.uofs.console.domain.dto.StorageExpansionDTO;\nimport com.walnut.sparta.uofs.console.infrastructure.UOFSConsoleContents;\nimport org.springframework.web.bind.annotation.CrossOrigin;\nimport org.springframework.web.bind.annotation.GetMapping;\nimport org.springframework.web.bind.annotation.PostMapping;\nimport org.springframework.web.bind.annotation.RequestBody;\nimport org.springframework.web.bind.annotation.RequestMapping;\nimport org.springframework.web.bind.annotation.RequestParam;\nimport org.springframework.web.bind.annotation.RestController;\n\nimport javax.annotation.Resource;\nimport java.sql.SQLException;\nimport java.util.ArrayList;\nimport java.util.List;\n\n@RestController\n@RequestMapping( 
\"/api/v2/uofs/volume\" )\n@CrossOrigin\npublic class VolumeController {\n    @Resource\n    private UniformVolumeManager primaryVolume;\n\n//    @Resource\n//    private UOFSConfig  uofsConfig;\n\n    /**\n     * 创建物理卷\n     * @param dto 创建物理卷需要的参数\n     * @return 返回操作结果\n     */\n    @PostMapping(\"/create/physicalVolume\")\n    public BasicResultResponse<String> createPhysicalVolume(@RequestBody PhysicalVolumeDTO dto){\n        VolumeAllotment volumeAllotment = this.primaryVolume.getVolumeAllotment();\n        LocalPhysicalVolume physicalVolume = volumeAllotment.newLocalPhysicalVolume();\n\n        physicalVolume.setName( dto.getName() );\n        physicalVolume.setType( UOFSConsoleContents.VOLUME_TYPE_PHYSICAL );\n        physicalVolume.setExtConfig(dto.getExtConfig() );\n\n        VolumeCapacity64 volumeCapacity = volumeAllotment.newVolumeCapacity();\n        volumeCapacity.setDefinitionCapacity( dto.getDefinitionCapacity() );\n\n        MountPoint mountPoint = volumeAllotment.newMountPoint();\n        mountPoint.setMountPoint(dto.getMountPoint() );\n\n        physicalVolume.setMountPoint( mountPoint );\n        physicalVolume.setVolumeCapacity( volumeCapacity );\n\n        this.primaryVolume.insertPhysicalVolume( physicalVolume );\n\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 创建简单卷\n     * @param dto 创建简单卷所需的参数\n     * @return 返回操作结果\n     * @throws SQLException sqlite异常\n     */\n    @PostMapping(\"/create/simpleVolume\")\n    public BasicResultResponse<String> createSimpleVolume(@RequestBody LogicVolumeDTO dto) throws SQLException {\n        VolumeAllotment volumeAllotment = this.primaryVolume.getVolumeAllotment();\n        LocalSimpleVolume simpleVolume = volumeAllotment.newLocalSimpleVolume();\n\n        simpleVolume.setType(UOFSConsoleContents.VOLUME_TYPE_SIMPLE);\n        simpleVolume.setName(dto.getName() );\n        simpleVolume.setExtConfig(dto.getExtConfig() );\n\n        VolumeCapacity64 volumeCapacity = 
volumeAllotment.newVolumeCapacity();\n        volumeCapacity.setDefinitionCapacity( dto.getDefinitionCapacity() );\n\n        simpleVolume.setVolumeCapacity( volumeCapacity );\n        simpleVolume.build();\n\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 创建跨区卷\n     * @param dto 跨区卷所需的参数\n     * @return 返回操作结果\n     * @throws SQLException sqlite异常\n     */\n    @PostMapping(\"/create/spannedVolume\")\n    public BasicResultResponse<String> createSpannedVolume(@RequestBody LogicVolumeDTO dto) throws SQLException {\n        VolumeAllotment volumeAllotment = this.primaryVolume.getVolumeAllotment();\n        LocalSpannedVolume spannedVolume = volumeAllotment.newLocalSpannedVolume();\n\n        spannedVolume.setType(UOFSConsoleContents.VOLUME_TYPE_SPANNED);\n        spannedVolume.setName(dto.getName() );\n        spannedVolume.setExtConfig(dto.getExtConfig());\n\n        VolumeCapacity64 volumeCapacity = volumeAllotment.newVolumeCapacity();\n        volumeCapacity.setDefinitionCapacity( dto.getDefinitionCapacity() );\n\n        spannedVolume.setVolumeCapacity( volumeCapacity );\n        spannedVolume.build();\n\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 创建条带卷\n     * @param dto 创建条带卷所需的参数\n     * @return 返回操作结果\n     * @throws SQLException sqlite异常\n     */\n    @PostMapping(\"/create/stripedVolume\")\n    public BasicResultResponse<String> createStripedVolume(@RequestBody LogicVolumeDTO dto) throws SQLException {\n        VolumeAllotment volumeAllotment = this.primaryVolume.getVolumeAllotment();\n        LocalStripedVolume stripedVolume = volumeAllotment.newLocalStripedVolume();\n\n        stripedVolume.setType( UOFSConsoleContents.VOLUME_TYPE_STRIPED );\n        stripedVolume.setName(dto.getName() );\n        stripedVolume.setExtConfig(dto.getExtConfig() );\n\n        VolumeCapacity64 volumeCapacity = volumeAllotment.newVolumeCapacity();\n        volumeCapacity.setDefinitionCapacity( dto.getDefinitionCapacity() 
);\n\n        stripedVolume.setVolumeCapacity( volumeCapacity );\n        stripedVolume.build();\n\n        return BasicResultResponse.success();\n    }\n\n    /**\n     *获取物理卷\n     * @param guid 物理卷guid\n     * @return 返回物理卷详细信息\n     */\n    @GetMapping(\"/query/physical\")\n    public String queryPhysicalVolume( @RequestParam(\"guid\") String guid ){\n        PhysicalVolume physicalVolume = this.primaryVolume.getPhysicalVolume(GUIDs.GUID128(guid));\n        return BasicResultResponse.success(physicalVolume).toJSONString();\n    }\n\n    /**\n     * 获取逻辑卷\n     * @param guid 逻辑卷guid\n     * @return 返回逻辑卷详细信息\n     */\n    @GetMapping(\"/query/logic\")\n    public String queryLogicVolume( @RequestParam(\"guid\") String guid ){\n        LogicVolume logicVolume = this.primaryVolume.get(GUIDs.GUID128(guid));\n        return BasicResultResponse.success(logicVolume).toJSONString();\n    }\n\n    /**\n     * 逻辑卷扩容\n     * @param dto 扩容所需参数\n     * @return 返回操作结果\n     */\n    @PostMapping(\"/storageExpansion\")\n    public BasicResultResponse<String> storageExpansion(@RequestBody StorageExpansionDTO dto){\n        GUID logicGuid = GUIDs.GUID128( dto.getLogicGuid() );\n        GUID physicalGuid = GUIDs.GUID128( dto.getChildGuid() );\n\n        LogicVolume logicVolume = this.primaryVolume.get(logicGuid);\n\n        logicVolume.storageExpansion( physicalGuid );\n        return BasicResultResponse.success();\n    }\n\n    /**\n     * 获取子卷\n     * @param volumeGuid 卷guid\n     * @return 返回子集情况\n     */\n    @GetMapping(\"/getChildren\")\n    public String getChildren(@RequestParam(\"volumeGuid\") String volumeGuid){\n        PhysicalVolume physicalVolume = this.primaryVolume.getPhysicalVolume(GUIDs.GUID128(volumeGuid));\n\n        if(physicalVolume != null){\n            return BasicResultResponse.error(\"物理卷不存在子卷\").toJSONString();\n        }\n        LogicVolume logicVolume = this.primaryVolume.get(GUIDs.GUID128(volumeGuid));\n        if( logicVolume instanceof 
TitanLocalSimpleVolume){\n            SimpleVolume simpleVolume = (SimpleVolume) logicVolume;\n            List<GUID> guids = simpleVolume.listPhysicalVolume();\n            if(guids.isEmpty()){\n                return BasicResultResponse.success().toJSONString();\n            }\n            PhysicalVolume volumePhysicalVolume = this.primaryVolume.getPhysicalVolume(guids.get(0));\n            ArrayList<Volume> volumes = new ArrayList<>();\n            volumes.add(volumePhysicalVolume);\n            return BasicResultResponse.success(volumes).toJSONString();\n        }\n        List<LogicVolume> volumes = logicVolume.queryChildren();\n        ArrayList<LogicVolume> arrayList = new ArrayList<>(volumes);\n\n        return BasicResultResponse.success(arrayList).toJSONString();\n    }\n\n    /**\n     * 获取全部卷\n     * @return 返回卷信息\n     */\n    @GetMapping(\"/queryAllVolumes\")\n    public String queryAllVolumes(){\n        List<Volume> volumes = this.primaryVolume.queryAllVolumes();\n        return BasicResultResponse.success(volumes).toJSONString();\n    }\n\n    /**\n     * 获取全部逻辑卷\n     * @return 返回卷信息\n     */\n    @GetMapping(\"/listLogicVolumes\")\n    public String queryLogicVolumes(){\n        List<Volume> volumes = this.primaryVolume.listLogicVolumes();\n        return BasicResultResponse.success(volumes).toJSONString();\n    }\n\n    /**\n     * 获取全部物理卷\n     * @return 返回卷信息\n     */\n    @GetMapping(\"/listPhysicsVolumes\")\n    public String queryPhysicsVolumes(){\n        List<Volume> volumes = this.primaryVolume.listPhysicsVolumes();\n        return BasicResultResponse.success(volumes).toJSONString();\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/controller/xxx.java",
    "content": "package com.walnut.sparta.uofs.console.api.controller;\n\npublic class xxx {\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/api/response/BasicResultResponse.java",
    "content": "package com.walnut.sparta.uofs.console.api.response;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\nimport org.springframework.http.HttpStatus;\n\nimport java.io.Serializable;\n\npublic class BasicResultResponse<T> implements Pinenut, Serializable {\n    private Integer    code = HttpStatus.OK.value();\n    private String     msg; //错误信息\n    private T          data; //数据\n\n    public static <T> BasicResultResponse<T > success() {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > successMsg( String msg  ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > success( T object ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.data = object;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > error( String msg ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.INTERNAL_SERVER_ERROR.value();\n        return result;\n    }\n\n\n    /**\n     * 获取\n     * @return code\n     */\n    public Integer getCode() {\n        return this.code;\n    }\n\n    /**\n     * 设置\n     * @param code\n     */\n    public void setCode(Integer code) {\n        this.code = code;\n    }\n\n    /**\n     * 获取\n     * @return msg\n     */\n    public String getMsg() {\n        return this.msg;\n    }\n\n    /**\n     * 设置\n     * @param msg\n     */\n    public void setMsg(String msg) {\n        this.msg = msg;\n    }\n\n    /**\n     * 获取\n     * @return 
data\n     */\n    public T getData() {\n        return this.data;\n    }\n\n    /**\n     * 设置\n     * @param data\n     */\n    public void setData(T data) {\n        this.data = data;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"code\" , this.code ),\n                new KeyValue<>( \"msg\"  , this.msg ),\n                new KeyValue<>( \"data\" , this.data )\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/config/AsyncConfig.java",
    "content": "package com.walnut.sparta.uofs.console.config;\n\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.scheduling.annotation.EnableAsync;\n\n@Configuration\npublic class AsyncConfig {\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/config/WebConfig.java",
    "content": "package com.walnut.sparta.uofs.console.config;\n\nimport com.walnut.sparta.uofs.console.interceptor.JWTInterceptor;\n\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.web.servlet.config.annotation.InterceptorRegistry;\nimport org.springframework.web.servlet.config.annotation.WebMvcConfigurer;\n\nimport javax.annotation.Resource;\n\n@Configuration\npublic class WebConfig implements WebMvcConfigurer {\n    @Resource\n    private JWTInterceptor jwtInterceptor;\n\n    @Override\n    public void addInterceptors(InterceptorRegistry registry) {\n        registry.addInterceptor(jwtInterceptor).addPathPatterns(\"/**\");\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/CreateExternalDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class CreateExternalDTO implements Pinenut {\n    private String folderPath;\n\n    private String externalSymbolicName;\n\n    private String reparsedPoint;\n\n    public String getReparsedPoint() {\n        return reparsedPoint;\n    }\n\n    public void setReparsedPoint(String reparsedPoint) {\n        this.reparsedPoint = reparsedPoint;\n    }\n\n    public String getFolderPath() {\n        return folderPath;\n    }\n\n    public void setFolderPath(String folderPath) {\n        this.folderPath = folderPath;\n    }\n\n    public String getExternalSymbolicName() {\n        return externalSymbolicName;\n    }\n\n    public void setExternalSymbolicName(String externalSymbolicName) {\n        this.externalSymbolicName = externalSymbolicName;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/CreateExternalSymbolicDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\npublic class CreateExternalSymbolicDTO {\n    private String folderPath;\n\n    private String externalSymbolicName;\n\n    private String reparsedPoint;\n\n    public String getFolderPath() {\n        return folderPath;\n    }\n\n    public void setFolderPath(String folderPath) {\n        this.folderPath = folderPath;\n    }\n\n    public String getExternalSymbolicName() {\n        return externalSymbolicName;\n    }\n\n    public void setExternalSymbolicName(String externalSymbolicName) {\n        this.externalSymbolicName = externalSymbolicName;\n    }\n\n    public String getReparsedPoint() {\n        return reparsedPoint;\n    }\n\n    public void setReparsedPoint(String reparsedPoint) {\n        this.reparsedPoint = reparsedPoint;\n    }\n\n\n\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/DownloadObjectByChannelDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\npublic class DownloadObjectByChannelDTO {\n    private String destDirPath;\n    private String targetPath;\n\n    public DownloadObjectByChannelDTO() {\n    }\n\n    public DownloadObjectByChannelDTO(String destDirPath, String targetPath) {\n        this.destDirPath = destDirPath;\n        this.targetPath = targetPath;\n    }\n\n\n    public String getDestDirPath() {\n        return destDirPath;\n    }\n\n\n    public void setDestDirPath(String destDirPath) {\n        this.destDirPath = destDirPath;\n    }\n\n\n    public String getTargetPath() {\n        return targetPath;\n    }\n\n\n    public void setTargetPath(String targetPath) {\n        this.targetPath = targetPath;\n    }\n\n    public String toString() {\n        return \"downloadObjectByChannelDto{destDirPath = \" + destDirPath + \", targetPath = \" + targetPath + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/LogicVolumeDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class LogicVolumeDTO implements Pinenut {\n    private String name;\n\n    private long definitionCapacity;\n\n    private String extConfig;\n\n\n    public LogicVolumeDTO() {\n    }\n\n    public LogicVolumeDTO(String name, long definitionCapacity, String extConfig) {\n        this.name = name;\n        this.definitionCapacity = definitionCapacity;\n        this.extConfig = extConfig;\n    }\n\n    public String getName() {\n        return name;\n    }\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n\n    public long getDefinitionCapacity() {\n        return definitionCapacity;\n    }\n\n\n    public void setDefinitionCapacity(long definitionCapacity) {\n        this.definitionCapacity = definitionCapacity;\n    }\n\n\n    public String getExtConfig() {\n        return extConfig;\n    }\n\n\n    public void setExtConfig(String extConfig) {\n        this.extConfig = extConfig;\n    }\n\n    public String toString() {\n        return \"SimpleVolumeDTO{name = \" + name + \", definitionCapacity = \" + definitionCapacity + \", extConfig = \" + extConfig + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/PhysicalVolumeDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class PhysicalVolumeDTO implements Pinenut {\n    private String name;\n\n    private long definitionCapacity;\n\n    private String extConfig;\n\n    private String mountPoint;\n\n\n    public PhysicalVolumeDTO() {\n    }\n\n    public PhysicalVolumeDTO(String name, long definitionCapacity, String extConfig, String mountPoint) {\n        this.name = name;\n        this.definitionCapacity = definitionCapacity;\n        this.extConfig = extConfig;\n        this.mountPoint = mountPoint;\n    }\n\n\n    public String getName() {\n        return name;\n    }\n\n\n    public void setName(String name) {\n        this.name = name;\n    }\n\n\n    public long getDefinitionCapacity() {\n        return definitionCapacity;\n    }\n\n\n    public void setDefinitionCapacity(long definitionCapacity) {\n        this.definitionCapacity = definitionCapacity;\n    }\n\n\n    public String getExtConfig() {\n        return extConfig;\n    }\n\n\n    public void setExtConfig(String extConfig) {\n        this.extConfig = extConfig;\n    }\n\n\n    public String getMountPoint() {\n        return mountPoint;\n    }\n\n\n    public void setMountPoint(String mountPoint) {\n        this.mountPoint = mountPoint;\n    }\n\n    public String toString() {\n        return \"PhysicalVolumeDTO{name = \" + name + \", definitionCapacity = \" + definitionCapacity + \", extConfig = \" + extConfig + \", mountPoint = \" + mountPoint + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/RenameDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\npublic class RenameDTO {\n    private String path;\n\n    private String newName;\n\n    public String getPath() {\n        return path;\n    }\n\n    public void setPath(String path) {\n        this.path = path;\n    }\n\n    public String getNewName() {\n        return newName;\n    }\n\n    public void setNewName(String newName) {\n        this.newName = newName;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/StorageExpansionDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class StorageExpansionDTO implements Pinenut {\n    public String logicGuid;\n\n    public String childGuid;\n\n\n    public StorageExpansionDTO() {\n    }\n\n    public StorageExpansionDTO(String logicGuid, String childGuid) {\n        this.logicGuid = logicGuid;\n        this.childGuid = childGuid;\n    }\n\n\n    public String getLogicGuid() {\n        return logicGuid;\n    }\n\n\n    public void setLogicGuid(String logicGuid) {\n        this.logicGuid = logicGuid;\n    }\n\n\n    public String getChildGuid() {\n        return childGuid;\n    }\n\n\n    public void setChildGuid(String childGuid) {\n        this.childGuid = childGuid;\n    }\n\n    public String toString() {\n        return \"StorageExpansionDTO{logicGuid = \" + logicGuid + \", physicalGuid = \" + childGuid + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/UpdateFileNameDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\npublic class UpdateFileNameDTO {\n    private String filePath;\n\n    private String newFileName;\n\n    public String getFilePath() {\n        return this.filePath;\n    }\n\n    public void setFilePath(String filePath) {\n        this.filePath = filePath;\n    }\n\n    public String getNewFileName() {\n        return this.newFileName;\n    }\n\n    public void setNewFileName(String newFileName) {\n        this.newFileName = newFileName;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/UpdateObjectByChannelDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\nimport org.springframework.web.multipart.MultipartFile;\n\npublic class UpdateObjectByChannelDTO {\n    private String          volumeGuid;\n    private String          destDirPath;\n    private MultipartFile   object;\n\n\n    public UpdateObjectByChannelDTO() {\n    }\n\n    public UpdateObjectByChannelDTO(String volumeGuid, String destDirPath, MultipartFile object) {\n        this.volumeGuid = volumeGuid;\n        this.destDirPath = destDirPath;\n        this.object = object;\n    }\n\n    public String getVolumeGuid() {\n        return volumeGuid;\n    }\n\n\n    public void setVolumeGuid(String volumeGuid) {\n        this.volumeGuid = volumeGuid;\n    }\n\n\n    public String getDestDirPath() {\n        return destDirPath;\n    }\n\n\n    public void setDestDirPath(String destDirPath) {\n        this.destDirPath = destDirPath;\n    }\n\n\n    public MultipartFile getObject() {\n        return object;\n    }\n\n\n    public void setObject(MultipartFile object) {\n        this.object = object;\n    }\n\n    public String toString() {\n        return \"updateObjectDto{volumeGuid = \" + volumeGuid + \", destDirPath = \" + destDirPath + \", object = \" + object + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/dto/UserLoginDTO.java",
    "content": "package com.walnut.sparta.uofs.console.domain.dto;\n\npublic class UserLoginDTO {\n    private String username;\n\n    private String password;\n\n    public String getUsername() {\n        return username;\n    }\n\n    public void setUsername(String username) {\n        this.username = username;\n    }\n\n    public String getPassword() {\n        return password;\n    }\n\n    public void setPassword(String password) {\n        this.password = password;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/domain/vo/FolderContentVo.java",
    "content": "package com.walnut.sparta.uofs.console.domain.vo;\n\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\n\nimport java.util.List;\n\npublic class FolderContentVo {\n    private List< FileTreeNode > fileTreeNodes;\n\n\n    public FolderContentVo() {\n    }\n\n    public FolderContentVo(List<FileTreeNode> fileTreeNodes) {\n        this.fileTreeNodes = fileTreeNodes;\n    }\n\n    public List<FileTreeNode> getFileTreeNodes() {\n        return fileTreeNodes;\n    }\n\n\n    public void setFileTreeNodes(List<FileTreeNode> fileTreeNodes) {\n        this.fileTreeNodes = fileTreeNodes;\n    }\n\n    public String toString() {\n        return \"FolderContentVo{fileTreeNodes = \" + fileTreeNodes + \"}\";\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/SpartaUOFSService.java",
    "content": "package com.walnut.sparta.uofs.console.infrastructure;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.framework.util.config.JSONConfig;\nimport com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver;\nimport com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.hydra.service.ibatis.hydranium.ServiceMappingDriver;\nimport com.pinecone.hydra.service.kom.ServiceInstrument;\nimport com.pinecone.hydra.service.kom.UniformServiceInstrument;\nimport com.pinecone.hydra.storage.bucket.TitanBucketInstrument;\nimport com.pinecone.hydra.storage.file.FileSystemConfig;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.KernelFileSystemConfig;\nimport com.pinecone.hydra.storage.file.UniformObjectFileSystem;\nimport com.pinecone.hydra.storage.version.TitanVersionManage;\nimport com.pinecone.hydra.storage.version.VersionManage;\nimport com.pinecone.hydra.storage.volume.KernelVolumeConfig;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.storage.volume.VolumeConfig;\nimport com.pinecone.hydra.system.component.ComponentInitializationException;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.version.ibatis.hydranium.VersionMappingDriver;\nimport com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\nimport com.pinecone.summer.spring.Springron;\nimport com.walnut.sparta.uofs.console.SpartaBoot;\nimport org.springframework.context.ApplicationContextInitializer;\nimport org.springframework.context.ConfigurableApplicationContext;\nimport org.springframework.context.support.GenericApplicationContext;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\n\npublic class 
SpartaUOFSService extends Springron implements UOFSService {\n    protected KOIMappingDriver koiMappingDriver;\n\n    protected KOIMappingDriver koiFileMappingDriver;\n\n    protected KOIMappingDriver koiBucketMappingDriver;\n\n    protected KOIMappingDriver koiVersionMappingDriver;\n\n    protected KOIMappingDriver koiServiceMappingDriver;\n\n\n    protected KOMFileSystem fileSystem;\n\n    protected UniformVolumeManager volumeTree;\n\n    protected TitanBucketInstrument bucketInstrument;\n\n    protected TitanVersionManage versionManage;\n\n    protected ServiceInstrument servicesInstrument;\n\n    protected void initKOMSubsystem() throws ComponentInitializationException {\n        this.koiMappingDriver = new VolumeMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiFileMappingDriver = new FileMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiBucketMappingDriver = new BucketMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiVersionMappingDriver = new VersionMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        this.koiServiceMappingDriver = new ServiceMappingDriver(\n                this, (IbatisClient)this.parentSystem().getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.parentSystem().getDispenserCenter()\n        );\n        JSONConfig 
selfConfig = (JSONConfig) this.getConfig();\n        FileSystemConfig fileSystemConfig = new KernelFileSystemConfig( selfConfig.queryJSONObject( \"service.PrimaryUniformFileSystem\" ) );\n        this.fileSystem         = new UniformObjectFileSystem( this.koiFileMappingDriver,fileSystemConfig );\n\n        VolumeConfig volumeConfig = new KernelVolumeConfig( selfConfig.queryJSONObject( \"service.PrimaryUniformVolumeManager\" ) );\n        this.volumeTree         = new UniformVolumeManager( this.koiMappingDriver,volumeConfig );\n        this.bucketInstrument   = new TitanBucketInstrument( this.koiBucketMappingDriver );\n        this.versionManage      = new TitanVersionManage( this.koiVersionMappingDriver );\n        this.servicesInstrument = new UniformServiceInstrument( this.koiServiceMappingDriver );\n    }\n\n    protected void initSpringBeanFactorySubsystem() throws ComponentInitializationException {\n        this.setPrimarySources( SpartaBoot.class );\n        this.setInitializer(new Executor() {\n            @Override\n            public void execute() throws Exception {\n                SpartaUOFSService.this.getSpringApplication().addInitializers(new ApplicationContextInitializer<ConfigurableApplicationContext>() {\n                    @Override\n                    public void initialize( ConfigurableApplicationContext applicationContext ) {\n                        GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext;\n                        genericApplicationContext.registerBean(\"primaryFileSystem\", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem) fileSystem);\n                        genericApplicationContext.registerBean(\"primaryVolume\", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree);\n                        genericApplicationContext.registerBean(\"primaryBucket\", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument);\n                       
 genericApplicationContext.registerBean(\"primaryVersion\", VersionManage.class, () -> (VersionManage) versionManage);\n                        genericApplicationContext.registerBean(\"primaryService\", ServiceInstrument.class, () ->  servicesInstrument);\n                    }\n                });\n            }\n        });\n    }\n\n    protected void initSubsystem() throws ComponentInitializationException {\n        this.initKOMSubsystem();\n        this.initSpringBeanFactorySubsystem();\n    }\n\n    public SpartaUOFSService(String szName, Processum parent, String[] springbootArgs ) throws ComponentInitializationException {\n        super( szName, parent, springbootArgs );\n        this.mSpringKernel.setPrimarySources( SpartaBoot.class );\n\n        this.initSubsystem();\n    }\n    public SpartaUOFSService( String szName, Processum parent ) throws ComponentInitializationException {\n        this( szName, parent, new String[0] );\n    }\n\n    @Override\n    protected void loadConfig() {\n        this.mServgramList     = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey );\n        Object dyServgramConf  = this.mServgramList.get( this.gramName() );\n        if( dyServgramConf instanceof String ) {\n            try{\n                this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) );\n            }\n            catch ( IOException ignore ) {\n                this.getLogger().info( \"[Notice] Spring will use the default config `application.yaml`.\" );\n            }\n        }\n        else {\n            this.mServgramConf = this.mServgramList.getChild( this.gramName() );\n        }\n    }\n\n    @Override\n    public Tritium parentSystem() {\n        return (Tritium)super.parentSystem();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSCommonConfig.java",
    "content": "package com.walnut.sparta.uofs.console.infrastructure;\n\nimport java.util.Map;\n\npublic class UOFSCommonConfig implements UOFSConfig{\n    protected String mszPhysicalVolumeType;\n\n    protected String mszSimpleVolumeType;\n\n    protected String mszSpannedVolumeType;\n\n    protected String mszStripedVolumeType;\n\n    protected String mszVersionPrefix;\n\n    public UOFSCommonConfig(){}\n\n    public UOFSCommonConfig(Map<String, Object> config){\n        this.mszPhysicalVolumeType = (String) config.get(\"PhysicalVolumeType\");\n        this.mszSimpleVolumeType = (String) config.get(\"SimpleVolumeType\");\n        this.mszSpannedVolumeType = (String) config.get(\"SpannedVolumeType\");\n        this.mszStripedVolumeType = (String) config.get(\"StripedVolumeType\");\n        this.mszVersionPrefix = (String) config.get(\"VersionPrefix\");\n    }\n\n    @Override\n    public String getPhysicalVolumeType() {\n        return this.mszPhysicalVolumeType;\n    }\n\n    @Override\n    public String getSimpleVolumeType() {\n        return this.mszSimpleVolumeType;\n    }\n\n    @Override\n    public String getSpannedVolumeType() {\n        return this.mszSpannedVolumeType;\n    }\n\n    @Override\n    public String getStripedVolumeType() {\n        return this.mszStripedVolumeType;\n    }\n\n    @Override\n    public String getVersionPrefix() {\n        return this.mszVersionPrefix;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSConfig.java",
    "content": "package com.walnut.sparta.uofs.console.infrastructure;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UOFSConfig extends Pinenut {\n    String getPhysicalVolumeType();\n\n    String getSimpleVolumeType();\n\n    String getSpannedVolumeType();\n\n    String getStripedVolumeType();\n\n    String getVersionPrefix();\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSConsoleContents.java",
    "content": "package com.walnut.sparta.uofs.console.infrastructure;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UOFSConsoleContents implements Pinenut {\n    public static String VOLUME_TYPE_PHYSICAL = \"PhysicalVolume\";\n\n    public static String VOLUME_TYPE_SIMPLE = \"SimpleVolume\";\n\n    public static String VOLUME_TYPE_SPANNED = \"SpannedVolume\";\n\n    public static String VOLUME_TYPE_STRIPED = \"StripedVolume\";\n\n    public static String VERSION_PREFIX = \"/$version\";\n\n    public static String FORWARD_SLASH = \"/\";\n\n    public static String PERIOD = \".\";\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSContentDelivery.java",
    "content": "package com.walnut.sparta.uofs.console.infrastructure;\n\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.tritium.Tritium;\n\npublic class UOFSContentDelivery extends Tritium implements UOFSContentDeliveryService{\n    protected SpartaUOFSService spartaUOFSService;\n\n    public UOFSContentDelivery(String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public UOFSContentDelivery(String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    public SpartaUOFSService getSpartaUOFSService(){\n        return this.spartaUOFSService;\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        this.spartaUOFSService = new SpartaUOFSService( \"SpartaUOFSService\", this );\n        this.spartaUOFSService.execute();\n\n\n        this.getTaskManager().add(this.spartaUOFSService);\n        this.getTaskManager().syncWaitingTerminated();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSContentDeliveryService.java",
    "content": "package com.walnut.sparta.uofs.console.infrastructure;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface UOFSContentDeliveryService extends Pinenut {\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/infrastructure/UOFSService.java",
    "content": "package com.walnut.sparta.uofs.console.infrastructure;\n\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\n\npublic interface UOFSService extends Slf4jTraceable {\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/interceptor/JWTInterceptor.java",
    "content": "package com.walnut.sparta.uofs.console.interceptor;\n\nimport com.alibaba.fastjson.JSONObject;\nimport com.walnut.sparta.uofs.console.util.JWTUtil;\nimport com.walnut.sparta.uofs.console.api.response.BasicResultResponse;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.http.HttpMethod;\nimport org.springframework.stereotype.Component;\nimport org.springframework.util.StringUtils;\nimport org.springframework.web.servlet.HandlerInterceptor;\n\n@Component\npublic class JWTInterceptor implements HandlerInterceptor {\n    private Logger log = LoggerFactory.getLogger( this.getClass() );\n    @Override\n    public boolean preHandle(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response, Object handler) throws Exception {\n        String url=request.getRequestURI();\n        if (request.getMethod().equals(HttpMethod.OPTIONS.name())) {\n            return true;\n        }\n        //log.info(\"请求的路径是：\"+ url);\n        if (url.contains(\"login\")||url.contains(\"register\")||url.contains(\"send_code\")||url.contains(\"download\")){\n            log.info(\"Allow login or registration operations\");\n            return true;\n        }\n        String jwt=request.getHeader(\"Token\");\n        if (!StringUtils.hasLength(jwt)){\n            log.info(\"The request header Token is empty\");\n            BasicResultResponse error = BasicResultResponse.error(\"not login\");\n            String jsonString = JSONObject.toJSONString(error);\n            response.getWriter().write(jsonString);\n            return false;\n        }\n        try {\n            JWTUtil.ParseJWt(jwt);\n        } catch (Exception e){\n            log.info(\"Token parsing failed\");\n            BasicResultResponse error = BasicResultResponse.error(\"Not logged in\");\n            String jsonString = JSONObject.toJSONString(error);\n            response.getWriter().write(jsonString);\n            return false;\n  
      }\n        return true;\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/service/FileService.java",
    "content": "package com.walnut.sparta.uofs.console.service;\n\nimport com.pinecone.framework.util.id.GUID;\n\npublic interface FileService {\n    void remove(GUID fileGuid);\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/service/impl/FileServiceImpl.java",
    "content": "package com.walnut.sparta.uofs.console.service.impl;\n\nimport com.pinecone.framework.util.id.GUID;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.entity.ClusterPage;\nimport com.pinecone.hydra.storage.file.entity.FileNode;\nimport com.pinecone.hydra.storage.file.entity.FileTreeNode;\nimport com.pinecone.hydra.storage.file.entity.Folder;\nimport com.pinecone.hydra.storage.file.entity.LocalCluster;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.unit.imperium.entity.TreeNode;\nimport com.walnut.sparta.uofs.console.service.FileService;\nimport org.springframework.stereotype.Service;\n\nimport javax.annotation.Resource;\nimport java.sql.SQLException;\nimport java.util.List;\n\n@Service\npublic class FileServiceImpl implements FileService {\n\n    @Resource\n    private KOMFileSystem primaryFileSystem;\n\n    @Resource\n    private UniformVolumeManager primaryVolume;\n\n    @Override\n    public void remove(GUID fileGuid){\n        FileTreeNode fileTreeNode = this.primaryFileSystem.get(fileGuid);\n        if( fileTreeNode instanceof Folder){\n            Folder folder = (Folder) fileTreeNode;\n            List<TreeNode> children = this.primaryFileSystem.getChildren(folder.getGuid());\n            for( TreeNode treeNode : children ){\n                this.remove( treeNode.getGuid() );\n            }\n        }else if( fileTreeNode instanceof FileNode){\n            FileNode fileNode = (FileNode) fileTreeNode;\n            ClusterPage clusterPage = this.primaryFileSystem.fetchClustersByFileGuid( fileNode.getGuid() );\n            long fileClusterNum = clusterPage.getClusters();\n            for( long i = 0; i < fileClusterNum; i++ ){\n                LocalCluster frame = clusterPage.getLocalCluster( i );\n                try {\n                    this.primaryVolume.removeStorageObject( frame );\n                } catch (SQLException e) {\n                    throw 
new RuntimeException(e);\n                }\n            }\n        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/java/com/walnut/sparta/uofs/console/util/JWTUtil.java",
    "content": "package com.walnut.sparta.uofs.console.util;\n\nimport com.auth0.jwt.JWT;\nimport com.auth0.jwt.algorithms.Algorithm;\nimport com.auth0.jwt.interfaces.DecodedJWT;\n\nimport java.util.Date;\nimport java.util.HashMap;\n\npublic class JWTUtil {\n    private static final String SIGN = \"!^&%&*!@$*%!!@(&%2ar^2t\";\n    //学生登录生成JWT令牌\n    public static String createJWT(){\n        HashMap<String, Object> map = new HashMap<>();\n        String token = JWT.create()\n                .withHeader(map) //设置头信息\n                .withExpiresAt(new Date(System.currentTimeMillis() + 3600 * 1000)) //设置失效时间\n                .sign(Algorithm.HMAC256(SIGN)); //设置签名以及签名方式 这里使用HMAC256加密方式\n        return token;\n    }\n\n    public static DecodedJWT ParseJWt(String jwt){\n        return JWT.require(Algorithm.HMAC256(SIGN)).build().verify(jwt);\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/main/resources/UOFSIface.thrift",
    "content": "service UOFSIface{\n\n    void affirmFileNode(1:string filePath);\n\n}"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/test/java/com/walnut/sparta/uofs/SparteUcdnServiceApplicationTests.java",
    "content": "package com.walnut.sparta.uofs;\n\nimport org.junit.Test;\nimport org.springframework.boot.test.context.SpringBootTest;\n\n@SpringBootTest\npublic class SparteUcdnServiceApplicationTests {\n\n    @Test\n    public void contextLoads() {\n    }\n\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-console/src/test/java/com/walnut/sparta/uofs/TestSparta.java",
    "content": "package com.walnut.sparta.uofs;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.tritium.Tritium;\n\nclass JesusChrist extends Tritium {\n    public JesusChrist( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public JesusChrist( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n//        Sparta sparta = new Sparta( \"Sparta\", this );\n//\n//\n//        Thread shutdowner = new Thread(()->{\n//            Debug.sleep( 5000 );\n//            sparta.terminate();\n//        });\n//        //shutdowner.start();\n//\n//\n//\n//\n//        sparta.setPrimarySources( SpartaBoot.class );\n//\n//        KOIMappingDriver koiMappingDriver = new VolumeMappingDriver(\n//                sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n//        );\n//        KOIMappingDriver koiFileMappingDriver = new FileMappingDriver(\n//                sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n//        );\n//        KOIMappingDriver koiBucketMappingDriver = new BucketMappingDriver(\n//                sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n//        );\n//        KOIMappingDriver koiVersionMappingDriver = new VersionMappingDriver(\n//                sparta, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n//        );\n//\n//\n//\n//        KOMFileSystem fileSystem = new UniformObjectFileSystem( koiFileMappingDriver );\n//        UniformVolumeManager volumeTree = new UniformVolumeManager( 
koiMappingDriver );\n//        TitanBucketInstrument bucketInstrument = new TitanBucketInstrument( koiBucketMappingDriver );\n//        TitanVersionManage versionManage = new TitanVersionManage( koiVersionMappingDriver );\n//\n//        sparta.setInitializer(new Executor() {\n//            @Override\n//            public void execute() throws Exception {\n//                sparta.getSpringApplication().addInitializers(new ApplicationContextInitializer<ConfigurableApplicationContext>() {\n//                    @Override\n//                    public void initialize( ConfigurableApplicationContext applicationContext ) {\n//                        GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext;\n//                        genericApplicationContext.registerBean(\"primaryFileSystem\", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem)fileSystem);\n//                        genericApplicationContext.registerBean(\"primaryVolume\", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree);\n//                        genericApplicationContext.registerBean(\"primaryBucket\", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument);\n//                        genericApplicationContext.registerBean(\"primaryVersion\", VersionManage.class, () -> (VersionManage) versionManage);\n//                    }\n//                });\n//            }\n//        });\n//\n//\n//        sparta.execute();\n//\n//\n//\n//\n//\n//        this.getTaskManager().add( sparta );\n//        this.getTaskManager().syncWaitingTerminated();\n    }\n}\n\n\npublic class TestSparta {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) );\n            jesus.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-service/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sparta</artifactId>\n        <groupId>com.walnuts.sparta</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.sparta.uofs.service</groupId>\n    <artifactId>sparta-uofs-service</artifactId>\n    <version>2.1.0</version>\n\n    <properties>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n        <dependency>\n            <groupId>org.mybatis.spring.boot</groupId>\n            <artifactId>mybatis-spring-boot-starter</artifactId>\n            <version>2.2.2</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.projectlombok</groupId>\n            <artifactId>lombok</artifactId>\n            <optional>true</optional>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n   
         <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.summer.springram</groupId>\n            <artifactId>springram</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.tritium</groupId>\n            <artifactId>hydra-system-tritium</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kom.driver.default</groupId>\n            <artifactId>hydra-kom-default-driver</artifactId>\n            <version>2.1.0</version>\n            <scope>test</scope>\n        </dependency>\n    </dependencies>\n\n</project>\n"
  },
  {
    "path": "Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/Sparta.java",
    "content": "package com.walnut.sparta.uofs.service;\n\nimport com.pinecone.framework.system.executum.Processum;\nimport com.pinecone.hydra.servgram.Servgram;\nimport com.pinecone.hydra.system.component.Slf4jTraceable;\nimport com.pinecone.summer.spring.Springron;\n\nimport java.io.IOException;\nimport java.nio.file.Path;\n\npublic class Sparta extends Springron implements Slf4jTraceable {\n    public Sparta(String szName, Processum parent, String[] springbootArgs ) {\n        super( szName, parent, springbootArgs );\n        this.mSpringKernel.setPrimarySources( SpartaBoot.class );\n    }\n\n    public Sparta( String szName, Processum parent ) {\n        this( szName, parent, new String[0] );\n    }\n\n    @Override\n    protected void loadConfig() {\n        this.mServgramList     = this.getAttachedOrchestrator().getSectionConfig().getChild( Servgram.ConfigServgramsKey );\n        Object dyServgramConf  = this.mServgramList.get( this.gramName() );\n        if( dyServgramConf instanceof String ) {\n            try{\n                this.mServgramConf = this.mServgramList.getChildFromPath( Path.of((String) dyServgramConf) );\n            }\n            catch ( IOException ignore ) {\n                this.getLogger().info( \"[Notice] Spring will use the default config `application.yaml`.\" );\n            }\n        }\n        else {\n            this.mServgramConf = this.mServgramList.getChild( this.gramName() );\n        }\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/SpartaBoot.java",
    "content": "package com.walnut.sparta.uofs.service;\n\nimport org.springframework.boot.autoconfigure.SpringBootApplication;\n\n@SpringBootApplication\npublic class SpartaBoot {\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/api/controller/xxx.java",
    "content": "package com.walnut.sparta.uofs.service.api.controller;\n\npublic class xxx {\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/api/response/BasicResultResponse.java",
    "content": "package com.walnut.sparta.uofs.service.api.response;\n\nimport java.io.Serializable;\n\nimport org.springframework.http.HttpStatus;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.unit.KeyValue;\nimport com.pinecone.framework.util.json.JSONEncoder;\n\npublic class BasicResultResponse<T> implements Pinenut, Serializable {\n    private Integer    code = HttpStatus.OK.value();\n    private String     msg; //错误信息\n    private T          data; //数据\n\n    public static <T> BasicResultResponse<T > success() {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > successMsg( String msg  ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > success( T object ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.data = object;\n        result.code = HttpStatus.OK.value();\n        return result;\n    }\n\n    public static <T> BasicResultResponse<T > error( String msg ) {\n        BasicResultResponse<T> result = new BasicResultResponse<>();\n        result.msg  = msg;\n        result.code = HttpStatus.INTERNAL_SERVER_ERROR.value();\n        return result;\n    }\n\n\n    /**\n     * 获取\n     * @return code\n     */\n    public Integer getCode() {\n        return this.code;\n    }\n\n    /**\n     * 设置\n     * @param code\n     */\n    public void setCode(Integer code) {\n        this.code = code;\n    }\n\n    /**\n     * 获取\n     * @return msg\n     */\n    public String getMsg() {\n        return this.msg;\n    }\n\n    /**\n     * 设置\n     * @param msg\n     */\n    public void setMsg(String msg) {\n        this.msg = msg;\n    }\n\n    /**\n     * 获取\n     * @return 
data\n     */\n    public T getData() {\n        return this.data;\n    }\n\n    /**\n     * 设置\n     * @param data\n     */\n    public void setData(T data) {\n        this.data = data;\n    }\n\n    @Override\n    public String toJSONString() {\n        return JSONEncoder.stringifyMapFormat( new KeyValue[]{\n                new KeyValue<>( \"code\" , this.code ),\n                new KeyValue<>( \"msg\"  , this.msg ),\n                new KeyValue<>( \"data\" , this.data )\n        } );\n    }\n\n    @Override\n    public String toString() {\n        return this.toJSONString();\n    }\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-service/src/main/java/com/walnut/sparta/uofs/service/infrastructure/Contents.java",
    "content": "package com.walnut.sparta.uofs.service.infrastructure;\n\npublic class Contents {\n    public static String LOCAL_ACCOUNT = \"0000000-000000-0000-00\";\n}\n"
  },
  {
    "path": "Sparta/sparta-uofs-service/src/test/java/com/sparta/TestSparta.java",
    "content": "package com.sparta;\n\nimport com.pinecone.Pinecone;\nimport com.pinecone.framework.system.CascadeSystem;\nimport com.pinecone.framework.system.functions.Executor;\nimport com.pinecone.framework.util.Debug;\nimport com.pinecone.hydra.bucket.ibatis.hydranium.BucketMappingDriver;\nimport com.pinecone.hydra.file.ibatis.hydranium.FileMappingDriver;\nimport com.pinecone.hydra.storage.bucket.TitanBucketInstrument;\nimport com.pinecone.hydra.storage.file.KOMFileSystem;\nimport com.pinecone.hydra.storage.file.UniformObjectFileSystem;\nimport com.pinecone.hydra.storage.volume.UniformVolumeManager;\nimport com.pinecone.hydra.system.ko.driver.KOIMappingDriver;\nimport com.pinecone.hydra.volume.ibatis.hydranium.VolumeMappingDriver;\nimport com.pinecone.tritium.Tritium;\nimport com.pinecone.slime.jelly.source.ibatis.IbatisClient;\n\nimport com.walnut.sparta.uofs.service.Sparta;\nimport com.walnut.sparta.uofs.service.SpartaBoot;\nimport org.springframework.context.ApplicationContextInitializer;\nimport org.springframework.context.ConfigurableApplicationContext;\nimport org.springframework.context.support.GenericApplicationContext;\n\nclass JesusChrist extends Tritium {\n    public JesusChrist( String[] args, CascadeSystem parent ) {\n        this( args, null, parent );\n    }\n\n    public JesusChrist( String[] args, String szName, CascadeSystem parent ){\n        super( args, szName, parent );\n    }\n\n    @Override\n    public void vitalize () throws Exception {\n        Sparta sparta = new Sparta( \"Sparta\", this );\n\n\n        Thread shutdowner = new Thread(()->{\n            Debug.sleep( 5000 );\n            sparta.terminate();\n        });\n        //shutdowner.start();\n\n\n\n        KOIMappingDriver koiMappingDriver = new VolumeMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        KOIMappingDriver koiFileMappingDriver 
= new FileMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n        KOIMappingDriver koiBucketMappingDriver = new BucketMappingDriver(\n                this, (IbatisClient)this.getMiddlewareDirector().getRDBManager().getRDBClientByName( \"MySQLKingHydranium\" ), this.getDispenserCenter()\n        );\n\n\n\n        KOMFileSystem fileSystem = new UniformObjectFileSystem( koiFileMappingDriver, null );\n        UniformVolumeManager volumeTree = new UniformVolumeManager( koiMappingDriver, null );\n        TitanBucketInstrument bucketInstrument = new TitanBucketInstrument( koiBucketMappingDriver );\n\n\n        sparta.setPrimarySources( SpartaBoot.class );\n\n        sparta.setInitializer(new Executor() {\n            @Override\n            public void execute() throws Exception {\n                sparta.getSpringApplication().addInitializers(new ApplicationContextInitializer<ConfigurableApplicationContext>() {\n                    @Override\n                    public void initialize( ConfigurableApplicationContext applicationContext ) {\n                        GenericApplicationContext genericApplicationContext = (GenericApplicationContext) applicationContext;\n                        genericApplicationContext.registerBean(\"primaryFileSystem\", UniformObjectFileSystem.class, () -> (UniformObjectFileSystem)fileSystem);\n                        genericApplicationContext.registerBean(\"primaryVolume\", UniformVolumeManager.class, () -> (UniformVolumeManager) volumeTree);\n                        genericApplicationContext.registerBean(\"primaryBucket\", TitanBucketInstrument.class, () -> (TitanBucketInstrument) bucketInstrument);\n                    }\n                });\n            }\n        });\n\n\n        sparta.execute();\n\n\n\n\n\n        this.getTaskManager().add( sparta );\n        
this.getTaskManager().syncWaitingTerminated();\n    }\n}\n\n\npublic class TestSparta {\n    public static void main( String[] args ) throws Exception {\n        Pinecone.init( (Object...cfg )->{\n            JesusChrist jesus = (JesusChrist) Pinecone.sys().getTaskManager().add( new JesusChrist( args, Pinecone.sys() ) );\n            jesus.vitalize();\n            return 0;\n        }, (Object[]) args );\n    }\n}\n"
  },
  {
    "path": "TaskJuggler/TaskJuggler.iml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<module version=\"4\">\n  <component name=\"NewModuleRootManager\" LANGUAGE_LEVEL=\"JDK_11\">\n    <output url=\"file://$MODULE_DIR$/target/classes\" />\n    <output-test url=\"file://$MODULE_DIR$/target/test-classes\" />\n    <content url=\"file://$MODULE_DIR$\">\n      <sourceFolder url=\"file://$MODULE_DIR$/src/main/java\" isTestSource=\"false\" />\n      <excludeFolder url=\"file://$MODULE_DIR$/target\" />\n    </content>\n    <orderEntry type=\"inheritedJdk\" />\n    <orderEntry type=\"sourceFolder\" forTests=\"false\" />\n    <orderEntry type=\"module\" module-name=\"Pinecone\" />\n    <orderEntry type=\"library\" name=\"Maven: mysql:mysql-connector-java:8.0.23\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.google.protobuf:protobuf-java:3.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: commons-fileupload:commons-fileupload:1.3.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: commons-io:commons-io:2.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-web:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-autoconfigure:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-logging:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: ch.qos.logback:logback-classic:1.2.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: ch.qos.logback:logback-core:1.2.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: 
org.apache.logging.log4j:log4j-to-slf4j:2.13.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.logging.log4j:log4j-api:2.13.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.slf4j:jul-to-slf4j:1.7.30\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: jakarta.annotation:jakarta.annotation-api:1.3.5\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-core:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-jcl:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.yaml:snakeyaml:1.27\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-json:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-databind:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.module:jackson-module-parameter-names:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-tomcat:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.glassfish:jakarta.el:3.0.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat.embed:tomcat-embed-websocket:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-web:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-beans:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: 
org.springframework:spring-webmvc:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-aop:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-context:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-expression:5.3.3\" level=\"project\" />\n    <orderEntry type=\"module\" module-name=\"Messenger\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-test:2.3.9.RELEASE\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-test:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-test-autoconfigure:2.4.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.jayway.jsonpath:json-path:2.4.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: net.minidev:json-smart:2.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: net.minidev:accessors-smart:1.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.ow2.asm:asm:5.0.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.slf4j:slf4j-api:1.7.30\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: jakarta.xml.bind:jakarta.xml.bind-api:2.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: jakarta.activation:jakarta.activation-api:1.2.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.assertj:assertj-core:3.18.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.hamcrest:hamcrest:2.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.jupiter:junit-jupiter:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.jupiter:junit-jupiter-api:5.7.0\" level=\"project\" 
/>\n    <orderEntry type=\"library\" name=\"Maven: org.opentest4j:opentest4j:1.2.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.platform:junit-platform-commons:1.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.jupiter:junit-jupiter-params:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" scope=\"RUNTIME\" name=\"Maven: org.junit.jupiter:junit-jupiter-engine:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.vintage:junit-vintage-engine:5.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apiguardian:apiguardian-api:1.1.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.junit.platform:junit-platform-engine:1.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: junit:junit:4.13.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.mockito:mockito-core:3.6.28\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: net.bytebuddy:byte-buddy:1.10.18\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: net.bytebuddy:byte-buddy-agent:1.10.18\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.objenesis:objenesis:3.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.mockito:mockito-junit-jupiter:3.6.28\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.skyscreamer:jsonassert:1.5.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.vaadin.external.google:android-json:0.0.20131108.vaadin1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-test:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.xmlunit:xmlunit-core:2.7.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.11.4\" 
level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-core:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.core:jackson-annotations:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.jackson.module:jackson-module-jaxb-annotations:2.11.4\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.codehaus.woodstox:stax2-api:4.2.1\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.fasterxml.woodstox:woodstox-core:6.2.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.alibaba:fastjson:1.2.75\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.boot:spring-boot-starter-amqp:2.3.9.RELEASE\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-messaging:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.amqp:spring-rabbit:2.3.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.amqp:spring-amqp:2.3.2\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework.retry:spring-retry:1.3.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: com.rabbitmq:amqp-client:5.10.0\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.springframework:spring-tx:5.3.3\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat.embed:tomcat-embed-core:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.apache.tomcat:tomcat-annotations-api:9.0.41\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.projectlombok:lombok:1.18.16\" level=\"project\" />\n    <orderEntry type=\"library\" name=\"Maven: org.aspectj:aspectjweaver:1.9.6\" level=\"project\" />\n  
</component>\n</module>"
  },
  {
    "path": "TaskJuggler/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <groupId>com.Sauron</groupId>\n        <artifactId>sauron</artifactId>\n        <version>1.0-SNAPSHOT</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <artifactId>TaskJuggler</artifactId>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n    </properties>\n\n    <dependencies>\n\n        <dependency>\n            <groupId>com.Sauron</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>3.3.1</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>com.Sauron</groupId>\n            <artifactId>Messenger</artifactId>\n            <version>1.0-SNAPSHOT</version>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-amqp</artifactId>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "TaskJuggler/src/main/java/com/genius/common/Heist.java",
    "content": "package com.genius.common;\n\nimport com.genius.core.HeistCenter;\nimport com.genius.util.SystemUtils;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.util.Random;\n\n/**\n * @author Genius\n * @date 2023/05/08 23:41\n **/\npublic class Heist implements Runnable{\n\n    private Logger logger;\n\n    private String heistName;\n\n    private HeistCenter heistCenter;\n\n    private int spoilSum;\n\n    private int failureRetryTimes;\n\n    public Heist(HeistCenter heistCenter){\n\n        this.heistCenter = heistCenter;\n        this.heistName = \"Heist:\"+this.hashCode();\n        this.spoilSum = this.heistCenter.getSpoilNum() + this.heistCenter.getSpoilBase();\n        this.failureRetryTimes = this.heistCenter.getProtoConfig().getFailureRetryTimes();\n\n        logger = LoggerFactory.getLogger(SystemUtils.getLoggerFormatName(heistName));\n    }\n\n    //根据任务数量获取线程数\n\n    private int getSpoil(){\n        int index = this.heistCenter.getNowSpoil().getAndIncrement();\n        if(index<=spoilSum){\n            logger.info(\"{} get the spoil[{}]\",heistName,index);\n        }\n        return index;\n    }\n\n    private boolean handlerSpoil(int index){\n        logger.info(\"{} handler the spoil[{}]\",heistName,index);\n        return new Random().nextInt(100)+1>80;\n    }\n\n    private boolean failureRetry(int index,int retryTimes){\n        logger.info(\"{} retry the spoil[{}] retryTimes:{}\",heistName,index,retryTimes);\n        return this.handlerSpoil(index);\n    }\n\n    private boolean completeRob(int index){\n        logger.info(\"{}  complete the spoil[{}]\",heistName,index);\n        this.heistCenter.getSpoilsLock().countDown();\n        return true;\n    }\n\n    private void failureHandler(int index){\n        logger.info(\"{} report failure info: spoil[{}] rob fail\",heistName,index);\n    }\n\n    @Override\n    public void run() {\n        int index = this.getSpoil();\n        while(index<=spoilSum){\n           
 int nowFailureRetryTimes = 0;\n            if(!handlerSpoil(index)){\n                while((nowFailureRetryTimes++)<this.failureRetryTimes&&!failureRetry(index,nowFailureRetryTimes));\n                if(nowFailureRetryTimes>this.failureRetryTimes){\n                    failureHandler(index);\n                }\n            }\n            completeRob(index);\n            index = this.getSpoil();\n        }\n    }\n}\n"
  },
  {
    "path": "TaskJuggler/src/main/java/com/genius/config/HeistConfig.java",
    "content": "package com.genius.config;\n\nimport com.genius.constpool.RadiumConstPool;\nimport lombok.AllArgsConstructor;\nimport lombok.Data;\nimport lombok.NoArgsConstructor;\nimport org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;\nimport org.springframework.boot.context.properties.ConfigurationProperties;\nimport org.springframework.context.annotation.Configuration;\nimport org.springframework.stereotype.Component;\n\n/**\n * @author Genius\n * @date 2023/05/09 13:06\n **/\n@Data\n@Component\n@AllArgsConstructor\n@NoArgsConstructor\n@ConfigurationProperties(prefix = RadiumConstPool.CONFIG_COMPONENTS_PREFIX+\"heist\")\npublic class HeistConfig {\n\n    private Integer failureRetryTimes;\n\n    private Integer heistNum;\n\n    private Long exceptionWaitTime;  //线程池异常等待时间\n\n    private String robTaskName; //执行的任务\n\n}\n"
  },
  {
    "path": "TaskJuggler/src/main/java/com/genius/config/MqConfig.java",
    "content": "package com.genius.config;\n\nimport com.genius.pool.MqPool;\nimport org.springframework.amqp.core.*;\nimport org.springframework.boot.autoconfigure.AutoConfigureAfter;\nimport org.springframework.context.annotation.Bean;\nimport org.springframework.context.annotation.Configuration;\n\n\n/**\n * @author Genius\n * @date 2023/05/12 18:57\n **/\n@Configuration\n@AutoConfigureAfter(SystemConfig.class)\npublic class MqConfig {\n\n\n    public static final String TASK_REPLY= String.format(\"task.%s.reply\",SystemConfig.ServiceId);\n    @Bean\n    public DirectExchange nonjronTaskDirectExchange(){\n        return new DirectExchange(MqPool.EXCHANGE_TOPIC_NONJRON_TASK);\n    }\n\n    @Bean\n    public Queue taskSendQueue(){\n        return new Queue(MqPool.MASTER_TASK_SEND_CENTER);\n    }\n\n\n    @Bean\n    public Queue taskReplyQueue(){\n        return new Queue( \"task.Nonaron-Kingpin-Prime.reply\");\n\n    }\n\n    @Bean\n    public Binding bindingReplyQueue(Queue taskReplyQueue,DirectExchange nonjronTaskDirectExchange){\n        return BindingBuilder.bind(taskReplyQueue).to(nonjronTaskDirectExchange).with(SystemConfig.ServiceId);\n    }\n\n}\n"
  },
  {
    "path": "TaskJuggler/src/main/java/com/genius/core/HeistCenter.java",
    "content": "package com.genius.core;\n\nimport com.genius.common.Heist;\nimport com.genius.common.UlfUMC.UlfUMCMessage;\nimport com.genius.config.HeistConfig;\nimport com.genius.config.SystemConfig;\nimport com.genius.mq.Harbor;\nimport com.genius.pool.FunctionNamePool;\nimport com.genius.util.SystemUtils;\nimport lombok.Data;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.concurrent.CountDownLatch;\nimport java.util.concurrent.ExecutorService;\nimport java.util.concurrent.Executors;\nimport java.util.concurrent.atomic.AtomicInteger;\n\n/**\n * @author Genius\n * @date 2023/05/08 23:08\n **/\n\n@Data\n@Component\n//负责劫匪的任务调度分配\npublic class HeistCenter {\n\n    @Autowired\n    private Harbor harbor;  //港口，负责和master结点进行通信\n    @Resource\n    private HeistConfig heistConfig;\n    private ExecutorService heistPool;\n\n    private AtomicInteger nowSpoil;\n\n    private int spoilNum;       //赃物数量\n\n    private int spoilBase;      //赃物基数\n\n    private CountDownLatch spoilsLock;\n\n    Logger logger = LoggerFactory.getLogger(SystemUtils.getLoggerFormatName(\"HeistCenter\"));\n\n    public HeistCenter(){\n\n    }\n\n    public HeistCenter(HeistConfig heistConfig){\n        this.heistConfig = heistConfig;\n    }\n\n    private void initHeistPool(){\n        heistPool = Executors.newFixedThreadPool(heistConfig.getHeistNum());\n    }\n\n    private boolean init(){\n        if(heistPool==null){\n            initHeistPool();\n        }\n        getSpoil();\n        if(this.spoilNum<=0){\n            return false;\n        }\n        nowSpoil = new AtomicInteger(spoilBase);\n        spoilsLock = new CountDownLatch(spoilNum+1);\n        return true;\n    }\n\n    public void getSpoil(){\n        //从港口获取任务数量\n        try {\n            UlfUMCMessage msg;\n\n            while((msg = 
harbor.getSpoil(heistConfig.getRobTaskName()))==null);\n\n            if(msg.getFunction().equals(FunctionNamePool.SHUTDOWN)||msg.getFunction().equals(FunctionNamePool.ERROR)){\n                spoilNum = -1;\n            }\n            else{\n                int upLimit = Integer.parseInt(msg.getData().get(\"upLimit\").toString());\n                int lowLimit = Integer.parseInt(msg.getData().get(\"lowLimit\").toString());\n                spoilNum = upLimit-lowLimit;\n                spoilBase = lowLimit;\n            }\n        }\n        catch (Exception e) {\n            spoilNum = -1;\n        }\n    }\n\n    public void start() throws InterruptedException {\n        while (init()) {\n            logger.info(\"{} Robbing {}[{}~{}]\", SystemConfig.ServiceId,heistConfig.getRobTaskName(),spoilBase,spoilBase+spoilNum);\n            for ( int i = 0; i < heistConfig.getHeistNum(); i++ ) {\n                heistPool.submit(new Heist(this));\n            }\n            this.spoilsLock.await();\n        }\n    }\n\n\n}\n"
  },
  {
    "path": "TaskJuggler/src/main/java/com/genius/mq/Harbor.java",
    "content": "package com.genius.mq;\n\nimport com.genius.common.UlfUMC.*;\nimport com.genius.pool.FunctionNamePool;\nimport com.genius.pool.MqPool;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\nimport org.springframework.amqp.rabbit.annotation.RabbitListener;\nimport org.springframework.amqp.rabbit.core.RabbitTemplate;\nimport org.springframework.context.annotation.Lazy;\nimport org.springframework.stereotype.Component;\n\nimport javax.annotation.Resource;\nimport java.util.Map;\nimport java.util.Objects;\nimport java.util.concurrent.BlockingDeque;\nimport java.util.concurrent.LinkedBlockingDeque;\nimport java.util.concurrent.TimeUnit;\n\n/**\n * @author Genius\n * @date 2023/05/12 18:08\n **/\n@Lazy\n@Component\npublic class Harbor {\n\n    @Resource\n    RabbitTemplate mqPublisher;\n\n    private BlockingDeque<UlfUMCMessage> messageQueue = new LinkedBlockingDeque<>();\n\n    private Logger logger = LoggerFactory.getLogger(\"<Harbor>\");\n\n    public void stockWithGoods(String goodsName){\n\n        UlfUMCMessage message = MessageFactory.getMessageBuilder(MessageFactory.MessageBuilderType.SLAVE)\n                .method(UlfUMCMessageType.GET)\n                .func(FunctionNamePool.QUERY_TASK_RANGE)\n                .data(Map.of(\"task\", goodsName)).build();\n\n\n        mqPublisher.convertAndSend(MqPool.MASTER_TASK_SEND_CENTER,UlfUMCMessage.encode(message));\n    }\n\n    //TODO need MQ confirm to optimize Message robustness\n    @RabbitListener(queues = \"task.Nonaron-Kingpin-Prime.reply\")\n    private void getSpoilFromMaster(byte[] data){\n        if(!Objects.isNull(data)){\n            try {\n                UlfUMCMessage msg = UlfUMCMessage.decode(data);\n                logger.info(\"Get instructations from the boss :{}\",msg);\n                messageQueue.add(msg);\n            }catch (UlfUMCMessageException e){\n                ErrorMessageBuilder messageBuilder = (ErrorMessageBuilder) 
MessageFactory.getMessageBuilder(MessageFactory.MessageBuilderType.ERROR);\n                messageQueue.add(messageBuilder.error(e.getMessage()).build());\n            }\n        }\n    }\n\n    public UlfUMCMessage getSpoil(String name) throws InterruptedException {\n        UlfUMCMessage msg = messageQueue.poll(2L, TimeUnit.SECONDS);\n        if(msg == null){\n            stockWithGoods(name);\n            return null;\n        }\n        return msg;\n    }\n\n}\n"
  },
  {
    "path": "TestJar.cmd",
    "content": "PATH=%PATH%D:\\ProgramFiles\\ToolChains\\Java\\jdk11x64\\bin;\nSTART java -Xdebug -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=5005 -jar ./Saurons/Shadow/target/shadow-1.2.7.jar --server.port=8000"
  },
  {
    "path": "Walnuts/pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>sauron</artifactId>\n        <groupId>com.sauron</groupId>\n        <version>1.2.7</version>\n    </parent>\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnuts</groupId>\n    <artifactId>walnuts</artifactId>\n    <packaging>pom</packaging>\n    <version>2.5.1</version>\n\n    <modules>\n        <module>sailor-stream-distribute-sdk</module>\n    </modules>\n\n\n\n</project>"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/pom.xml",
    "content": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <parent>\n        <artifactId>walnuts</artifactId>\n        <groupId>com.walnuts</groupId>\n        <version>2.5.1</version>\n    </parent>\n    <build>\n        <plugins>\n            <plugin>\n                <groupId>org.apache.maven.plugins</groupId>\n                <artifactId>maven-compiler-plugin</artifactId>\n                <configuration>\n                    <source>11</source>\n                    <target>11</target>\n                </configuration>\n            </plugin>\n        </plugins>\n    </build>\n\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.walnut.sailor.sdd</groupId>\n    <artifactId>sailor-stream-distribute-sdk</artifactId>\n    <version>2.1.0</version>\n\n\n    <dependencies>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-test</artifactId>\n            <scope>test</scope>\n        </dependency>\n        <dependency>\n            <groupId>org.springframework.boot</groupId>\n            <artifactId>spring-boot-starter-web</artifactId>\n            <version>2.6.13</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone</groupId>\n            <artifactId>pinecone</artifactId>\n            <version>2.5.1</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-framework-runtime</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n   
     </dependency>\n        <dependency>\n            <groupId>com.pinecone.hydra.kernel</groupId>\n            <artifactId>hydra-message-broadcast</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n        <dependency>\n            <groupId>com.walnut.sparta.api.uac</groupId>\n            <artifactId>sparta-api-uac</artifactId>\n            <version>2.1.0</version>\n            <scope>compile</scope>\n        </dependency>\n    </dependencies>\n\n</project>\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/Dummy.java",
    "content": "package com.walnut.sailor.stream;\n\npublic class Dummy {\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/FileMultiDistributionIface.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.stereotype.Iface;\nimport com.walnut.sailor.stream.fm.protocol.RequestHead;\n\n@Iface(\"com.walnut.sailor.stream.fm.FileMultiDistributionIface\")\npublic interface FileMultiDistributionIface extends Pinenut {\n    void startDistribution( RequestHead head, String fileName, String directionRouteToken );\n\n    void transmitFileContent( RequestHead head, SFMFileFrame fileContent );\n\n    void fileTransmitComplete( RequestHead head );\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMConfig.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface SFMConfig extends Pinenut {\n\n    int getFileFrameSize();\n\n    long getSessionExpiredTimeMillis();\n\n    String getFileCloudDistributeTransmitTopic();\n\n    String getStorageDirectory();\n\n    String getFileServiceTransmitGroup();\n\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMDistributionController.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.framework.util.StringUtils;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\nimport com.walnut.sailor.stream.fm.event.SFMEventSubscriber;\nimport com.walnut.sailor.stream.fm.protocol.RequestHead;\nimport com.walnut.sailor.stream.fm.session.SFMTransaction;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.RandomAccessFile;\nimport java.nio.file.Path;\nimport java.util.Collection;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@Controller\n@AddressMapping( \"com.walnut.sailor.stream.fm.FileMultiDistributionIface.\" )\npublic class SFMDistributionController implements Pinenut {\n\n    protected Logger              logger;\n\n    protected SessionPhaser       sessionPhaser;\n\n    protected SFMSessionValidator sessionValidator;\n\n    protected SFMConfig           config;\n\n    protected SingleStreamFileMultiDistributionService distributionService;\n\n    public SFMDistributionController( SingleStreamFileMultiDistributionService service ) {\n        this.logger = LoggerFactory.getLogger( this.getClass() );\n        this.distributionService = service;\n        this.config              = service.getConfig();\n        this.sessionPhaser       = new SFMSessionPhaser();\n        this.sessionValidator    = new SFMSessionValidator( service );\n    }\n\n    protected String queryDirectoryPath( String directionRouteToken ) {\n        String directoryPath = this.config.getStorageDirectory();\n        if ( StringUtils.isNoneEmpty( directionRouteToken ) ) {\n            String sz = this.distributionService.queryDestinedDirectoryByToken( directionRouteToken );\n            if ( StringUtils.isNoneEmpty( sz ) ) {\n                directoryPath = sz;\n            }\n        }\n\n        return directoryPath;\n    }\n\n    protected Path formatFilePath( long 
sessionId, String fileName, String directionRouteToken ) {\n        String directoryPath = this.queryDirectoryPath( directionRouteToken );\n        this.sessionPhaser.registerDestinationDirectory( sessionId, directoryPath );\n        return Path.of( directoryPath, fileName );\n    }\n\n    protected Path formatFilePath( long sessionId, String fileName ) {\n        String directoryPath = this.config.getStorageDirectory();\n        String sz = this.sessionPhaser.getDestinationDirectory( sessionId );\n        if ( StringUtils.isNoneEmpty( sz ) ) {\n            directoryPath = sz;\n        }\n\n        return Path.of( directoryPath, fileName );\n    }\n\n    @AddressMapping( \"startDistribution\" )\n    public void startDistribution( RequestHead head, String fileName, String directionRouteToken ) throws IOException {\n        if( this.sessionPhaser.getSFMTransaction(head.getSessionId()) != null ){\n            this.logger.warn( \"[Warning] SFMService `startDistribution` session assertion compromised.\" );\n            this.sessionPhaser.removeSession( head.getSessionId() );\n        }\n\n        this.logger.info( \"SFMService invoked `startDistribution`. <Start>\" );\n        long sessionId = head.getSessionId();\n\n        Path desPath = this.formatFilePath( sessionId, fileName, directionRouteToken );\n        File newFile = new File( desPath.toString() );\n        if( newFile.length() != 0 ){\n            if ( !newFile.delete() ) {\n                throw new IOException( \"Purging file has compromised, what => \" + fileName );\n            }\n            this.logger.info( \"The destination file (\" + fileName + \") exists, and has been successfully eliminated. 
\" );\n        }\n\n        SFMTransaction SFMTransaction = new SFMTransaction();\n        SFMTransaction.setLastEventArrivedMills( System.currentTimeMillis() );\n        SFMTransaction.finishStartTransmit();\n        this.sessionPhaser.registerSessionTransaction( sessionId, SFMTransaction);\n        this.sessionPhaser.getSFMTransaction( sessionId ).finishStartTransmit();\n        this.sessionPhaser.registerFileHandler( sessionId, new RandomAccessFile(newFile, \"rw\") );\n\n        this.logger.info( \"SFMService invoked `startDistribution`. <Done>\" );\n    }\n\n    @AddressMapping( \"transmitFileContent\" )\n    public void transmitFileContent( RequestHead head, SFMFileFrame fileFrame ) throws IOException {\n        if ( this.assertTransmitTransaction( head, fileFrame.getFileName() ) ){\n            this.logger.warn( \"[Warning] SFMService `transmitFileContent` session assertion compromised.\" );\n            return;\n        }\n\n        this.logger.info( \"SFMService invoked `transmitFileContent`. 
<Start>\" );\n\n        long sessionId = head.getSessionId();\n        String fileName  = fileFrame.getFileName();\n        Path desPath     = this.formatFilePath( sessionId, fileName );\n        String szDesPath = desPath.toString();\n        File file = new File( szDesPath );\n        RandomAccessFile randomAccessFile = this.sessionPhaser.getFileHandler(sessionId);\n\n        randomAccessFile.seek( fileFrame.getOffset() );\n        randomAccessFile.write( fileFrame.getBytes() );\n        this.sessionPhaser.getSFMTransaction( sessionId ).setLastEventArrivedMills( System.currentTimeMillis() );\n\n        String desDirectory = this.sessionPhaser.getDestinationDirectory( sessionId );\n        if( file.length() == fileFrame.getFileSize() ){\n            randomAccessFile.close();\n            this.sessionPhaser.removeSession( sessionId );\n            this.sessionValidator.fileTransmitComplete( head );\n\n            this.notifyFileTransmitCompleteEventSubscribers( sessionId, szDesPath, fileName, desDirectory );\n        }\n\n        this.logger.info( \"SFMService invoked `transmitFileContent`. <Done>\" );\n    }\n\n    protected void notifyFileTransmitCompleteEventSubscribers( long sessionId, String szDesPath, String fileName, String desDirectory ) {\n\n        Collection<SFMEventSubscriber> subscribers = this.distributionService.fetchFileTransmitCompleteEventSubscribers();\n        for ( SFMEventSubscriber subscriber : subscribers ) {\n            subscriber.afterEventTriggered( szDesPath, fileName, desDirectory );\n        }\n    }\n\n    protected boolean assertTransmitTransaction( RequestHead head, String fileName ) {\n        long sessionId = head.getSessionId();\n        SFMTransaction transaction = this.sessionPhaser.getSFMTransaction( sessionId );\n\n        if( transaction == null ){\n            this.logger.warn( \"[Warning] SFMService `assertTransmitTransaction` session doesn`t existed. 
<Pass>\" );\n            this.sessionPhaser.removeSession( sessionId );\n            return true;\n        }\n\n        long currentTimeMillis = System.currentTimeMillis();\n        if( currentTimeMillis - transaction.getLastEventArrivedMills() > this.config.getSessionExpiredTimeMillis() ){\n            this.logger.warn( \"[Warning] SFMService `assertTransmitTransaction` session has expired. <Pass>\" );\n            this.rollBack( sessionId, fileName );\n            return true;\n        }\n\n        if( !transaction.isStartTransmit() ){\n            this.logger.warn( \"[Warning] SFMService `assertTransmitTransaction` illegal transaction stage, which should never has started yet. <Pass>\" );\n            this.rollBack( sessionId, fileName );\n            return true;\n        }\n\n        return false;\n    }\n\n    protected void rollBack( long sessionId, String fileName ) {\n        this.logger.warn( \"[Warning] SFMService `rollBack`. <Start>\" );\n\n        Path desPath = this.formatFilePath( sessionId, fileName );\n        File file = new File( desPath.toString() );\n        if ( !file.delete() ) {\n            throw new IllegalStateException( \"Purging file has compromised, what :\" + fileName );\n        }\n        this.sessionPhaser.removeSession( sessionId );\n\n        this.logger.warn( \"[Warning] SFMService `rollBack`. <Done>\" );\n    }\n\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMFileFrame.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class SFMFileFrame implements Pinenut {\n    private byte[] bytes;\n\n    private long fileSize;\n\n    private String fileName;\n\n    private long offset;\n\n    private int  bufferLength;\n\n    public SFMFileFrame(){}\n\n    public SFMFileFrame( byte[] bytes, long fileSize, String fileName, long offset, int bufferLength ){\n        this.bytes        = bytes;\n        this.fileSize     = fileSize;\n        this.fileName     = fileName;\n        this.offset       = offset;\n        this.bufferLength = bufferLength;\n    }\n\n\n    public byte[] getBytes(){\n        return this.bytes;\n    }\n\n    public void setBytes( byte[] bytes ){\n        this.bytes = bytes;\n    }\n\n    public long getFileSize(){\n        return this.fileSize;\n    }\n\n    public void setFileSize( long fileSize ){\n        this.fileSize = fileSize;\n    }\n\n    public String getFileName(){\n        return this.fileName;\n    }\n\n    public void setFileName( String fileName ){\n        this.fileName = fileName;\n    }\n\n    public long getOffset(){\n        return this.offset;\n    }\n\n    public void setOffset( long offset ){\n        this.offset = offset;\n    }\n\n    public long getBufferLength(){\n        return this.bufferLength;\n    }\n\n    public void setBufferLength( int bufferLength ){\n        this.bufferLength = bufferLength;\n    }\n\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMSessionPhaser.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.walnut.sailor.stream.fm.session.SFMTransaction;\n\nimport java.io.RandomAccessFile;\nimport java.util.concurrent.ConcurrentHashMap;\nimport java.util.concurrent.ConcurrentMap;\n\npublic class SFMSessionPhaser implements SessionPhaser {\n    private ConcurrentMap<Long, PhaseHandler>    sessionHandlers;\n\n    public SFMSessionPhaser() {\n        this.sessionHandlers = new ConcurrentHashMap<>();\n    }\n\n    @Override\n    public void registerSessionTransaction( Long sessionId, SFMTransaction transaction ) {\n        PhaseHandler handler = this.sessionHandlers.computeIfAbsent( sessionId, (k)->{\n            return new PhaseHandler();\n        } );\n        handler.sfmTransaction = transaction;\n    }\n\n    @Override\n    public void registerDestinationDirectory( Long sessionId, String destinationDirectory ) {\n        PhaseHandler handler = this.sessionHandlers.computeIfAbsent( sessionId, (k)->{\n            return new PhaseHandler();\n        } );\n        handler.destinationDirectory = destinationDirectory;\n    }\n\n    @Override\n    public SFMTransaction getSFMTransaction( Long sessionId ) {\n        PhaseHandler handler = this.sessionHandlers.get( sessionId );\n        if ( handler != null ) {\n            return handler.sfmTransaction;\n        }\n        return null;\n    }\n\n    @Override\n    public String getDestinationDirectory( Long sessionId ) {\n        PhaseHandler handler = this.sessionHandlers.get( sessionId );\n        if ( handler != null ) {\n            return handler.destinationDirectory;\n        }\n        return null;\n    }\n\n    @Override\n    public void removeSession( Long sessionId ) {\n        this.sessionHandlers.remove( sessionId );\n    }\n\n    @Override\n    public void registerFileHandler( Long sessionId, RandomAccessFile randomAccessFile ) {\n        PhaseHandler handler = this.sessionHandlers.computeIfAbsent( sessionId, (k)->{\n            return new 
PhaseHandler();\n        } );\n        handler.fileHandler = randomAccessFile;\n    }\n\n    @Override\n    public RandomAccessFile getFileHandler( Long sessionId ) {\n        PhaseHandler handler = this.sessionHandlers.get( sessionId );\n        if ( handler != null ) {\n            return handler.fileHandler;\n        }\n        return null;\n    }\n\n    public static class PhaseHandler {\n        public SFMTransaction    sfmTransaction;\n\n        public RandomAccessFile  fileHandler;\n\n        public String            destinationDirectory;\n    }\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMSessionValidator.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.walnut.sailor.stream.fm.protocol.RequestHead;\n\npublic class SFMSessionValidator implements SessionValidator {\n\n    protected BroadcastControlProducer producer;\n\n    protected SessionValidator sessionValidator;\n\n    protected SingleStreamFileMultiDistributionService distributionService;\n\n    public SFMSessionValidator( SingleStreamFileMultiDistributionService service ) {\n        this.producer            = service.getTransmitProducer();\n        this.distributionService = service;\n        this.sessionValidator    = this.producer.getIface( SessionValidator.class, this.distributionService.getConfig().getFileCloudDistributeTransmitTopic() );\n    }\n\n    @Override\n    public void fileTransmitComplete( RequestHead head ) {\n        this.sessionValidator.fileTransmitComplete( head );\n    }\n\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SFMSessionValidatorController.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.AddressMapping;\nimport com.pinecone.hydra.umct.stereotype.Controller;\nimport com.walnut.sailor.stream.fm.protocol.RequestHead;\n\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\n@Controller\n@AddressMapping( \"com.walnut.sailor.stream.fm.SessionValidator.\" )\npublic class SFMSessionValidatorController implements Pinenut {\n    protected Logger logger;\n\n    public SFMSessionValidatorController(){\n        this.logger = LoggerFactory.getLogger( this.getClass() );\n    }\n\n    @AddressMapping( \"fileTransmitComplete\" )\n    public void fileTransmitComplete( RequestHead head ){\n        this.logger.info( \"FileTransmitComplete sessionId：\" + head.getSessionId() );\n    }\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SailorFMConfig.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport java.util.Map;\n\npublic class SailorFMConfig implements SFMConfig {\n    protected String   mszStorageDirectory;\n\n    protected int      mnFileFrameSize;\n\n    protected long     mnSessionExpiredTimeMillis;\n\n    protected String   mszFileCloudDistributeTransmitTopic;\n\n    protected String   mszFileServiceTransmitGroup;\n\n    public SailorFMConfig ( Map<String, Object > configMap ) {\n        this.mnFileFrameSize                     = ( (Number)configMap.get(\"fileFrameSize\") ).intValue();\n        this.mnSessionExpiredTimeMillis          = ( (Number)configMap.get(\"sessionExpiredTimeMillis\") ).longValue();\n        this.mszFileCloudDistributeTransmitTopic = (String) configMap.get(\"fileCloudDistributeTransmitTopic\");\n        this.mszFileServiceTransmitGroup         = (String) configMap.get(\"fileServiceTransmitGroup\");\n        this.mszStorageDirectory                 = (String) configMap.get(\"storageDirectory\");\n    }\n\n    @Override\n    public int getFileFrameSize() {\n        return this.mnFileFrameSize;\n    }\n\n    @Override\n    public long getSessionExpiredTimeMillis() {\n        return this.mnSessionExpiredTimeMillis;\n    }\n\n    @Override\n    public String getFileCloudDistributeTransmitTopic() {\n        return this.mszFileCloudDistributeTransmitTopic;\n    }\n\n    @Override\n    public String getStorageDirectory() {\n        return this.mszStorageDirectory;\n    }\n\n    @Override\n    public String getFileServiceTransmitGroup() {\n        return this.mszFileServiceTransmitGroup;\n    }\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SailorFMDistributionService.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;\nimport com.walnut.sailor.stream.fm.event.SFMEventSubscriber;\nimport com.walnut.sailor.stream.fm.protocol.RequestHead;\n\nimport java.io.File;\nimport java.io.FileInputStream;\nimport java.io.IOException;\nimport java.nio.file.Path;\nimport java.util.ArrayList;\nimport java.util.Arrays;\nimport java.util.Collection;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.concurrent.ConcurrentHashMap;\n\npublic class SailorFMDistributionService implements SingleStreamFileMultiDistributionService {\n\n    protected UlfBroadcastControlNode       transmitClient;\n\n    protected BroadcastControlProducer      transmitProducer;\n\n    protected BroadcastControlConsumer      transmitConsumer;\n\n    protected SFMConfig                     config;\n\n    protected Map<String, String >          directionRoute;\n\n    protected List<SFMEventSubscriber>      fileTransmitCompleteEventSubscribers;\n\n    public SailorFMDistributionService( UlfBroadcastControlNode client, SFMConfig config ) {\n        this.transmitClient  = client;\n        this.config          = config;\n        this.directionRoute  = new ConcurrentHashMap<>();\n\n        this.fileTransmitCompleteEventSubscribers = new ArrayList<>();\n    }\n\n    @Override\n    public SingleStreamFileMultiDistributionService registerFileTransmitCompleteEventSubscriber( SFMEventSubscriber subscriber ) {\n        if ( this.hasStarted() ) {\n            throw new IllegalStateException( \"FileMultiDistributionService has already started.\" );\n        }\n\n        this.fileTransmitCompleteEventSubscribers.add( subscriber );\n        return this;\n    }\n\n    @Override\n    public SingleStreamFileMultiDistributionService 
deregisterFileTransmitCompleteEventSubscriber( SFMEventSubscriber subscriber ) {\n        if ( this.hasStarted() ) {\n            throw new IllegalStateException( \"FileMultiDistributionService has already started.\" );\n        }\n\n        this.fileTransmitCompleteEventSubscribers.remove( subscriber );\n        return this;\n    }\n\n    @Override\n    public Collection<SFMEventSubscriber> fetchFileTransmitCompleteEventSubscribers() {\n        return this.fileTransmitCompleteEventSubscribers;\n    }\n\n    @Override\n    public UlfBroadcastControlNode getTransmitClient() {\n        return this.transmitClient;\n    }\n\n    @Override\n    public BroadcastControlConsumer getTransmitConsumer() {\n        return this.transmitConsumer;\n    }\n\n    @Override\n    public BroadcastControlProducer getTransmitProducer() {\n        return this.transmitProducer;\n    }\n\n    @Override\n    public String queryDestinedDirectoryByToken( String token ) {\n        return this.directionRoute.get( token );\n    }\n\n    @Override\n    public void registerDirectionRoute( String token, String directoryPath ) {\n        this.directionRoute.put( token, directoryPath );\n    }\n\n    @Override\n    public void deregisterDirectionRoute( String token ) {\n        this.directionRoute.remove( token );\n    }\n\n    @Override\n    public boolean hasStarted() {\n        return this.transmitProducer != null;\n    }\n\n    @Override\n    public void start() throws UMBServiceException {\n        if ( !this.hasStarted() ) {\n            this.transmitProducer = this.transmitClient.createBroadcastControlProducer();\n            this.transmitConsumer = this.transmitClient.createBroadcastControlConsumer( this.config.getFileCloudDistributeTransmitTopic(), this.config.getFileServiceTransmitGroup() );\n            this.transmitProducer.compile( FileMultiDistributionIface.class,false );\n            this.transmitProducer.compile( SessionValidator.class, false );\n            
this.transmitConsumer.registerController( new SFMDistributionController( this ) );\n            this.transmitConsumer.start();\n            this.transmitProducer.start();\n        }\n    }\n\n    @Override\n    public void shutdown() {\n        if ( this.hasStarted() ) {\n            this.transmitConsumer.close();\n            this.transmitProducer.close();\n            this.transmitConsumer = null;\n            this.transmitProducer = null;\n        }\n    }\n\n    @Override\n    public SFMConfig getConfig() {\n        return this.config;\n    }\n\n    @Override\n    public void distributeFile( File file, String directionRouteToken ) throws IOException {\n        FileMultiDistributionIface distributionIface = this.transmitProducer.getIface( FileMultiDistributionIface.class, this.config.getFileCloudDistributeTransmitTopic() );\n        RequestHead head = RequestHead.newRequest().setSessionId(System.currentTimeMillis());\n        distributionIface.startDistribution( head, file.getName(), directionRouteToken );\n\n        long fileSize = file.length();\n        try ( FileInputStream fileInputStream = new FileInputStream(file) ) {\n            int bufferSize = this.config.getFileFrameSize();\n            byte[] buffer  = new byte[ bufferSize ];\n            int bytesRead;\n            long currentPosition = 0;\n\n            while ( (bytesRead = fileInputStream.read(buffer)) != -1 ) {\n                byte[] dataChunk = bytesRead == bufferSize ? 
buffer : Arrays.copyOf(buffer, bytesRead);\n                distributionIface.transmitFileContent(head, new SFMFileFrame( dataChunk, fileSize, file.getName(), currentPosition, bytesRead) );\n                currentPosition += bytesRead;\n            }\n        }\n    }\n\n    @Override\n    public void distributeFile( String szFileName, String originalDirectory, String directionRouteToken ) throws IOException {\n        Path targetPath = Path.of( originalDirectory, szFileName );\n        File file = new File( targetPath.toString() );\n\n        this.distributeFile( file, directionRouteToken );\n    }\n\n    /*@Override\n    public void fileDistributionJar(File file, String topic) throws IOException {\n        if (isJarFile(file)) {\n            stopCurrentJarProcess();\n            cleanExistingBackup(); // 清理旧备份\n            File backupFile = createBackup(file); // 创建新备份\n            this.fileDistribution(backupFile, topic);\n            this.currentJarFile = backupFile;\n            startJarProcess(this.currentJarFile);\n        } else {\n            this.fileDistribution(file, topic);\n        }\n    }\n\n    private File createBackup(File jarFile) throws IOException {\n        File backupDir = new File(BACKUP_DIR);\n        if (!backupDir.exists() && !backupDir.mkdirs()) {\n            throw new IOException(\"无法创建备份目录: \" + BACKUP_DIR);\n        }\n\n        String baseName = jarFile.getName().replaceFirst(\"\\\\.jar$\", \"\");\n        File backupFile = new File(backupDir, baseName + BACKUP_SUFFIX);\n\n        // 覆盖\n        try ( InputStream in = new FileInputStream(jarFile);\n             OutputStream out = new FileOutputStream(backupFile, false)) {\n            byte[] buffer = new byte[1024 * 1024];\n            int bytesRead;\n            while ((bytesRead = in.read(buffer)) != -1) {\n                out.write(buffer, 0, bytesRead);\n            }\n            System.out.println(\"已更新备份文件: \" + backupFile.getAbsolutePath());\n            return 
backupFile;\n        }\n    }\n\n    private void cleanExistingBackup() {\n        if (currentJarFile != null && currentJarFile.exists()) {\n            try {\n                Files.delete(currentJarFile.toPath());\n                System.out.println(\"已清理旧文件: \" + currentJarFile.getName());\n            } catch (IOException e) {\n                System.err.println(\"清理旧文件失败: \" + e.getMessage());\n            }\n        }\n    }\n\n    private boolean isJarFile(File file) {\n        return file != null && file.isFile() && file.getName().toLowerCase().endsWith(\".jar\");\n    }\n\n    private void stopCurrentJarProcess() {\n        if (currentJarProcess != null) {\n            currentJarProcess.destroyForcibly();\n            try {\n                if (!currentJarProcess.waitFor(10, TimeUnit.SECONDS)) {\n                    System.err.println(\"警告: 进程终止超时\");\n                }\n            } catch (InterruptedException e) {\n                Thread.currentThread().interrupt();\n                System.err.println(\"进程终止被中断\");\n            }\n            currentJarProcess = null;\n        }\n    }\n\n    private void startJarProcess(File jarFile) throws IOException {\n        if (!jarFile.exists()) {\n            throw new FileNotFoundException(\"JAR文件不存在: \" + jarFile.getAbsolutePath());\n        }\n        String javaPath = System.getProperty(\"java.home\") + File.separator + \"bin\" + File.separator + \"java\";\n        ProcessBuilder processBuilder = new ProcessBuilder(javaPath, \"-jar\", jarFile.getAbsolutePath());\n        processBuilder.redirectErrorStream(true);\n        try {\n            this.currentJarProcess = processBuilder.start();\n            new Thread(() -> readStream(currentJarProcess.getInputStream())).start();\n            System.out.println(\"已启动最新版本: \" + jarFile.getName());\n        } catch (IOException e) {\n            throw new IOException(\"进程启动失败: \" + e.getMessage(), e);\n        }\n    }\n\n    private void readStream(InputStream 
inputStream) {\n        try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) {\n            String line;\n            while ((line = reader.readLine()) != null) {\n                System.out.println(\"[JAR输出] \" + line);\n            }\n        } catch (IOException e) {\n            System.err.println(\"输出读取错误: \" + e.getMessage());\n        }\n    }*/\n}"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SessionPhaser.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.walnut.sailor.stream.fm.session.SFMTransaction;\n\nimport java.io.RandomAccessFile;\n\npublic interface SessionPhaser extends Pinenut {\n\n    void registerSessionTransaction( Long sessionId, SFMTransaction SFMTransaction );\n\n    SFMTransaction getSFMTransaction( Long sessionId );\n\n    void registerDestinationDirectory( Long sessionId, String destinationDirectory );\n\n    String getDestinationDirectory( Long sessionId );\n\n    void removeSession( Long sessionId );\n\n    void registerFileHandler( Long sessionId, RandomAccessFile randomAccessFile );\n\n    RandomAccessFile getFileHandler( Long sessionId );\n\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SessionValidator.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umct.stereotype.Iface;\nimport com.walnut.sailor.stream.fm.protocol.RequestHead;\n\n@Iface\npublic interface SessionValidator extends Pinenut {\n\n    void fileTransmitComplete( RequestHead head );\n\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/SingleStreamFileMultiDistributionService.java",
    "content": "package com.walnut.sailor.stream.fm;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Collection;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\nimport com.pinecone.hydra.umb.UMBServiceException;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlConsumer;\nimport com.pinecone.hydra.umb.broadcast.BroadcastControlProducer;\nimport com.pinecone.hydra.umb.wolf.UlfBroadcastControlNode;\nimport com.walnut.sailor.stream.fm.event.SFMEventSubscriber;\n\npublic interface SingleStreamFileMultiDistributionService extends Pinenut {\n\n    void distributeFile( File file, String directionRouteToken ) throws IOException;\n\n    void distributeFile( String szFileName, String originalDirectory, String directionRouteToken ) throws IOException;\n\n    boolean hasStarted();\n\n    void start() throws UMBServiceException;\n\n    void shutdown();\n\n    SFMConfig getConfig();\n\n    UlfBroadcastControlNode getTransmitClient() ;\n\n    BroadcastControlConsumer getTransmitConsumer() ;\n\n    BroadcastControlProducer getTransmitProducer() ;\n\n    String queryDestinedDirectoryByToken( String token );\n\n    void registerDirectionRoute( String token, String directoryPath );\n\n    void deregisterDirectionRoute( String token );\n\n    SingleStreamFileMultiDistributionService registerFileTransmitCompleteEventSubscriber( SFMEventSubscriber subscriber );\n\n    SingleStreamFileMultiDistributionService deregisterFileTransmitCompleteEventSubscriber( SFMEventSubscriber subscriber );\n\n    Collection<SFMEventSubscriber> fetchFileTransmitCompleteEventSubscribers();\n\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/event/SFMEventSubscriber.java",
    "content": "package com.walnut.sailor.stream.fm.event;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic interface SFMEventSubscriber extends Pinenut {\n    void afterEventTriggered( String path, String fileName, String directoryPath ) ;\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/protocol/RequestHead.java",
    "content": "package com.walnut.sailor.stream.fm.protocol;\n\npublic class RequestHead {\n    protected long sessionId;\n\n    public RequestHead setSessionId(long sessionId ) {\n        this.sessionId = sessionId;\n        return this;\n    }\n\n    public long getSessionId() {\n        return this.sessionId;\n    }\n\n\n    public static RequestHead newRequest() {\n        return new RequestHead();\n    }\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/protocol/UFMCFileMeta64.java",
    "content": "package com.walnut.sailor.stream.fm.protocol;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class UFMCFileMeta64 implements Pinenut {\n    protected String sourceName;\n\n    public UFMCFileMeta64(){}\n\n    public UFMCFileMeta64( String sourceName ){\n        this.sourceName = sourceName;\n    }\n\n    public String getSourceName(){\n        return this.sourceName;\n    }\n\n    public void setSourceName( String sourceName ){\n        this.sourceName = sourceName;\n    }\n}\n"
  },
  {
    "path": "Walnuts/sailor-stream-distribute-sdk/src/main/java/com/walnut/sailor/stream/fm/session/SFMTransaction.java",
    "content": "package com.walnut.sailor.stream.fm.session;\n\nimport java.util.concurrent.atomic.AtomicBoolean;\n\nimport com.pinecone.framework.system.prototype.Pinenut;\n\npublic class SFMTransaction implements Pinenut {\n    protected AtomicBoolean startTransmit;\n\n    protected AtomicBoolean transmitFileContent;\n\n    protected AtomicBoolean fileDistributionComplete;\n\n    protected long          lastEventArrivedMills;\n\n    public SFMTransaction() {\n        this.startTransmit = new AtomicBoolean(false);\n        this.transmitFileContent = new AtomicBoolean(false);\n        this.fileDistributionComplete = new AtomicBoolean(false);\n    }\n\n    public long getLastEventArrivedMills() {\n        return this.lastEventArrivedMills;\n    }\n\n    public void setLastEventArrivedMills( long lastEventArrivedMills ) {\n        this.lastEventArrivedMills = lastEventArrivedMills;\n    }\n\n    public boolean finishStartTransmit() {\n        return this.startTransmit.compareAndSet(false, true);\n    }\n\n    public boolean finishTransmitFileContent() {\n        return this.transmitFileContent.compareAndSet(false, true);\n    }\n\n    public boolean finishFileDistributionComplete() {\n        return this.fileDistributionComplete.compareAndSet(false, true);\n    }\n\n\n    public boolean isStartTransmit() {\n        return this.startTransmit.get();\n    }\n\n    public boolean isTransmitFileContent() {\n        return this.transmitFileContent.get();\n    }\n\n    public boolean isFileDistributionComplete() {\n        return this.fileDistributionComplete.get();\n    }\n}\n"
  },
  {
    "path": "gitignore.txt",
    "content": "# Compiled class file\n*.class\n\n# Log file\n*.log\n\n# BlueJ files\n*.ctxt\n\n# Mobile Tools for Java (J2ME)\n.mtj.tmp/\n\n# Package Files #\n*.jar\n*.war\n*.nar\n*.ear\n*.zip\n*.tar.gz\n*.rar\n\n# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml\nhs_err_pid*\nreplay_pid*\n"
  },
  {
    "path": "pom.xml",
    "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n         xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd\">\n    <modelVersion>4.0.0</modelVersion>\n\n    <groupId>com.sauron</groupId>\n    <artifactId>sauron</artifactId>\n    <packaging>pom</packaging>\n    <version>1.2.7</version>\n    <modules>\n        <module>pinecones</module>\n        <module>hydra</module>\n        <module>saurons</module>\n        <module>sparta</module>\n        <module>walnuts</module>\n        <module>odin</module>\n        <module>archcraft</module>\n        <module>redqueen</module>\n        <module>skynet</module>\n        <!--<module>TaskJuggler</module>-->\n        <!--<module>Messenger</module>-->\n        <!--<module>file</module>-->\n        <!--<module>Logger</module>-->\n    </modules>\n\n    <parent>\n        <groupId>org.springframework.boot</groupId>\n        <artifactId>spring-boot-starter-parent</artifactId>\n        <version>2.4.1</version>\n        <relativePath/>\n    </parent>\n\n    <properties>\n        <maven.compiler.source>11</maven.compiler.source>\n        <maven.compiler.target>11</maven.compiler.target>\n        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n        <spring-cloud.version>2020.0.3</spring-cloud.version>\n        <spring-cloud-start.version>3.0.1</spring-cloud-start.version>\n        <mysql.version>8.0.23</mysql.version>\n        <mybatis.version>2.1.1</mybatis.version>\n        <nacos.version>2.2.5.RELEASE</nacos.version>\n        <redis.version>2.3.9.RELEASE</redis.version>\n        <boot.version>2.3.9.RELEASE</boot.version>\n        <rockermq.version>2.3.9.RELEASE</rockermq.version>\n        <mybatis-plus.version>3.4.1</mybatis-plus.version>\n        <io-lettcue.version>6.1.6.RELEASE</io-lettcue.version>\n        
<commons-pool2.version>2.9.0</commons-pool2.version>\n        <fastjson.version>1.2.75</fastjson.version>\n    </properties>\n\n    <dependencyManagement>\n        <dependencies>\n            <!--springCloud-->\n            <dependency>\n                <groupId>org.springframework.cloud</groupId>\n                <artifactId>spring-cloud-dependencies</artifactId>\n                <version>${spring-cloud.version}</version>\n                <type>pom</type>\n                <scope>import</scope>\n            </dependency>\n            <dependency>\n                <groupId>org.springframework.cloud</groupId>\n                <artifactId>spring-cloud-starter-bootstrap</artifactId>\n                <version>${spring-cloud-start.version}</version>\n                <type>pom</type>\n                <scope>import</scope>\n            </dependency>\n            <!--nacos的管理依赖-->\n            <dependency>\n                <groupId>com.alibaba.cloud</groupId>\n                <artifactId>spring-cloud-alibaba-dependencies</artifactId>\n                <version>${nacos.version}</version>\n                <type>pom</type>\n                <scope>import</scope>\n            </dependency>\n            <!--mysql驱动-->\n            <dependency>\n                <groupId>mysql</groupId>\n                <artifactId>mysql-connector-java</artifactId>\n                <version>${mysql.version}</version>\n            </dependency>\n            <!--redis-->\n            <dependency>\n                <groupId>org.springframework.boot</groupId>\n                <artifactId>spring-boot-starter-data-redis</artifactId>\n                <exclusions>\n                    <exclusion>\n                        <artifactId>spring-data-redis</artifactId>\n                        <groupId>org.springframework.data</groupId>\n                    </exclusion>\n                    <exclusion>\n                        <artifactId>lettuce-core</artifactId>\n                        
<groupId>io.lettuce</groupId>\n                    </exclusion>\n                </exclusions>\n                <version>${redis.version}</version>\n            </dependency>\n            <!--springBoot-->\n            <dependency>\n                <groupId>org.springframework.boot</groupId>\n                <artifactId>spring-boot-starter-test</artifactId>\n                <version>${boot.version}</version>\n            </dependency>\n            <!--mybatis-->\n            <dependency>\n                <groupId>org.mybatis.spring.boot</groupId>\n                <artifactId>mybatis-spring-boot-starter</artifactId>\n                <version>${mybatis.version}</version>\n            </dependency>\n            <!--mybatis-plus-->\n            <dependency>\n                <groupId>com.baomidou</groupId>\n                <artifactId>mybatis-plus-boot-starter</artifactId>\n                <version>${mybatis-plus.version}</version>\n            </dependency>\n            <!--AMQP依赖，包含RabbitMQ-->\n            <dependency>\n                <groupId>org.springframework.boot</groupId>\n                <artifactId>spring-boot-starter-amqp</artifactId>\n                <version>2.3.9.RELEASE</version>\n            </dependency>\n\n            <dependency>\n                <groupId>io.lettuce</groupId>\n                <artifactId>lettuce-core</artifactId>\n                <version>${io-lettcue.version}</version>\n            </dependency>\n            <!--池化技术-->\n            <dependency>\n                <groupId>org.apache.commons</groupId>\n                <artifactId>commons-pool2</artifactId>\n                <version>${commons-pool2.version}</version>\n            </dependency>\n            <!--fastjson-->\n            <dependency>\n                <groupId>com.alibaba</groupId>\n                <artifactId>fastjson</artifactId>\n                <version>${fastjson.version}</version>\n            </dependency>\n            <dependency>\n                
<groupId>org.springframework.cloud</groupId>\n                <artifactId>spring-cloud-context</artifactId>\n                <version>3.0.3</version>\n            </dependency>\n\n            <dependency>\n                <groupId>org.springframework.cloud</groupId>\n                <artifactId>spring-cloud-commons</artifactId>\n                <version>3.0.3</version>\n            </dependency>\n        </dependencies>\n    </dependencyManagement>\n\n    <dependencies>\n        <!--tomcat-->\n<!--        <dependency>-->\n<!--            <groupId>org.apache.tomcat.embed</groupId>-->\n<!--            <artifactId>tomcat-embed-core</artifactId>-->\n<!--        </dependency>-->\n\n        <dependency>\n            <groupId>org.projectlombok</groupId>\n            <artifactId>lombok</artifactId>\n        </dependency>\n        <!--AOP-->\n        <dependency>\n            <groupId>org.aspectj</groupId>\n            <artifactId>aspectjweaver</artifactId>\n        </dependency>\n\n        <dependency>\n            <groupId>com.google.protobuf</groupId>\n            <artifactId>protobuf-java</artifactId>\n            <!--<version>3.23.4</version>-->\n            <version>4.28.2</version>\n        </dependency>\n    </dependencies>\n</project>\n"
  },
  {
    "path": "prompt/base_front_standard.md",
    "content": "# 编码规范\n本 Skill 对 vue 都生效，以下是核心规范：\n\n1. 纯中文展示：所有标题、描述、分类均使用流畅的中文，严禁出现“中文(英文)”的括号备注形式。\n2. 独立界面架构：摒弃传统的“面包屑导航”和“弹窗对话框”。采用全屏分块式布局或侧边栏联动式单页应用结构。\n3. 每个技能模块都是一个独立的视觉区域，点击后通过平滑滚动或视图切换直接展示详情，而非弹出窗口对话框。"
  },
  {
    "path": "prompt/coding_standard.md",
    "content": "# 编码规范\n本 Skill 对 C/C++、Java、ECMAScript 都生效，以下是核心风格规范：\n\n1. 不要省略任何 this，无论是函数调用还是成员变量访问等。\n2. 不要省略任何括号，例如：\n```java\n    switch ( exp ) {\n        case 1: {\n            doSomething();\n            break;\n        }\n        default: {\n            doSomethingElse();\n        }\n    }\n```\n3. 除了泛型括号之外（<T>），其他所有括号前后都要有空格，例如：\n3.1. if、for、while、switch 等控制流语句的括号前后都要有空格，例如：\n```java\n    if ( 2 * ( condition[ \"key\" ] + 4 ) ) {\n        doSomething();\n    }\n```\n4. 对于代码单行不超过140字符时，不要换行（字符串场景除外）\n5. 对于 C/C++、Java使用如下匈牙利命名法（除非上下文禁用）：\n5.1. 成员变量(基本数据类型需要叠加): mMember，mszString，mnNumber，mbFlag\n5.2. 字符串: szString，const char*: lpszString\n5.3. 任意数字（integer, decimal）: nNumber\n5.4. 逻辑（bool）: bFlag\n5.5. 结构体、对象: DataStruct dataStruct, mDataStruct（成员态） 全称即可，不用额外叠加\n5.6. 指针: pDataStruct, lpDataStruct\n5.7. 临时变量无须使用匈牙利命名法\n\n6. 抽象类：\n模板模式下强制使用：ArchClass（Archetypic Class）\n其他场景下使用：AbstractClass\n7. 对齐，例如：\n```\n    public class MyClass {\n        private int     mMemberVariable     = XXX;\n        private String  mszStringVariable   = XX ;\n    }\n    \n    function() {\n        var i  = 1234;\n        var sz = \"Hello World\";\n    }\n    \n```\n8. 单词：\n8.1 接口默认实现使用 Generic，除非明确给出。\n8.2 getOrCreate 语义统一使用 affirm，如affirmPath( String szPath )\n8.3 子类对基类映射实现，统一使用 evince，例如：\n```java\n    public interface ElementNode extends TaskTreeNode {\n        default AppElement evinceAppElement() { return null; }\n    \n        default TaskElement evinceTaskElement() { return null; }\n    }\n\n    public interface TaskElement extends ElementNode {\n        @Override\n        default TaskElement evinceTaskElement() {\n            return this;\n        }\n    }\n```\n\n\n\n\n"
  },
  {
    "path": "prompt/mysql_table_standard.md",
    "content": "# MYSQL 建表与SQL编码规范\n本 Skill 对MYSQL生效，以下是核心风格规范：\n\n## 目标\n建立统一、可审计、可扩展、适合高并发 + 分库分表演进的 MySQL DDL 风格。\n\n## 1. 标准结构模板（强制规范）\n```sql\nCREATE TABLE `{{table_name}}` (\n  `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT COMMENT '主键ID',\n\n  -- 业务字段写在此处\n\n  `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',\n  `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间',\n\n  PRIMARY KEY (`id`) USING BTREE\n) ENGINE=InnoDB \n  DEFAULT CHARSET=utf8 \n  ROW_FORMAT=DYNAMIC;\n```\n\n### 2. 时间字段规范（强制）\n所有业务表必须包含：\n```sql\ncreate_time\nupdate_time\n```\n---\n### 3. 存储引擎规范\n必须：\n```\nENGINE=InnoDB\n```\n\n### 4. 字符集规范\n默认都使用 utf8，内容类大text字段使用utf8mb4\n\n### 5. 所有字段必须带 COMMENT\n禁止无注释字段\n\n### 6. SQL 语法\n6.1 禁用 SELECT *\n6.2 所有表别名不允许使用单字母\n6.3 表别名可以使用别名缩写，不要使用全名，此外对于非联合查询无需使用别名和前缀表名。\n6.4 关键字大写。\n6.5 冲突Key使用字符'\\`' 如 '`key`' 标记。\ne.g.\n```sql\nSELECT `key` FROM table WHERE id > 1234;\n```\n\n```sql\nSELECT t1.`key`, t2.`k2` FROM table as t1 LEFT JOIN table2 as t2 ON t1.x = t2.x WHERE t1.id > 1234;\n```\n\n\n### 7. Ibatis \n7.1 Param 不要使用匈牙利命名法，如 @Param(\"guid\") GUID guid\n7.2 GUID直接用GUID，如 @Param(\"guid\") GUID guid\n7.3 XML 中特殊字符使用如：\n<![CDATA[ <= ]]>"
  },
  {
    "path": "system/setup/CenterMessagram.json5",
    "content": "{\n  \"Engine\"            : \"com.pinecone.tritium.messagron.Messagron\",\n  \"Enable\"            : true,\n  \"ExpressFactory\"    : \"com.pinecone.framework.util.lang.GenericDynamicFactory\",\n\n  \"Expresses\"         : {\n    \"WolfMCExpress\": {\n      \"Engine\": \"com.pinecone.hydra.umct.WolfMCExpress\"\n    }\n  }\n}"
  },
  {
    "path": "system/setup/ServersScope.json5",
    "content": "{\n  \"CentralCluster\": {\n    \"Kingpin\": { // Intranet out-bound master\n      \"Name\"              : \"BeanServerKingpin\",\n      \"NickName\"          : \"SerKingpin\",\n      \"LocalDomain\"       : \"B-ServerKingpin\",\n      \"WideDomain\"        : \"\",\n      \"Location\"          : \"LocalNetwork\",\n      \"Enable\"            : true,\n      \"Station\"           : \"Master\",\n      \"DevArchitecture\"   : [ \"CPU\", \"MEM\", \"SSD\", \"HDD\" ]\n    },\n    \"Paladin\": [ // Intranet sub-net master\n      {\n        \"Name\"             : \"BeanServerPaladinMasterTask\",\n        \"NickName\"         : \"SerPaladinMT\",\n        \"LocalDomain\"      : \"B-ServerPaladin\",\n        \"WideDomain\"       : \"\",\n        \"Location\"         : \"LocalNetwork\",\n        \"Enable\"           : true,\n        \"Station\"          : \"MasterTask\",\n        \"DevArchitecture\"  : [ \"CPU\", \"MEM\", \"SSD\", \"HDD\" ]\n      },\n      {\n        \"Name\"             : \"BeanServerPaladinMasterProcess\",\n        \"NickName\"         : \"SerPaladinMP\",\n        \"LocalDomain\"      : \"Bean-PC-PH317\",\n        \"WideDomain\"       : \"\",\n        \"Location\"         : \"LocalNetwork\",\n        \"Enable\"           : true,\n        \"Station\"          : \"MasterProcess\",\n        \"DevArchitecture\"  : [ \"CPU\", \"MEM\", \"GPU\", \"NVMe\", \"SSD\" ]\n      }\n    ],\n    \"Minion\": [\n      {\n        \"Name\"             : \"BeanServerNutRoot\",\n        \"NickName\"         : \"SerNutRoot\",\n        \"LocalDomain\"      : \"B-ServerNutRoot\",\n        \"WideDomain\"       : \"root.nutgit.com\",\n        \"Location\"         : \"WideNetwork\",\n        \"Enable\"           : true,\n        \"Station\"          : \"MasterOutBoundRouter\",\n        \"DevArchitecture\"  : [ \"NET\" ]\n      },\n      {\n        \"Name\"             : \"BeanServerNutNode1\",\n        \"NickName\"         : \"SerNutNode1\",\n        \"LocalDomain\"     
 : \"B-ServerNutNode1\",\n        \"WideDomain\"       : \"node1.nutgit.com\",\n        \"Location\"         : \"WideNetwork\",\n        \"Enable\"           : true,\n        \"Station\"          : \"AuxiliaryOutBoundRouter\",\n        \"DevArchitecture\"  : [ \"NET\" ]\n      }\n    ],\n    \"Slave\": [\n\n    ]\n  },\n  \"EdgeChains\": {\n\n  }\n}"
  },
  {
    "path": "system/setup/SpringBootApplication.json5",
    "content": "{\n  \"server\": {\n    \"port\": 8080,\n    \"servlet\": {\n      \"context-path\": \"/\"\n    }\n  },\n\n  \"spring\": {\n    \"servlet\": {\n      \"multipart\": {\n        \"max-file-size\": \"4096MB\",\n        \"max-request-size\": \"4096MB\"\n      }\n    },\n\n    \"datasource\": {\n//        \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true\",\n//        \"username\": \"root\",\n//        \"password\": \"123456\",\n        \"url\": \"jdbc:mysql://b-serverkingpin:33062/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true\",\n        \"username\": \"root\",\n        \"password\": \"\",\n        \"driver-class-name\": \"com.mysql.cj.jdbc.Driver\",\n\n    },\n    \"mybatis\":{\n      \"mapper-locations\": \"classpath:mapper/*.xml\"\n    },\n\n    \"redis\": {\n      \"host\": \"localhost\",\n      \"port\": 6379,\n      \"password\": \"Genius123\",\n      \"lettuce\": {\n        \"pool\": {\n          \"max-active\": 8,\n          \"max-idle\": 8,\n          \"min-idle\": 0,\n          \"max-wait\": 100,\n          \"time-between-eviction-runs\": \"10s\"\n        }\n      }\n    },\n    \"rabbitmq\": {\n      \"host\": \"node1.nutgit.com\", // 主机名\n      \"port\": 13394, // 端口\n      \"virtual-host\": \"/wolf\", // 虚拟主机\n      \"username\": \"test\", // 用户名\n      \"password\": \"test\" // 密码\n    },\n    \"sparta\": {\n      \"datasource\": \"mysql\"\n    }\n  }\n}"
  },
  {
    "path": "system/setup/StorageSystem.json5",
    "content": "{\n  \"PathScope\" : {\n\n    \"Reinterpret\": {\n      \"ARBOmnium\"          : \"\\\\\\\\${SerKingpin}\\\\ARBOmnium\",\n      \"OmniumEnderChest\"   : \"${ARBOmnium}/EnderChest\",\n      \"OmniumFacility\"     : \"${OmniumEnderChest}/Facility\",\n      \"OmniumMegaH\"        : \"${OmniumEnderChest}/MegaH\",\n      \"OmniumKingpin0\"     : \"${OmniumEnderChest}/Kingpin0\",\n      \"OmniumKingStream0\"  : \"${OmniumEnderChest}/SauronL3Stream\",\n\n      \"PaladinHive\"        : \"\\\\\\\\${SerPaladinMT}\\\\PaladinHive\",\n      \"EnderPaladin\"       : \"${PaladinHive}/EnderChest\",\n      \"PalSauotion0\"       : \"${EnderPaladin}/Sauotion0\",\n      \"PalSauotion1\"       : \"${EnderPaladin}/Sauotion1\",\n      \"PalSaurmion2\"       : \"${EnderPaladin}/Saurmion2\",\n      \"PalSauotion3\"       : \"${EnderPaladin}/Sauotion3\",\n      \"PalSaurtion4\"       : \"${EnderPaladin}/Saurtion4\",\n      \"PalSauotion5\"       : \"${EnderPaladin}/Sauotion5\",\n      \"PalSauegaion0\"      : \"${EnderPaladin}/Sauegaion0\",\n      \"PalSauegaion1\"      : \"${EnderPaladin}/Sauegaion1\",\n\n\n      \"SystemConfRoot\"     : \"./system/setup\"\n    },\n\n  },\n\n  \"Protocols\" : {\n    \"files\"  : { \"Provide\": \"\" }, // Default local filesystem.\n    \"http\"   : { \"Provide\": \"org.apache.commons.vfs2.provider.http5.Http5FileProvider\"  },\n    \"https\"  : { \"Provide\": \"org.apache.commons.vfs2.provider.http5s.Http5sFileProvider\" },\n    \"smb\"    : { \"Provide\": \"\" }, // Using windows.\n    \"webdav\" : { \"Provide\": \"org.apache.commons.vfs2.provider.webdav.WebdavFileProvider\" },\n    \"hdfs\"   : { \"Provide\": \"\" }, // TODO\n  },\n\n  \"CacheStrategy\" : \"ON_CALL\",\n  \"FilesCache\"    : \"org.apache.commons.vfs2.cache.SoftRefFilesCache\",\n\n}"
  },
  {
    "path": "system/setup/config.json5",
    "content": "{\n  \"System\" : {\n    \"MinionName\"    : \"ShadowKingpin\",\n    \"ServiceID\"     : \"Shadow-Kingpin-Prime\",\n    \"ServiceArch\"   : \"Master\", // Master, Paladin, Minion, Slave\n    \"MasterQuery\"   : false,\n    \"DebugMode\"     : true,\n\n    // For, C/C++ Edition, Java todo.\n    \"Tracer\"        : {\n      \"ConsoleTrace\"       : true,\n      \"InfoTracer\"         : \"./system/logs/${pid}_SysTrace.log\",\n      \"ErrTracer\"          : \"./system/logs/${pid}_SysError.log\",\n    },\n\n    \"Middleware\"    : {\n      \"RDBs\": {\n        \"Configs\": {\n          \"Engine\": \"com.pinecone.tritium.ally.rdb.GenericIbatisClient\",\n          \"Enable\": true,\n          \"JDBC\": {\n            \"Driver\"   : \"com.mysql.cj.jdbc.Driver\",\n            \"ExURL\"    : \"autoReconnect=true\"\n          },\n          \"Ibatis\": {\n            \"Environment\"       : \"development\",\n            \"DataSource\"        : \"org.apache.ibatis.datasource.pooled.PooledDataSource\",\n            \"TransactionFactory\": \"org.apache.ibatis.transaction.jdbc.JdbcTransactionFactory\",\n            \"PooledConfig\"      : {\n              \"InitialSize\": 0,\n              \"MaxActive\"  : 20,\n              \"MaxIdle\"    : 20,\n              \"MinIdle\"    : 1,\n              \"MaxWait\"    : 60000\n            },\n            \"DataAccessObject\"  : {\n              \"Scanner\"    : \"com.pinecone.slime.jelly.source.ibatis.IbatisDAOScanner\",\n              \"ScanScopes\" : []\n            }\n          }\n        },\n\n        \"Databases\": {\n          \"MySQLKingSystem\": {\n            \"dbType\"     : \"mysql\",\n            \"host\"       : \"b-serverkingpin\",\n            \"username\"   : \"root\",\n            \"password\"   : \"\",\n            \"database\"   : \"hydranium\",\n            \"port\"       : 3306,\n            \"charset\"    : \"utf8\",\n            \"tablePrefix\": \"nona_\",\n            \"Enable\"     : true\n          
},\n          \"MySQLKingHydranium\": {\n            \"dbType\"     : \"mysql\",\n            \"host\"       : \"b-serverkingpin\",\n            \"username\"   : \"root\",\n            \"password\"   : \"\",\n            \"database\"   : \"hydranium\",\n            \"port\"       : 33062,\n            \"charset\"    : \"utf8\",\n            \"tablePrefix\": \"nona_\",\n            \"Enable\"     : true\n          },\n          \"MySQLKingData0\": {\n            \"dbType\"     : \"mysql\",\n            \"host\"       : \"b-serverkingpin\",\n            \"username\"   : \"root\",\n            \"password\"   : \"\",\n            \"database\"   : \"nonaron\",\n            \"port\"       : 33062,\n            \"charset\"    : \"utf8\",\n            \"tablePrefix\": \"nona_\",\n            \"Enable\"     : true\n          }\n        }\n      },\n\n      \"Indexables\": {\n        \"Configs\": {\n\n        },\n\n        \"Databases\": {\n          \"RedisKingpin\": {\n            \"host\"        : \"b-serverkingpin\",\n            \"password\"    : \"\",\n            \"port\"        : 6379,\n            \"Enable\"      : true\n          }\n        }\n      },\n\n      \"Messagers\": {\n        \"Configs\": {\n          \"Enable\"                 : true,\n          \"IsRecipient\"            : false,  // Is recipient or server.\n          \"AutoStartInMasterMode\"  : false   // For client or messenger.\n        },\n\n        \"Messagers\": {\n          \"CenterMessagram\" : \"./system/setup/CenterMessagram.json5\",\n\n          \"RabbitMQKingpin\" : {\n            \"host\"        : \"b-serverkingpin\",\n            \"username\"    : \"undefined\",\n            \"password\"    : \"\",\n            \"port\"        : 5672,\n            \"vhost\"       : \"/wolf\",\n            \"frame_max\"   : 131072,\n            \"channel_max\" : 0,\n            \"heartbeat\"   : 0,\n            \"Enable\"      : false\n          },\n\n          \"WolfKing\"        : { // For RPC-Server\n  
          \"host\"                : \"0.0.0.0\",\n            \"port\"                : 5777,\n            \"username\"            : \"undefined\",     // TODO, UMCs\n            \"password\"            : \"\",  // TODO, UMCs\n            \"SocketTimeout\"       : 800,\n            \"KeepAliveTimeout\"    : 3, //3600,\n            \"MaximumConnections\"  : 1e6,\n            \"ExtraHeadCoder\"      : \"com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder\",\n            \"DefaultExtraEncode\"  : \"JSONString\", // JSONString / Binary\n            \"Engine\"              : \"com.pinecone.hydra.umc.wolf.server.WolfMCServer\",\n            \"MessageHandler\"      : \"WolfMCExpress\",\n            \"IsRecipient\"         : true,\n            \"Enable\"              : false\n          },\n\n          \"WolfMCKingpin\"   : { // For RPC-Client\n            \"host\"                   : \"localhost\",\n            \"port\"                   : 5777,\n            \"SocketTimeout\"          : 800,\n            \"KeepAliveTimeout\"       : 3, //3600, // 0 to close keep-alive\n            \"ParallelChannels\"       : 5,\n            \"ExtraHeadCoder\"         : \"com.pinecone.hydra.umc.msg.extra.GenericExtraHeadCoder\",\n            \"DefaultExtraEncode\"     : \"JSONString\", // JSONString / Binary\n            \"Engine\"                 : \"com.pinecone.hydra.umc.wolf.client.WolfMCClient\",\n            \"MessageHandler\"         : \"WolfMCExpress\",\n            \"AutoReconnect\"          : true,\n            \"EnableHeartbeat\"        : false,\n            \"HeartbeatInterval\"      : 2000,\n            \"Enable\"                 : true\n          }\n        }\n      }\n    },\n\n    \"Servers\"       : \"./system/setup/ServersScope.json5\",\n\n    \"StorageSystem\" : \"./system/setup/StorageSystem.json5\",\n\n    \"WolfKingOFS\"   : { // TODO, OFS\n      \"ParentServiceID\"        : \"\", // \"\" is the root server service(Kingpin), else is the paladin server service.\n    
  \"EnableWolfUMService\"    : true,\n      \"Port\"                   : 7577,\n      \"KeepAliveTimeout\"       : 3600, // 0 to close keep-alive\n      \"MaximumClients\"         : 10000,\n      \"MaxSoloQueue\"           : 100,  // Inner solo task-thread maximum MessageQueue length.\n      \"UIOFS\"                  : {     // Ulfhedinn Unified Interface OSS file system\n        \"EnableDirectPost\"        : true,        // Enable any-size file frame, e.g. Tiny 4K file (1MB <).\n        \"FileFrameSize\"           : 1048576,     // Multiple files of FileFrame size 1 MB\n        \"FilePagedPoolSize\"       : 104857600,   // FileBufferPagedPool:: Buffered file caches size(100 MB) in resident memory waiting for store in disk.\n        \"ForceBufferedWrite\"      : false        // Any disk-IO must store in resident memory first waiting for write.\n      }\n    },\n\n    \"SystemDaemon\"  : {\n      \"SurveillanceTime\": 1000\n    },\n\n    \"SystemCabinet\" : {\n      \"MicroSystems\": {\n        //\"Crusade\": \"./system/setup/cabinet/Crusade.json5\"\n      }\n    }\n  },\n\n  \"MasterOrchestrator\": {\n    \"Orchestration\"         : {\n      \"Name\": \"ServgramOrchestrator\",\n      \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n\n      // Servgram-Classes scanning package-scopes\n      \"ServgramScopes\": [\n        \"com.sauron.heist.heistron\"\n      ],\n\n      \"Transactions\": [\n        { \"Name\": \"Heist\", \"Type\": \"Sequential\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/, \"Primary\": true }\n      ]\n    },\n\n    \"Servgrams\": {\n\n      \"Heist\": \"./system/setup/heist.json5\",\n\n      \"Sparta\": \"./system/setup/SpringBootApplication.json5\",\n\n      \"SpartaAccountService\": \"./system/setup/sparta/AccountServiceSpring.json5\",\n\n      \"SpartaUCDNService\": \"./system/setup/sparta/SpartaUCDNService.json5\",\n\n    }\n  }\n}"
  },
  {
    "path": "system/setup/heist.json5",
    "content": "{\n  \"ExertAffinity\"    : \"SerKingpin\", // Which server to execute, if is in joint-mission which must located at ONLY SerKingpin.\n  \"TraceLifecycle\"   : true,\n  \"EnableCmdCall\"    : true,         // Designate with startup command: --heist==Heist\n\n  \"Orchestration\"    : {\n    \"Name\": \"HeistronOrchestrator\",\n    \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n\n    \"DirectlyLoad\" : {\n      \"Prefix\": [],\n      \"Suffix\": [ \"Heist\" ]\n    },\n\n    \"ServgramScopes\": [\n      \"com.sauron.shadow.heists\",\n      \"com.sauron.zeron.chronicle\",\n      \"com.sauron.crusade.heists\",\n      \"com.sauron.zeron.heists\"\n    ],\n\n    \"Transactions\": [\n      //{ \"Name\": \"heist_intel_personage_news\", \"Type\": \"Parallel\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n\n      //{ \"Name\": \"FuturesDelivery\", \"Type\": \"Parallel\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n      //{ \"Name\": \"TradingChronicle\", \"Type\": \"Parallel\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n      //{ \"Name\": \"FestivalEvent\", \"Type\": \"Parallel\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n      //{ \"Name\": \"FiscalEvent\", \"Type\": \"Parallel\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n      //{ \"Name\": \"Chronicle\", \"Type\": \"Parallel\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n      //{ \"Name\": \"Prometheus\", \"Type\": \"Parallel\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n      { \"Name\": \"Void\", \"Type\": \"Sequential\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n      //{ \"Name\": \"Void\", \"Type\": \"Sequential\" /* Enum: { 
Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n      //{ \"Name\": \"Void\", \"Type\": \"Sequential\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ }\n    ]\n  },\n\n  \"ConfigScope\"      : {\n    \"LocalConfigs\": {\n      \"PathScopes\": [\n        \"${SystemConfRoot}/heists/\",\n        \"${SystemConfRoot}/chronicle/\"\n      ],\n      \"FileExtends\": [\n        \"json5\", \"jplus\", \"json\"\n      ]\n    }\n  },\n\n  \"TemplatedConfig\"  : {\n    \"HeistArch\"             : \"Radium\",\n    \"HeistType\"             : \"Templated\",\n    \"HeistURL\"             : \"\",\n\n    \"StorageDriver\"         : \"${OmniumKingpin0}\",\n    \"IndexPath\"             : \"${this.DriverPath}Sauron\\\\${ProjectName}\\\\index\\\\\",\n    \"SpoilPath\"             : \"${this.DriverPath}Sauron\\\\${ProjectName}\\\\Pages\\\\\",\n    \"FragBase\"              : 0,\n    \"FragRange\"             : 100,\n\n    \"WorkingPath\"           : \".\\\\Sauron\\\\Heist\\\\${ProjectName}\\\\\",\n    \"InfoTracer\"            : \"heistInfo.log\",\n    \"ErrTracer\"             : \"heistErr.log\",\n    \"SysTracer\"             : \"heistSys.log\",\n    \"DyingMsgFile\"          : \"dyingMsg.json5\",  // Enable full-status tombstone dying msg. 
Empty string to close.\n    \"MasterConfQuery\"       : false, // Enable Task::ConfQuery from master server, else use next arguments.\n    \"TaskFrom\"              : 0,\n    \"TaskTo\"                : 0,\n    \"MaximumThread\"         : 5,\n    \"ReaverTasks\"           : 10000,\n    \"EnableRawIdQuery\"      : false, // 'True' to open the MutualIndexFile.\n    \"MutualIndexFile\"       : \"index_list.json5\",\n\n    \"FromDeathPoint\"        : true,\n    \"Metier\"                : \"Stalker\", // Reaver(To loot), Stalker(To search index), Embezzler(To profile and store spoils)\n\n    \"FailureConf\"           : {\n      \"FailedFileSize\"         : 2000,\n      \"FileRetrieveTime\"       : 3        // 1 for no retrieve retrying.\n    },\n\n    \"ExParentHeist\"         : \"\",\n\n    \"Children\"              : {\n\n    },\n\n    \"Orchestration\"         : {\n      \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n\n      \"ServgramScopes\": [\n      ],\n\n      \"Transactions\": [\n      ]\n    }\n  },\n\n  \"Heists\"           : {\n    \"Void\"              : \"${SystemConfRoot}/heists/Void.json5\",\n\n    \"Urukhai\"           : \"${SystemConfRoot}/heists/UrukhaiHeists.json5\",\n\n    \"Wikipedia\"         : \"${SystemConfRoot}/heists/Wikipedia.json5\",\n\n    \"IMDB\"              : {\n      \"HeistType\"             : \"Templated\",\n      \"HeistURL\"             : \"https://www.imdb.com\",\n      \"IndexPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\IMDB\\\\index\\\\\",\n      \"SpoilPath\"             :  \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\IMDB\\\\Pages\\\\\",\n      \"FragBase\"              : 10000,\n      \"FragRange\"             : 1000000,\n\n      \"WorkingPath\"           : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Sauron\\\\Heist\\\\IMDB\\\\\",\n      \"TaskFrom\"              : 0,\n      \"TaskTo\"                
: 11064506, // 11064506\n      \"MaximumThread\"         : 5,\n      \"ReaverTasks\"           : 1000,\n\n      \"FromDeathPoint\"        : true,\n      \"Metier\"                : \"Stalker\",\n\n      \"IndexSniffer\"          : {\n        \"Type\"                  : \"NextPageBased\",\n        \"IndexApiHref\"          : \"/search/title/?release_date=1970-01-01,2023-01-01&count=250\", // IMDB Only support Video since 1970. To update use last date.\n        \"NextSelector\"          : \".lister-page-next.next-page\",\n        \"NextPageKeyWord\"       : \"Next »\"\n      }\n    },\n\n    \"DouBan\"            : \"${SystemConfRoot}/heists/DouBan.json5\",\n\n    \"NeteaseMusic\"      : \"${SystemConfRoot}/heists/NeteaseMusic.json5\",\n\n    \"DownCC\"            : \"${SystemConfRoot}/heists/DownCC.json5\",\n\n    \"GeoNames\"          : \"${SystemConfRoot}/heists/GeoNames.json5\",\n\n    \"ArtStation\"        : \"${SystemConfRoot}/heists/ArtStation.json5\",\n\n    \"MobyGames\"         : \"${SystemConfRoot}/heists/MobyGames.json5\",\n\n    \"AZLyrics\"          : {\n      \"HeistType\"             : \"Templated\",\n      \"HeistURL\"             : \"https://www.azlyrics.com\",\n      \"IndexPath\"             : \"${OmniumFacility}\\\\Heist\\\\AZLyrics\\\\index\\\\\",\n      \"SpoilPath\"             : \"${OmniumFacility}\\\\Heist\\\\AZLyrics\\\\pages\\\\\",\n      \"WorkingPath\"           : \"${OmniumFacility}\\\\Sauron\\\\Heist\\\\AZLyrics\\\\\",\n      \"TaskFrom\"              : 0,\n      \"TaskTo\"                : 500000, // 2491995\n      \"MaximumThread\"         : 5,\n      \"Metier\"                : \"Stalker\",\n\n      \"FailureConf\"           : {\n        \"FailedFileSize\"         : 10000,\n        \"FileRetrieveTime\"       : 1        // 1 for no retrieve retrying.\n      },\n\n      \"BrowserSimConf\"        : {\n        \"HeadConfigGroup\"        : [\n          {\n            \"MaxHTTPPackageSize\"     : 1024,\n            \"AgentName\"           
   : \"Mozilla/5.0\",\n            \"UserAgentHead\"          : \"User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n          }\n        ],\n        \"GroupConfusion\"         : true,\n        \"EnableRandomDelay\"      : true,\n        \"RandomDelayMin\"         : 25000,\n        \"RandomDelayMax\"         : 30000\n      },\n\n      \"IndexSniffer\"          : {\n        \"Type\"                  : \"NextPageBased\",\n        \"IndexApiHref\"          : \"/wiki/Special:AllPages\",\n        \"NextSelector\"          : \".mw-allpages-nav\",\n        \"NextPageKeyWord\"       : \"Next page\"\n      }\n    },\n\n    \"LyricsTranslate\"   : {\n      \"HeistType\"             : \"Sophisticate\",\n      \"HeistURL\"             : \"https://lyricstranslate.com\",\n      \"IndexPath\"             : \"${OmniumFacility}\\\\Heist\\\\LyricsTranslate\\\\index\\\\\",\n      \"SpoilPath\"             : \"${OmniumKingpin0}/Sauron/Heist/LyricsTranslate/Pages/\",\n      \"FragBase\"              : 10000,\n      \"FragRange\"             : 1000000,\n\n      \"WorkingPath\"           : \"${OmniumFacility}\\\\Sauron\\\\Heist\\\\LyricsTranslate\\\\\",\n      \"TaskFrom\"              : 0,\n      \"TaskTo\"                : 2355032, //2355032\n      \"MaximumThread\"         : 10,\n      \"ReaverTasks\"           : 1000,\n      \"MutualIndexFile\"       : \"MutualSongIndex.json5\",\n\n      \"FromDeathPoint\"        : true,\n      //\"Metier\"                : \"Stalker\",\n      \"Metier\"                : \"Reaver\",\n\n      \"ArtistsAPIHref\"        : \"/en/artists\",\n      \"IndexSniffer\"          : {\n        \"Type\"                  : \"NextPageBased\",\n        \"IndexApiHref\"          : \"\",\n        \"NextSelector\"          : \".pager-next\",\n        \"NextPageKeyWord\"       : \"next\",\n        // Defaulted\n        \"StorageFmt\"            : \"index_${mutualId}.html\",\n        
\"NextHrefFmt\"           : \"${DomainHref}${this}\"\n      }\n    },\n\n    \"LatinIsSimple\"     : {\n      \"HeistType\"             : \"Templated\",\n      \"HeistURL\"             : \"https://www.latin-is-simple.com\",\n      \"IndexPath\"             : \"${OmniumFacility}\\\\Heist\\\\LatinIsSimple\\\\index\\\\\",\n      \"SpoilPath\"             : \"${OmniumFacility}\\\\Heist\\\\LatinIsSimple\\\\Pages\\\\\",\n      \"FragBase\"              : 1000,\n      \"FragRange\"             : 100000,\n\n      \"WorkingPath\"           : \"${OmniumFacility}\\\\Sauron\\\\Heist\\\\LatinIsSimple\\\\\",\n      \"TaskFrom\"              : 0,\n      \"TaskTo\"                : 49980,\n      \"MaximumThread\"         : 5,\n      \"ReaverTasks\"           : 1000,\n\n      \"FromDeathPoint\"        : true,\n      \"Metier\"                : \"Reaver\",\n      //\"Metier\"                : \"Stalker\",\n      //\"Metier\"                : \"Embezzler\",\n      \"CategoryIdxMap\"        : [ \"Nouns\", \"Verbs\", \"Adjectives\", \"Adverbs\", \"OtherWords\", \"Phrases\" ],\n\n      \"ArtistsAPIHref\"        : \"/en/artists\",\n      \"IndexSniffer\"          : {\n        \"Type\"                  : \"NextPageBased\",\n        \"IndexApiHref\"          : \"\",\n        \"NextSelector\"          : \".pager-next\",\n        \"NextPageKeyWord\"       : \"next\",\n        // Defaulted\n        \"StorageFmt\"            : \"index_${mutualId}.html\",\n        \"NextHrefFmt\"           : \"${DomainHref}${this}\"\n      }\n    },\n\n    \"Prometheus\"        : \"${SystemConfRoot}/heists/Prometheus.json5\",\n\n    \"FiscalEvent\"       : \"${SystemConfRoot}/heists/FiscalEvent.json5\",\n\n    \"FestivalEvent\"     : \"${SystemConfRoot}/heists/FestivalEvent.json5\",\n\n\n\n    \"TradingChronicle\"  : \"${SystemConfRoot}/heists/TradingChronicle.json5\",\n\n    \"FuturesDelivery\"   : \"${SystemConfRoot}/heists/FuturesDelivery.json5\"\n  },\n\n  \"Components\"       : {\n\n    \"HttpBrowser\"     
 : {\n      \"HeadConfigGroup\"        : [\n        {\n          \"AgentName\"              : \"Mozilla/5.0\",\n          \"UserAgent\"              : \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n        },\n        {\n          \"AgentName\"              : \"Baiduspider\",\n          \"UserAgent\"              : \"Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n        }\n      ],\n      \"AgentConfusion\"         : true,\n\n      \"SystemProxy\"            : {\n        \"host\": \"127.0.0.1\",\n        \"port\": 7890\n      },\n      \"ProxyGroup\"             : [\n        { \"host\": \"127.0.0.1\", \"port\": 7890 }\n      ],\n\n      //\"ProxyStrategy\"          : \"NoProxy\",\n      \"ProxyStrategy\"          : \"SystemOnly\",\n      //\"ProxyStrategy\"          : \"ProxyGroup\",\n      \"EnableRandomDelay\"      : false,\n      \"RandomDelayMin\"         : 9200,\n      \"RandomDelayMax\"         : 10500,\n      \"SocketTimeout\"          : 60000,\n\n      \"Charset\"                : \"UTF-8\"\n    }\n\n  }\n\n\n}"
  },
  {
    "path": "system/setup/heists/Apesk.json5",
    "content": "{\n  \"HeistArch\"                : \"Radium\",\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                : \"https://apesk.com\",\n  //\"WorkingMode\"              : \"\", // Exhaust all possible inlet pages (Artist pages as inlet)\n\n//  \"SubHref\"                  : \"/p/result_for_gzh.asp?rid=\",\n//  \"IndexPath\"                : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/Apesk/mbti/index/\",\n//  \"SpoilPath\"                : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/Apesk/mbti/pages/\",\n//  \"TaskFrom\"                 : 20419496,\n//  \"TaskTo\"                   : 50274406,\n\n  \"SubHref\"                  : \"/p/result_for_gzh.asp?rid=\",\n  \"IndexPath\"                : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/Apesk/big5/index/\",\n  \"SpoilPath\"                : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/Apesk/big5/pages/\",\n\n  \"WorkingPath\"              : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/Apesk/\",\n\n  \"MaximumThread\"            : 1,\n  \"FragBase\"                 : 10000,\n  \"FragRange\"                : 1000000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 1000,\n    \"FileRetrieveTime\"         : 1\n  },\n\n  \"HttpBrowser\"              : {\n    \"HeadConfigGroup\"        : [\n      {\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgent\"              : \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n      },\n      {\n        \"AgentName\"              : \"Baiduspider\",\n        \"UserAgent\"              : \"Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n      }\n    ],\n    \"AgentConfusion\"         : true,\n\n    \"SystemProxy\"            : {\n      \"host\": \"127.0.0.1\",\n      
\"port\": 7890\n    },\n    \"ProxyGroup\"             : [\n      { \"host\": \"127.0.0.1\", \"port\": 7890 }\n    ],\n\n    //\"ProxyStrategy\"          : \"NoProxy\",\n    \"ProxyStrategy\"          : \"SystemOnly\",\n    //\"ProxyStrategy\"          : \"ProxyGroup\",\n    \"EnableRandomDelay\"      : false,\n    \"RandomDelayMin\"         : 1000,\n    \"RandomDelayMax\"         : 2000,\n    \"SocketTimeout\"          : 20000,\n\n    \"Charset\"                : \"UTF-8\"\n  },\n\n  \"FromDeathPoint\"           : true,\n  //\"Metier\"                   : \"Stalker\",\n  \"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"QueryCookie\"              : \"__cf_bm=ESNiAVu2p_Y6rt7WJ7vJ7y33tb127eCuyKzMP8Rm7oc-1687420936-0-Ac+moMROCH1X8OGPiE5dFWa+RHj2/FPNrWoFM/s02gXSOrvdxl/x5663yOEIvOgeWxGcFIpZT4fYrytMMCPcuTEbOWFXbbgRNOUr65juI3JH\",\n  \"InletSitemap\"             : \"sitemap-index.xml\"\n}"
  },
  {
    "path": "system/setup/heists/ArtStation.json5",
    "content": "{\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                : \"https://www.artstation.com\",\n  //\"WorkingMode\"              : \"\", // Exhaust all possible inlet pages (Artist pages as inlet)\n\n  \"IndexPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/ArtStation/inlet/index/\",\n  \"SpoilPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/ArtStation/inlet/pages/\",\n\n  \"WorkingPath\"              : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/ArtStation/\",\n  \"TaskFrom\"                 : 0000000,\n  \"TaskTo\"                   : 1383854,\n  \"MaximumThread\"            : 5,\n  \"FragBase\"                 : 10000,\n  \"FragRange\"                : 1000000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 1000,\n    \"FileRetrieveTime\"         : 1\n  },\n\n  \"HttpBrowser\"              : {\n    \"HeadConfigGroup\"        : [\n      {\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgent\"              : \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n      },\n      {\n        \"AgentName\"              : \"Baiduspider\",\n        \"UserAgent\"              : \"Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n      }\n    ],\n    \"AgentConfusion\"         : true,\n\n    \"SystemProxy\"            : {\n      \"host\": \"127.0.0.1\",\n      \"port\": 7890\n    },\n    \"ProxyGroup\"             : [\n      { \"host\": \"127.0.0.1\", \"port\": 7890 }\n    ],\n\n    //\"ProxyStrategy\"          : \"NoProxy\",\n    \"ProxyStrategy\"          : \"SystemOnly\",\n    //\"ProxyStrategy\"          : \"ProxyGroup\",\n    \"EnableRandomDelay\"      : false,\n    \"RandomDelayMin\"         : 1000,\n    \"RandomDelayMax\"         : 2000,\n    
\"SocketTimeout\"          : 20000,\n\n    \"Charset\"                : \"UTF-8\"\n  },\n\n  \"FromDeathPoint\"           : true,\n  \"Metier\"                   : \"Stalker\",\n  //\"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"QueryCookie\"              : \"__cf_bm=ESNiAVu2p_Y6rt7WJ7vJ7y33tb127eCuyKzMP8Rm7oc-1687420936-0-Ac+moMROCH1X8OGPiE5dFWa+RHj2/FPNrWoFM/s02gXSOrvdxl/x5663yOEIvOgeWxGcFIpZT4fYrytMMCPcuTEbOWFXbbgRNOUr65juI3JH\",\n\n  \"SiteMaps\": {\n   \"sitemap_inlet\": {\n      \"href\": \"https://www.artstation.com/sitemap.xml\"\n    },\n  }\n}"
  },
  {
    "path": "system/setup/heists/Chronicle.json5",
    "content": "{\n  \"HeistType\"                : \"Period\",\n\n  \"ChronicPerAcc\"            : 750, // ms\n  \"ChronicPeriods\"           : [\n    \"0 0 0 * * *\", \"0 0 3 * * *\", \"0 0 6 * * *\", \"0 0 9 * * *\",\n    \"0 0 12 * * *\", \"0 0 15 * * *\", \"0 0 18 * * *\", \"0 0 21 * * *\"\n\n    //,\"* * * * * *\"\n  ],\n\n  \"PrimaryRDB\"               : \"MySQLKingData0\",\n\n  \"NewsDataTable\"            : \"nona_news_index_chronic\",\n\n  \"RaiderMarshaling\"         : {\n    \"DirectlyLoad\" : {\n      \"Prefix\": [],\n      \"Suffix\": [ \"Raider\", \"Clerk\" ]\n    },\n\n    \"RaiderScopes\": [\n      \"com.sauron.zeron.heists\"\n    ]\n  },\n\n  \"FromDeathPoint\"           : true,\n  //\"Metier\"                   : \"Stalker\",\n  \"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n\n  \"Children\"                  : {\n    \"Newstron\": {\n      \"RaiderMarshaling\"      : {\n        \"UsingSedation\"      : true,\n        \"UsingUniformFeast\"  : true,\n\n        \"Raiders\"        : {\n\n          \"SimpleAjaxBased\"      : {\n            \"Tasks\": [\n              { \"ObjectName\": \"JinRiTouTiaoTops\",  \"Api\": \"https://www.toutiao.com/hot-event/hot-board/?origin=toutiao_pc\" },\n              { \"ObjectName\": \"BaiduIndex\"      ,  \"Api\": \"https://index.baidu.com/Interface/homePage/wiseConfig\"          },\n              { \"ObjectName\": \"WeiboHot\"        ,  \"Api\": \"https://weibo.com/ajax/side/hotSearch\"                          },\n              { \"ObjectName\": \"BaiduTiebaHot\"   ,  \"Api\": \"https://tieba.baidu.com/hottopic/browse/topicList\"              }\n            ]\n          },\n\n          \"Zhihu\"                : {\n            \"HotlineApi\": \"https://www.zhihu.com/api/v4/creators/rank/hot?domain=0\",  //https://www.zhihu.com/api/v4/creators/rank/hot?domain=0&limit=20&offset=20&period=hour\n            \"Topstory\"  : 
\"https://www.zhihu.com/api/v3/feed/topstory/hot-lists/total?limit=100\",\n            \"TopSearch\" : \"https://www.zhihu.com/api/v4/creators/rank/hot?domain=0&period=hour\",\n\n            \"Global\"    : {\n              \"IndexFrom\" : 0,\n              \"IndexTo\"   : 100,\n              \"IndexStep\" : 100\n            }\n          },\n\n          \"Baidu\"                : {\n//            \"ChronicPeriods\"           : [\n//              \"0 0 0 * * *\", \"* * * * * *\"\n//            ],\n\n            \"TopHref\": \"https://top.baidu.com/board?tab=realtime\"\n          },\n\n          \"BaiduTieba\"           : {\n            \"TopHref\": \"https://search.prod.di.api.cnn.io/content?q=&size=%d&from=0&page=1&sort=newest&request_id=%s\"\n          },\n\n          \"Google\"               : {\n            \"NewsHref\": \"https://www.google.com/search?q=news&source=lnms&tbm=nws&start=\", \"TopN\": 5\n          },\n\n          \"CNN\"                  : {\n            \"NewsHref\": \"https://search.prod.di.api.cnn.io/content?q=&size=%d&from=0&page=1&sort=newest&request_id=%s\", \"TopN\": 50, \"request_id\": \"pdx-search\"\n          },\n\n\n\n          \"Personage\"            : {\n            \"ChronicPeriods\"           : [\n              \"0/30 * * * * *\"\n            ]\n          }\n        },\n\n        \"MarshalingList\" : [\n          \"SimpleAjaxBased\",\n          \"Zhihu\",\n          \"Baidu\",\n          \"Google\",\n          \"CNN\",\n\n\n          \"Personage\"\n        ]\n      },\n\n      \"Orchestration\"         : {\n        \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n\n        \"Transactions\": [\n          //{ \"Name\": \"Jesus\", \"Type\": \"Sequential\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n        ]\n      },\n\n      \"HttpBrowser\"      : {\n      }\n    }\n  },\n\n  \"Orchestration\"         : {\n    \"Name\": \"ChronicleOrchestrator\",\n    \"Type\": 
\"Parallel\", // Enum: { Sequential, Parallel, Loop }\n\n    \"Transactions\": [\n      { \"Name\": \"Newstron\", \"Type\": \"Parallel\"  },\n      //{ \"Name\": \"Newstron\", \"Type\": \"Parallel\"  }\n    ]\n  },\n\n  \"HttpBrowser\"      : {\n  }\n}"
  },
  {
    "path": "system/setup/heists/DeviantArt.json5",
    "content": "{\n  \"HeistArch\"                : \"Radium\",\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                : \"https://www.deviantart.com\", // https://www.deviantart.com/sitemap-index.xml.gz\n  //\"WorkingMode\"              : \"\", // Exhaust all possible inlet pages (Artist pages as inlet)\n\n  \"IndexPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/DeviantArt/inlet/index/\",\n  \"SpoilPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/DeviantArt/inlet/pages/\",\n\n  \"WorkingPath\"              : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/DeviantArt/\",\n  \"TaskFrom\"                 : 0000000,\n  \"TaskTo\"                   : 1383854,\n  \"MaximumThread\"            : 5,\n  \"FragBase\"                 : 10000,\n  \"FragRange\"                : 1000000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 1000,\n    \"FileRetrieveTime\"         : 1\n  },\n\n  \"HttpBrowser\"              : {\n    \"HeadConfigGroup\"        : [\n      {\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgent\"              : \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n      },\n      {\n        \"AgentName\"              : \"Baiduspider\",\n        \"UserAgent\"              : \"Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n      }\n    ],\n    \"AgentConfusion\"         : true,\n\n    \"SystemProxy\"            : {\n      \"host\": \"127.0.0.1\",\n      \"port\": 7890\n    },\n    \"ProxyGroup\"             : [\n      { \"host\": \"127.0.0.1\", \"port\": 7890 }\n    ],\n\n    //\"ProxyStrategy\"          : \"NoProxy\",\n    \"ProxyStrategy\"          : \"SystemOnly\",\n    //\"ProxyStrategy\"          : \"ProxyGroup\",\n    \"EnableRandomDelay\"      : false,\n  
  \"RandomDelayMin\"         : 1000,\n    \"RandomDelayMax\"         : 2000,\n    \"SocketTimeout\"          : 20000,\n\n    \"Charset\"                : \"UTF-8\"\n  },\n\n  \"FromDeathPoint\"           : true,\n  \"Metier\"                   : \"Stalker\",\n  //\"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"QueryCookie\"              : \"__cf_bm=ESNiAVu2p_Y6rt7WJ7vJ7y33tb127eCuyKzMP8Rm7oc-1687420936-0-Ac+moMROCH1X8OGPiE5dFWa+RHj2/FPNrWoFM/s02gXSOrvdxl/x5663yOEIvOgeWxGcFIpZT4fYrytMMCPcuTEbOWFXbbgRNOUr65juI3JH\",\n  \"InletSitemap\"             : \"sitemap-index.xml\"\n}"
  },
  {
    "path": "system/setup/heists/DouBan.json5",
    "content": "{\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                : \"https://www.douban.com\",\n  \"HeistTopicName\"           : \"book\",//\"music\", // Null using id-index.\n  \"IndexPath\"                : \"${OmniumKingpin0}/Sauron/Heist/DouBan/index/\",\n  \"SpoilPath\"                : \"${OmniumKingpin0}/Sauron/Heist/DouBan/Pages/\",\n  //\"SpoilPath\"                : \"${OmniumKingStream0}/Sauron/Heist/DouBan/Pages/\",\n\n  \"WorkingPath\"              : \"${ARBOmnium}/Sauron/Heist/DouBan/\",\n  \"TaskFrom\"                 : 4900000,\n  \"TaskTo\"                   : 5811459, //1002823, // book 5811459 ilmen 38091 location 37372 movie 7131301 music 1002823 www 420893077 8700 435014842\n  //\"TaskFrom\"                 : 0,\n  //\"TaskTo\"                   : 100, //1002823, // book 5811459 ilmen 38091 location 37372 movie 7131301 music 1002823 www 420893077 8700 435014842\n  \"MaximumThread\"            : 1, // Do not too fast, fuck douban !\n  \"ReaverTasks\"              : 1000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 14000,\n    \"FileRetrieveTime\"         : 1\n  },\n  \"FromDeathPoint\"           : true,\n  \"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"BrowserSimConf\"        : {\n    \"HeadConfigGroup\"        : [\n      //                {\n      //                  \"MaxHTTPPackageSize\"     : 1024,\n      //                  \"AgentName\"              : \"Mozilla/5.0\",\n      //                  \"UserAgentHead\"          : \"User-Agent: Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n      //                },\n      {\n        \"MaxHTTPPackageSize\"     : 1024,\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgentHead\"          : \"User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 
Safari/537.36\\n\"\n      }\n    ],\n    \"GroupConfusion\"         : true,\n    \"EnableRandomDelay\"      : true,\n    \"RandomDelayMin\"         : 9200,\n    \"RandomDelayMax\"         : 10500\n  },\n\n  \"EdgeGuerrillaHeist\"    : {\n    \"EnableGuerrilla\"        : false,\n    \"KingpinHost\"            : \"127.0.0.1\",\n    \"KingpinPort\"            : 9901\n  },\n\n  \"SitemapApiHref\"           : \"sitemap_index.xml\",\n  \"MapIndexPath\"             : \"${OmniumFacility}/Heist/DouBan/\",\n  \"UsingBuffIdxQuery\"        : true,\n  \"BuffIdxQuerySize\"         : 10000,   // -1 for SELECT ALL.\n}"
  },
  {
    "path": "system/setup/heists/DownloadCNet.json5",
    "content": "{\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                : \"https://download.cnet.com\",\n  //\"WorkingMode\"              : \"\", // Exhaust all possible inlet pages (Artist pages as inlet)\n\n  \"IndexPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion3/Sauron/Heist/DownloadCNet/index/\",\n  \"SpoilPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion3/Sauron/Heist/DownloadCNet/pages/\",\n\n  \"WorkingPath\"              : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/DownloadCNet/\",\n  \"TaskFrom\"                 : 0000000,\n  \"TaskTo\"                   : 1383854,\n  \"MaximumThread\"            : 5,\n  \"FragBase\"                 : 10000,\n  \"FragRange\"                : 1000000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 1000,\n    \"FileRetrieveTime\"         : 1\n  },\n\n  \"HttpBrowser\"              : {\n    \"HeadConfigGroup\"        : [\n      {\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgent\"              : \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n      },\n      {\n        \"AgentName\"              : \"Baiduspider\",\n        \"UserAgent\"              : \"Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n      }\n    ],\n    \"AgentConfusion\"         : true,\n\n    \"SystemProxy\"            : {\n      \"host\": \"127.0.0.1\",\n      \"port\": 7890\n    },\n    \"ProxyGroup\"             : [\n      { \"host\": \"127.0.0.1\", \"port\": 7890 }\n    ],\n\n    //\"ProxyStrategy\"          : \"NoProxy\",\n    \"ProxyStrategy\"          : \"SystemOnly\",\n    //\"ProxyStrategy\"          : \"ProxyGroup\",\n    \"EnableRandomDelay\"      : false,\n    \"RandomDelayMin\"         : 1000,\n    \"RandomDelayMax\"         : 2000,\n    \"SocketTimeout\"     
     : 20000,\n\n    \"Charset\"                : \"UTF-8\"\n  },\n\n  \"FromDeathPoint\"           : true,\n  //\"Metier\"                   : \"Stalker\",\n  \"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"SiteMaps\": {\n   \"products\": {\n      \"href\": \"https://download.cnet.com/sitemaps/products_index.xml\"\n    },\n  }\n}"
  },
  {
    "path": "system/setup/heists/NeteaseMusic.json5",
    "content": "{\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                : \"https://music.163.com\",\n  //\"WorkingMode\"              : \"\", // Exhaust all possible inlet pages (Artist pages as inlet)\n  \"SubPathNode\"              : \"Pages/\",\n\n  \"WorkingMode\"              : \"Expansion\",\n  //\"SubPathNode\"              : \"${this.WorkingMode}/\",\n  \"IndexPath\"                : \"${OmniumKingpin0}/Sauron/Heist/NeteaseMusic/index/\",\n  //\"SpoilPath\"                : \"I:/Sauron/Heist/NeteaseMusic/${this.SubPathNode}\",\n  \"SpoilPath\"                : \"${OmniumKingpin0}/Sauron/Heist/NeteaseMusic/Pages/\",\n\n  \"WorkingPath\"              : \"${ARBOmnium}/Sauron/Heist/NeteaseMusic/\",\n  \"TaskFrom\"                 : 1000000,    // 1872 -> Due to the range of Netease is indefinite, this record is the min inlet point.\n  \"TaskTo\"                   : 10000000, // 13959689 !!! -> Exhaustion methodology to exhaust all possible artists, this record is an overestimated compact upper bound.\n  \"MaximumThread\"            : 4,\n  \"ReaverTasks\"              : 1000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 14000, // For Netease abort this validates metric, but use key-word to validate.\n    \"FileRetrieveTime\"         : 1\n  },\n  \"FromDeathPoint\"           : true,\n  //\"Metier\"                   : \"Stalker\",\n  \"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"BrowserSimConf\"        : {\n    \"HeadConfigGroup\"        : [\n      {\n        \"MaxHTTPPackageSize\"     : 64,\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgentHead\"          : \"User-Agent: Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n      },\n      {\n        \"MaxHTTPPackageSize\"     : 64,\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgentHead\"          : \"User-Agent: 
Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n      }\n    ],\n    \"GroupConfusion\"         : true,\n    \"EnableRandomDelay\"      : false,\n    \"RandomDelayMin\"         : 9200,\n    \"RandomDelayMax\"         : 10500,\n\n    \"CompressHTTPCon\"        : false, // Only read compressed stream-content, but will disable persistent-connection.\n  }\n}"
  },
  {
    "path": "system/setup/heists/PubChem.json5",
    "content": "{\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                : \"https://pubchem.ncbi.nlm.nih.gov\",\n  //\"WorkingMode\"              : \"\", // Exhaust all possible inlet pages (Artist pages as inlet)\n\n  \"IndexPath\"                : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/PubChem/index/\",\n  \"SpoilPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion4/Sauron/Heist/PubChem/pages/\",\n\n  \"WorkingPath\"              : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/PubChem/\",\n  \"TaskFrom\"                 : 00111000,\n  \"TaskTo\"                   : 01000000, //20501203\n  \"MaximumThread\"            : 8,\n  \"FragBase\"                 : 10000,\n  \"FragRange\"                : 1000000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 1000,\n    \"FileRetrieveTime\"         : 1\n  },\n\n  \"HttpBrowser\"              : {\n    \"HeadConfigGroup\"        : [\n      {\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgent\"              : \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n      },\n      {\n        \"AgentName\"              : \"Baiduspider\",\n        \"UserAgent\"              : \"Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n      }\n    ],\n    \"AgentConfusion\"         : true,\n\n    \"SystemProxy\"            : {\n      \"host\": \"127.0.0.1\",\n      \"port\": 7890\n    },\n    \"ProxyGroup\"             : [\n      { \"host\": \"127.0.0.1\", \"port\": 7890 }\n    ],\n\n    //\"ProxyStrategy\"          : \"NoProxy\",\n    \"ProxyStrategy\"          : \"SystemOnly\",\n    //\"ProxyStrategy\"          : \"ProxyGroup\",\n    \"EnableRandomDelay\"      : true,\n    \"RandomDelayMin\"         : 1000,\n    \"RandomDelayMax\"         : 2000,\n    \"SocketTimeout\"    
      : 20000,\n\n    \"Charset\"                : \"UTF-8\"\n  },\n\n  \"FromDeathPoint\"           : true,\n  //\"Metier\"                   : \"Stalker\",\n  \"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"SiteMaps\": {\n    annotation: {\n      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=annotation/sitemap_index.xml\"\n    },\n//    \"assay\": {\n//      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=assay/sitemap_index.xml\"\n//    },\n////    \"source\": {\n////      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=source/sitemap.xml\"\n////    },\n//    \"gene\": {\n//      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=gene/sitemap_index.xml\"\n//    },\n//    \"protein\": {\n//      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=protein/sitemap_index.xml\"\n//    },\n//    \"patent\": {\n//      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=patent/sitemap_index.xml\"\n//    },\n////    \"concept\": {\n////      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=concept/sitemap_1.xml\"\n////    },\n//    \"cell\": {\n//      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=cell/sitemap_index.xml\"\n//    },\n//    \"taxonomy\": {\n//      \"href\": \"https://pubchem.ncbi.nlm.nih.gov/sitemap/sitemap.cgi?p=taxonomy/sitemap_index.xml\"\n//    }\n  }\n}"
  },
  {
    "path": "system/setup/heists/Steam.json5",
    "content": "{\n  \"HeistArch\"                : \"Radium\",\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                : \"https://store.steampowered.com\", //\"https://store.steampowered.com/search/?ndl=1&ignore_preferences=1&page=\",\n\n  //\"WorkingMode\"              : \"\", // Exhaust all possible inlet pages (Artist pages as inlet)\n\n  \"IndexPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/Steam/inlet/index/\",\n  \"SpoilPath\"                : \"\\\\\\\\B-ServerPaladin/PaladinHive/EnderChest/Sauotion5/Sauron/Heist/Steam/inlet/pages/\",\n\n  \"WorkingPath\"              : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/Steam/\",\n  \"TaskFrom\"                 : 0000000,\n  \"TaskTo\"                   : 0006169,\n  \"MaximumThread\"            : 8,\n  \"FragBase\"                 : 10000,\n  \"FragRange\"                : 1000000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 1000,\n    \"FileRetrieveTime\"         : 1\n  },\n\n  \"HttpBrowser\"              : {\n    \"HeadConfigGroup\"        : [\n      {\n        \"AgentName\"              : \"Mozilla/5.0\",\n        \"UserAgent\"              : \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.4750.0 Safari/537.36\\n\"\n      },\n      {\n        \"AgentName\"              : \"Baiduspider\",\n        \"UserAgent\"              : \"Mozilla/5.0 (compatible; Baiduspider/2.0;+http://www.baidu.com/search/spider.html)\\n\"\n      }\n    ],\n    \"AgentConfusion\"         : true,\n\n    \"SystemProxy\"            : {\n      \"host\": \"127.0.0.1\",\n      \"port\": 7890\n    },\n    \"ProxyGroup\"             : [\n      { \"host\": \"127.0.0.1\", \"port\": 7890 }\n    ],\n\n    //\"ProxyStrategy\"          : \"NoProxy\",\n    \"ProxyStrategy\"          : \"SystemOnly\",\n    //\"ProxyStrategy\"          : \"ProxyGroup\",\n    
\"EnableRandomDelay\"      : false,\n    \"RandomDelayMin\"         : 1000,\n    \"RandomDelayMax\"         : 2000,\n    \"SocketTimeout\"          : 20000,\n\n    \"Charset\"                : \"UTF-8\"\n  },\n\n  \"FromDeathPoint\"           : true,\n  //\"Metier\"                   : \"Stalker\",\n  \"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"QueryCookie\"              : \"__cf_bm=ESNiAVu2p_Y6rt7WJ7vJ7y33tb127eCuyKzMP8Rm7oc-1687420936-0-Ac+moMROCH1X8OGPiE5dFWa+RHj2/FPNrWoFM/s02gXSOrvdxl/x5663yOEIvOgeWxGcFIpZT4fYrytMMCPcuTEbOWFXbbgRNOUr65juI3JH\",\n\n  \"ExtendMode\"               : \"Instance\",\n  \"InstanceExtend\"           : \"FetchInletList\"\n}\n\n\n"
  },
  {
    "path": "system/setup/heists/UrukhaiHeists.json5",
    "content": "/**\n  Bean Nuts Hazelnut Sauron Nonabyte(Nonaron) Heistum-Urukhai-heists Model Configs\n  Templated Reprogrammable Auto-Crawler Sub-system\n\n  [ ReaverSystem ->override<Extended>-> OrcsSystem ]\n  Nomenclature:\n    for those Explicit-Index or Explicit-Inlet-Terminator naming with corporeal-fantasy-figure.\n    [ Troll, Orc, Ghoul, Hellhound ]\n\n    for those Conundrum-Index or Conundrum-Inlet-Terminator naming with incorporeal-fantasy-figure.\n    [ Wraith, Ghast ]\n  this->Archetypes: {\n    \"Troll\"     => [SimpletonIndex] DOM-Page-Index-Based Auto-Massive-Crawler [DataUnitRange: 1 GB ~ 1 TB],\n    \"Orc\"       => [SimpletonIndex] Sitemap-Index-Based Auto-Massive-Crawler [DataUnitRange: 1 GB ~ 1 TB],\n    \"Ghoul\"     => [SavageSniffer ] Recursive Whole-Site-Links-Parse Savage-Sniffer-Crawler [DataUnitRange: 100 GB ≤],\n    \"Cerberus\"  => [PursuitSniffer] Full-Text-Index-Based Pursuit-Sniffer-Crawler [DataUnitRange: Unlimited]\n  }\n\n  Templated:\n  this->Archetypes: [\n    DictionaryWebsites, WikiLikesWebsites, MovieWebsites, NewsWebsites\n  ]\n**/\n\n{\n  \"HeistType\"             : \"Templated\",\n  //\"HeistURL\"             : \"\",  // @Override\n  \"IndexPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\Urukhai\\\\index\\\\\",\n  \"SpoilPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\MegaH\\\\Sauron\\\\Urukhai\\\\pages\\\\\",\n  \"FragBase\"              : 10000,\n  \"FragRange\"             : 1000000,\n\n  \"WorkingPath\"           : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Sauron\\\\Heist\\\\Urukhai\\\\\",\n  \"InfoTracer\"            : \"heistInfo.log\",\n  \"ErrTracer\"             : \"heistErr.log\",\n  \"SysTracer\"             : \"heistSys.log\",\n  \"DyingMsgFile\"          : \"dyingMsg.json5\",  // Enable full-status tombstone dying msg. 
Empty string to close.\n  \"TaskFrom\"              : 0,\n  \"TaskTo\"                : 1000000, // 16906325\n  \"MaximumThread\"         : 5,\n  \"ReaverTasks\"           : 10000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 2000,\n    \"FileRetrieveTime\"         : 3\n  },\n  \"FromDeathPoint\"        : true,\n  \"Metier\"                : \"Reaver\",\n  \"SnifferMode\"           : \"IndexSniffer\",\n\n  \"IndexSniffer\"          : {\n    \"Type\"                  : \"NextPageBased\",\n    \"IndexApiHref\"          : \"wiki/Special:AllPages\",\n    \"NextPageClassName\"     : \".mw-allpages-nav\",\n    \"NextPageKeyWord\"       : \"Next page\",\n    // Defaulted\n    \"StorageFmt\"            : \"index_${id}.html\",\n    \"NextHrefFmt\"           : \"${DomainHref}${this}\"\n  },\n  /*\"IndexSniffer\"          : {\n    \"Type\"                  : \"DOMPageIdBased\",\n    \"IndexApiHref\"          : \"wiki/Special:AllPages\",\n    \"PageIdFrom\"            : 0,\n    \"PageIdTo\"              : 10000,\n  },*/\n  /*\"IndexSniffer\"          : {\n    \"Type\"                  : \"SitemapBased\",\n    \"SitemapApiHref\"        : \"sitemap_index.xml\",\n    \"MapIndexPath\"          : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\DouBan\\\\\"\n  },*/\n\n\n  \"ExtendMode\"            : \"Instance\",\n  //\"InstanceExtend\"        : \"WikipediaCN\",\n  \"InstanceExtend\"        : \"LatinIsSimple\",\n  //\"InstanceExtend\"        : \"AZLyrics\",\n  \"Children\"               : {\n    \"LatinIsSimple\"  : {\n      \"HeistURL\"             : \"https://www.latin-is-simple.com\",\n      \"IndexPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\LatinIsSimple\\\\index\\\\\",\n      \"SpoilPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\LatinIsSimple\\\\pages\\\\\",\n      \"WorkingPath\"           : 
\"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Sauron\\\\Heist\\\\LatinIsSimple\\\\\",\n      \"TaskFrom\"              : 0,\n      \"TaskTo\"                : 500000, // 2491995\n      \"MaximumThread\"         : 5,\n      \"ReaverTasks\"           : 10000,\n      \"Metier\"                : \"Embezzler\", //\"Stalker\",\n      \"SnifferMode\"           : \"GraphCerberus\",\n\n      \"IndexSniffer\"          : {\n        \"IndexApiHref\"          : \"/en/vocabulary/noun/\",\n        \"NextSelector\"          : \".next\",\n        \"NextPageKeyWord\"       : \"Next\",\n        \"NextHrefFmt\"           : \"${DomainHref}${IndexApiHref}${this}\"\n      },\n\n      \"GraphCerberus\"         : {\n        \"DomainHref\"            : \"https://www.latin-is-simple.com\",\n        \"DirectionPath\"         : [\n          {\n            \"StratumName\"    : \"MajorStratum\",\n            \"SiblingLayers\"  : [\n              {\n                \"LayerName\"             : \"EnumAllTypeWords\",\n                \"DomainHref\"            : \"https://www.latin-is-simple.com\",\n                \"IndexMajorHref\"        : \"/en/vocabulary/\",\n                \"LayerType\"             : \"SnifferLayer\",\n                \"StoragePathFmt\"        : \"\",\n                \"NextHrefFmt\"           : \"https:${this}\",\n                \"NextLayerSelector\"     : \"section:nth-child(7) p:nth-child(2) a\",\n                \"NextLayerFetchType\"    : \"BFS\",\n                \"NextLayers\"            : {\n                  \"LayerName\"             : \"FetchStorageLayer\",\n                  \"DomainHref\"            : \"https://www.latin-is-simple.com\",\n                  \"IndexMajorHref\"        : \"\",\n                  \"LayerType\"             : \"FetchStorageLayer\",\n                  \"CategoryIdxMap\"        : [ \"Nouns\", \"Verbs\", \"Adjectives\", \"Adverbs\", \"OtherWords\", \"Phrases\", \"Groups\" ],\n                  \"StoragePathFmt\"        
: \"${parent.IndexPath}${LayerID}_${this.CategoryIdxMap[LayerID]}/${PageID}.html\",\n                  \"NextHrefFmt\"           : \"${DomainHref}${IterMajorHref}${this}\",\n                  \"NextHrefKeyWord\"       : \"Next\",\n                  \"NextLayerSelector\"     : \".next a\",\n                  \"NextLayerFetchType\"    : \"ChainIterUntil\",\n                  \"NextLayers\"            : null\n                }\n              }\n            ]\n          }\n        ]\n\n      },\n\n      \"DOMCentaur\"            : {\n        \"Templated\": [\n          [{ // Make array\n            \"WordGroup\"     : [ \"$primary td:nth-child(1)\" ],\n            \"EnglishGroup\"  : [ \"$array td:nth-child(1)\"   ]\n          }],\n        ]\n      }\n\n\n    }\n\n  }\n}"
  },
  {
    "path": "system/setup/heists/Void.json5",
    "content": "{\n  \"HeistType\"                : \"Templated\",\n  \"HeistURL\"                 : \"https://pubchem.ncbi.nlm.nih.gov\",\n  //\"WorkingMode\"              : \"\", // Exhaust all possible inlet pages (Artist pages as inlet)\n\n  \"IndexPath\"                : \"${OmniumKingpin0}/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/PubChem/index/\",\n  \"SpoilPath\"                : \"${OmniumKingpin0}/ARBOmnium/EnderChest/Kingpin0/Sauron/Heist/PubChem/pages/\",\n\n  \"WorkingPath\"              : \"\\\\\\\\b-serverkingpin/ARBOmnium/EnderChest/ARBOmnium/Sauron/Heist/PubChem/\",\n\n  \"TaskFrom\"                 : 0,\n  \"TaskTo\"                   : 1,\n  \"MaximumThread\"            : 5,\n  \"FragBase\"                 : 10000,\n  \"FragRange\"                : 1000000,\n\n  \"FailureConf\"              : {\n    \"FailedFileSize\"           : 1000,\n    \"FileRetrieveTime\"         : 1,\n  },\n\n  \"FromDeathPoint\"           : true,\n  //\"Metier\"                   : \"Stalker\",\n  \"Metier\"                   : \"Reaver\",\n  //\"Metier\"                   : \"Embezzler\",\n\n  \"Children\"                  : {\n    \"Jesus\": {\n      \"IndexPath\"                : \"Hello hi, I am Jesucristo\",\n      \"parentk\"                  : \"${super.k}\",\n      \"TaskFrom\"                 : 777,\n      \"TaskTo\"                   : 778,\n\n      \"Orchestration\"         : {\n        \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n\n        \"Transactions\": [\n          //{ \"Name\": \"Jesus\", \"Type\": \"Sequential\" /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/ },\n        ]\n      },\n\n      \"HttpBrowser\"      : {\n        \"Charset\"                : \"UTF-10\"\n      }\n    },\n\n    \"Satan\": {\n      \"IndexPath\"                : \"Hello hi, I am Satanas\",\n      \"TaskFrom\"                 : 666,\n      \"TaskTo\"                   : 667,\n    },\n\n    \"Rick\": {\n      \"IndexPath\" 
               : \"Hello hi, I am Rick\",\n      \"TaskFrom\"                 : 137,\n      \"TaskTo\"                   : 138,\n\n      \"Children\"                  : {\n        \"Sauron\": {\n          \"IndexPath\"                : \"Hello hi, I am Sauron\",\n          \"TaskFrom\"                 : 999,\n          \"TaskTo\"                   : 1000,\n        },\n        \"Cthulhu\": {\n          \"IndexPath\"                : \"Hello hi, I am Cthulhu\",\n          \"TaskFrom\"                 : 1024,\n          \"TaskTo\"                   : 1025,\n        },\n        \"Absolute\": {\n          \"IndexPath\"                : \"Hello hi, I am Elder Brain\",\n          \"TaskFrom\"                 : 2048,\n          \"TaskTo\"                   : 2049,\n        }\n      },\n\n      \"Orchestration\"         : {\n        \"Type\": \"Parallel\",\n\n        \"Transactions\": [\n          { \"Name\": \"Sauron\", \"Type\": \"Sequential\"  },\n\n          {\n            \"Name\": \"Child\" , \"Type\": \"ParallelActions\", \"Transactions\": [\n            { \"Name\": \"Cthulhu\", \"Type\": \"Parallel\"  },\n            { \"Name\": \"Absolute\", \"Type\": \"Parallel\"  }\n            ]\n          },\n\n        ]\n      }\n    }\n  },\n\n  \"Orchestration\"         : {\n    \"Name\": \"VoidOrchestrator\",\n    \"Type\": \"Parallel\", // Enum: { Sequential, Parallel, Loop }\n\n    \"Transactions\": [\n      /* Enum: { Sequential, Parallel, SequentialActions, ParallelActions, LoopActions }*/\n      { \"Name\": \"Jesus\", \"Type\": \"Sequential\"  },\n      { \"Name\": \"Satan\", \"Type\": \"Sequential\"  },\n      { \"Name\": \"Rick\" , \"Type\": \"Sequential\"  }\n    ]\n  },\n\n  \"HttpBrowser\"      : {\n    \"RandomDelayMin\"         : 7418,\n\n    \"Charset\"                : \"UTF-9\"\n  }\n}"
  },
  {
    "path": "system/setup/heists/Wikipedia.json5",
    "content": "{\n  \"HeistType\"             : \"Templated\",\n  \"HeistURL\"             : \"https://en.wikipedia.org\",\n  \"IndexPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\Wiki\\\\index\\\\\",\n  \"SpoilPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\MegaH\\\\Sauron\\\\Wiki\\\\Pages\\\\\", //\"Z:\\\\ARBFacility\\\\Wiki\\\\Pages\\\\\",\n  \"FragBase\"              : 10000,\n  \"FragRange\"             : 1000000,\n\n  \"WorkingPath\"           : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Sauron\\\\Heist\\\\Wiki\\\\\",\n  \"TaskFrom\"              : 0,\n  \"TaskTo\"                : 1000000, // 16906325\n  \"MaximumThread\"         : 5,\n  \"ReaverTasks\"           : 10000,\n\n  \"FromDeathPoint\"        : true,\n  \"Metier\"                : \"Reaver\",\n\n  \"IndexSniffer\"          : {\n    \"Type\"                  : \"NextPageBased\",\n    \"IndexApiHref\"          : \"/wiki/Special:AllPages\",\n    \"NextSelector\"          : \".mw-allpages-nav\",\n    \"NextPageKeyWord\"       : \"Next page\"\n  },\n\n  \"ExtendMode\"            : \"Instance\",\n  //\"InstanceExtend\"        : \"WikipediaCN\",\n  //\"InstanceExtend\"        : \"YiXueCom\",\n  \"InstanceExtend\"        : \"Wiktionary\",\n  //\"InstanceExtend\"        : \"Wikipedia\",\n  \"Children\"               : {\n    \"Wikipedia\"    : {\n      \"Metier\"                : \"Embezzler\"\n    },\n    \"WikipediaCN\"  : {\n      \"HeistURL\"             : \"https://zh.wikipedia.org\",\n      \"IndexPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\WikiCN\\\\index\\\\\",\n      \"SpoilPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Kingpin0\\\\Sauron\\\\Heist\\\\WikiCN\\\\pages\\\\\",\n      \"WorkingPath\"           : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Sauron\\\\Heist\\\\WikiCN\\\\\",\n      
\"TaskFrom\"              : 0,\n      \"TaskTo\"                : 500000, // 2491995\n      \"MaximumThread\"         : 5,\n      \"ReaverTasks\"           : 10000,\n      \"Metier\"                : \"Reaver\",\n\n      \"IndexSniffer\"          : {\n        \"IndexApiHref\"          : \"/wiki/Special:%E6%89%80%E6%9C%89%E9%A1%B5%E9%9D%A2\",\n        \"NextSelector\"          : \".mw-allpages-nav\",\n        \"NextPageKeyWord\"       : \"下一页\"\n      }\n    },\n    \"Wiktionary\"   : {\n      \"HeistURL\"             : \"https://en.wiktionary.org\",\n      \"IndexPath\"             : \"${OmniumKingStream0}\\\\Sauron\\\\Heist\\\\Wiktionary\\\\index\\\\\",\n      \"SpoilPath\"             : \"${OmniumKingStream0}\\\\Sauron\\\\Heist\\\\Wiktionary\\\\pages\\\\\",\n      \"WorkingPath\"           : \"${OmniumFacility}\\\\Sauron\\\\Heist\\\\Wiktionary\\\\\",\n      \"TaskFrom\"              : 0,\n      \"TaskTo\"                : 7268746, // 2491995\n      \"MaximumThread\"         : 8,\n      \"ReaverTasks\"           : 1000000,\n      //\"SQLPath\"               : \"${OmniumKingpin0}\\\\Sauron\\\\Heist\\\\\",\n      \"SQLPath\"               : \"${OmniumFacility}\\\\SQLs/\",\n      //\"SQLPath\"               : \"E:\\\\\",\n\n      \"Metier\"                : \"Embezzler\",\n      //\"Metier\"                : \"Reaver\",\n      \"WordTypeProperties\"    : {\n        \"Noun\": \"Noun\", \"Proper noun\": \"ProperNoun\", \"Adjective\": \"Adjective\", \"Adverb\": \"Adverb\", \"Verb\": \"Verb\", \"Conjugation\": \"Conjugation\", \"Interjection\": \"Interjection\",\n        \"Article\": \"Article\", \"Preposition\": \"Preposition\", \"Abbreviations\": \"Abbreviations\", \"Abbreviation\": \"Abbreviation\", \"Determiner\": \"Determiner\", \"Particle\": \"Particle\",\n        \"Letter\": \"Letter\", \"Pronoun\": \"Pronoun\", \"Auxiliary\": \"Auxiliary\", \"Contraction\": \"Contraction\", \"Numeral\": \"Numeral\",\n        \"Proverb\": \"Proverb\", \"Participle\": 
\"Participle\", \"Conjunction\": \"Conjunction\", \"Phrase\": \"Phrase\", \"Number\": \"Number\",\n        \"Postposition\": \"PostPosition\", \"Symbol\": \"Symbol\", \"Suffix\": \"Suffix\", \"Root\": \"Root\", \"Prefix\": \"Prefix\", \"Han character\": \"HanCharacter\",\n        \"Decimal fractions\": \"DecimalFractions\", \"Affix\": \"Affix\",\"Stem\": \"Stem\", \"Preverb\": \"Preverb\", \"Infix\": \"Infix\",\"Interfix\": \"Interfix\", \"Romanization\": \"Romanization\",\n        \"Proverbs\": \"Proverbs\", \"Abstract nouns\": \"AbstractNouns\", \"Concrete nouns\": \"ConcreteNouns\"\n      },\n\n\n      \"IndexSniffer\"          : {\n        \"IndexApiHref\"          : \"/wiki/Special:AllPages\",\n        \"NextSelector\"          : \".mw-allpages-nav\",\n        \"NextPageKeyWord\"       : \"Next page\"\n      }\n    },\n    \"YiXueCom\"     : {\n      \"HeistURL\"             : \"https://www.yixue.com\",\n      \"IndexPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\YiXueCom\\\\index\\\\\",\n      \"SpoilPath\"             : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Heist\\\\YiXueCom\\\\Pages\\\\\",\n      \"WorkingPath\"           : \"\\\\\\\\B-SERVERKINGPIN\\\\ARBOmnium\\\\EnderChest\\\\Facility\\\\Sauron\\\\Heist\\\\YiXueCom\\\\\",\n      \"TaskFrom\"              : 0,\n      \"TaskTo\"                : 500000,\n      \"MaximumThread\"         : 5,\n      \"ReaverTasks\"           : 10000,\n      \"Metier\"                : \"Stalker\",\n\n      \"IndexSniffer\"          : {\n        \"IndexApiHref\"          : \"/%E7%89%B9%E6%AE%8A:%E6%89%80%E6%9C%89%E9%A1%B5%E9%9D%A2\",\n        \"NextSelector\"          : \".mw-allpages-nav\",\n        \"NextPageKeyWord\"       : \"下一页\"\n      }\n    }\n  }\n}"
  },
  {
    "path": "system/setup/lords/odin.json5",
    "content": "{\n  \"Name\"     : \"KernelOdinLord\",\n  \"MainClass\": \"com.walnut.odin.system.Odin\",\n  \"LifecycleWithPrimarySystem\" : false,\n\n\n  \"metaDependent\": {\n    \"atlasDatabase\"    : \"MySQLKingHydranium\",\n    \"taskInstrument\"   : \"MySQLKingHydranium\",\n    \"controlRPCDriver\" : \"TaskWolfKing\",\n    \"processManager\"   : \"__SystemTaskManager__\",\n  },\n\n  \"kernelConfig\": {\n    \"instanceTitleTimeFormat\": \"yyyy_MM_dd_HH_mm_ss\",\n    \"defaultDateTimeFormat\": \"yyyy-MM-dd HH:mm:ss\",\n    \"scheduleScanThreadCount\": 8,\n    \"scheduleScanIdWindow\": 1000\n  },\n\n  \"scheduler\": {\n\n    \"partitionName\": \"__DEFAULT__\",\n\n    \"globalDispatcher\": {\n      \"__DEFAULT__\": {\n        \"name\": \"__DEFAULT__\",\n\n        \"globalConcurrentInstance\": 100000,\n\n        \"quota\": {\n\n          \"L0\": {\n            \"priority\": 50,\n            \"maximumRatio\": 0.3,\n            \"minimumRatio\": 0.1,\n          },\n          \"L1\": {\n            \"priority\": 40,\n            \"maximumRatio\": 0.2,\n            \"minimumRatio\": 0.1,\n          },\n          \"L2\": {\n            \"priority\": 30,\n            \"maximumRatio\": 0.2,\n            \"minimumRatio\": 0.1,\n          },\n          \"L3\": {\n            \"priority\": 20,\n            \"maximumRatio\": 0.15,\n            \"minimumRatio\": 0.05,\n          },\n          \"L4\": {\n            \"priority\": 10,\n            \"maximumRatio\": 0.1,\n            \"minimumRatio\": 0.05,\n          },\n          \"L5\": {\n            \"priority\": 0,\n            \"maximumRatio\": 0.05,\n            \"minimumRatio\": 0.01,\n          },\n\n          \"unlimited\": {\n            \"priority\": 500,\n            \"maximumRatio\": -1.0,\n            \"minimumRatio\": -1.0,\n            \"maximumCnt\": -1,\n            \"minimumCnt\": -1,\n          },\n\n          \"default\": {\n            \"priority\": -1,\n            \"maximumRatio\": -1.0,\n            
\"minimumRatio\": -1.0,\n            \"maximumCnt\": 1000,\n            \"minimumCnt\": 1000,\n          },\n\n        }\n      }\n    },\n\n  },\n\n\n}"
  },
  {
    "path": "system/setup/lords/redqueen.json5",
    "content": "{\n  \"Name\"     : \"KernelRedQueenLord\",\n  \"MainClass\": \"com.acorn.redqueen.RedQueen\",\n  \"LifecycleWithPrimarySystem\" : true\n}"
  },
  {
    "path": "system/setup/lords/skynet.json5",
    "content": "{\n  \"Name\"     : \"KernelSkynetLord\",\n  \"MainClass\": \"com.acorn.skynet.Skynet\",\n  \"LifecycleWithPrimarySystem\" : true\n}"
  },
  {
    "path": "system/setup/sparta/AccountServiceSpring.json5",
    "content": "{\n  \"server\": {\n    \"port\": 8081,\n    \"servlet\": {\n      \"context-path\": \"/\"\n    }\n  },\n\n  \"spring\": {\n    \"servlet\": {\n      \"multipart\": {\n        \"max-file-size\": \"4096MB\",\n        \"max-request-size\": \"4096MB\"\n      }\n    },\n\n    \"datasource\": {\n      //        \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true\",\n      //        \"username\": \"root\",\n      //        \"password\": \"123456\",\n      \"url\": \"jdbc:mysql://b-serverkingpin:33062/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true\",\n      \"username\": \"root\",\n      \"password\": \"\",\n      \"driver-class-name\": \"com.mysql.cj.jdbc.Driver\"\n    },\n    \"mybatis\":{\n      \"mapper-locations\": \"classpath:mapper/*.xml\"\n    },\n\n    \"sparta\": {\n      \"datasource\": \"mysql\"\n    }\n  }\n}"
  },
  {
    "path": "system/setup/sparta/SpartaUCDNService.json5",
    "content": "{\n  \"server\": {\n    \"port\": 8082,\n    \"servlet\": {\n      \"context-path\": \"/\"\n    }\n  },\n\n  \"spring\": {\n    \"servlet\": {\n      \"multipart\": {\n        \"max-file-size\": \"4096MB\",\n        \"max-request-size\": \"4096MB\"\n      }\n    },\n\n    \"datasource\": {\n      //        \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true\",\n      //        \"username\": \"root\",\n      //        \"password\": \"123456\",\n      \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true&charset=utf8\",\n      \"username\": \"root\",\n      \"password\": \"123456\",\n      \"driver-class-name\": \"com.mysql.cj.jdbc.Driver\"\n    },\n    \"mybatis\":{\n      \"mapper-locations\": \"classpath:mapper/*.xml\"\n    },\n\n    \"sparta\": {\n      \"datasource\": \"mysql\",\n    }\n  },\n\n  \"service\": {\n    \"LocalUploadTemporaryWorkingDirectory\": \"D:/文件系统/temp\",\n    \"TemporaryFileExtends\": \".temp\",\n\n    \"PrimaryUniformFileSystem\": {\n      \"DefaultVolumeGuid\":'1b18a5e-0002af-0000-3c',\n      \"DefaultTempFilePath\": 'D:/文件系统/temp/'\n    },\n\n    \"PrimaryUniformVolumeManager\": {\n      \"DefaultVolumeGuid\":'1b18a5e-0002af-0000-3c',\n      \"DefaultTempFilePath\": 'D:/文件系统/temp/'\n    },\n\n\n    \"ClusterFileSynchronizationConfig\": {\n      \"fileFrameSize\": 972800,\n      \"batchTransmitMemberThreshold\": 10,\n      \"sessionExpiredTimeMillis\": 7200000,\n      \"fileCloudDistributeTransmitTopic\": \"ucdn-file-cloud-distribute-transmit-topic\",\n      \"fileCloudDistributeEventTopic\": \"ucdn-file-cloud-distribute-event-topic\",\n      \"fileServiceTransmitGroup\": \"UCDNFileServiceTransmitGroup\",\n      \"temporaryFileExtends\": \".temp\",\n      \"majorTemporaryClusterFileDirectory\": \"D:/文件系统/temp\",\n      \"localMasterTemporaryClusterFileDirectory\": \"D:/文件系统/frameTemp\" // 
Online, replace this as equals to the `majorTemporaryClusterFileDirectory`\n    }\n  }\n}"
  },
  {
    "path": "system/setup/sparta/SpartaUISService.json5",
    "content": "{\n  \"server\": {\n    \"port\": 8080,\n    \"servlet\": {\n      \"context-path\": \"/\"\n    }\n  },\n\n  \"spring\": {\n    \"servlet\": {\n      \"multipart\": {\n        \"max-file-size\": \"4096MB\",\n        \"max-request-size\": \"4096MB\"\n      }\n    },\n\n    \"datasource\": {\n      //        \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true\",\n      //        \"username\": \"root\",\n      //        \"password\": \"123456\",\n      \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true&charset=utf8\",\n      \"username\": \"root\",\n      \"password\": \"123456\",\n      \"driver-class-name\": \"com.mysql.cj.jdbc.Driver\",\n      //德鲁伊连接池配置\n      \"type\": \"com.alibaba.druid.pool.DruidDataSource\",\n      \"druid\": {\n        \"initial-size\": 5,\n        \"min-idle\": 5,\n        \"max-active\": 20,\n        \"max-wait\": 60000,\n        \"time-between-eviction-runs-millis\": 60000,\n        \"min-evictable-idle-time-millis\": 300000\n      }\n\n    },\n    \"mybatis\":{\n      \"mapper-locations\": \"classpath:mapper/*.xml\"\n    },\n\n    \"sparta\": {\n      \"datasource\": \"mysql\",\n    }\n  },\n\n  \"service\": {\n\n  }\n}"
  },
  {
    "path": "system/setup/sparta/SpartaUOFSService.json5",
    "content": "{\n  \"server\": {\n    \"port\": 8080,\n    \"servlet\": {\n      \"context-path\": \"/\"\n    }\n  },\n\n  \"spring\": {\n    \"servlet\": {\n      \"multipart\": {\n        \"max-file-size\": \"4096MB\",\n        \"max-request-size\": \"4096MB\"\n      }\n    },\n\n    \"datasource\": {\n      //        \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true\",\n      //        \"username\": \"root\",\n      //        \"password\": \"123456\",\n      \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true&charset=utf8\",\n      \"username\": \"root\",\n      \"password\": \"123456\",\n      \"driver-class-name\": \"com.mysql.cj.jdbc.Driver\"\n    },\n    \"mybatis\":{\n      \"mapper-locations\": \"classpath:mapper/*.xml\"\n    },\n\n    \"sparta\": {\n      \"datasource\": \"mysql\",\n    }\n  },\n\n  \"service\": {\n    \"LocalUploadTemporaryWorkingDirectory\": \"D:/文件系统/temp\",\n    \"TemporaryFileExtends\": \".temp\",\n\n    \"PrimaryUniformFileSystem\": {\n      \"DefaultVolumeGuid\":'1b18a5e-0002af-0000-3c',\n      \"DefaultTempFilePath\": 'D:/文件系统/temp/'\n    },\n\n    \"PrimaryUniformVolumeManager\": {\n      \"DefaultVolumeGuid\":'1b18a5e-0002af-0000-3c',\n      \"DefaultTempFilePath\": 'D:/文件系统/temp/'\n    },\n\n\n    \"ClusterFileSynchronizationConfig\": {\n      \"fileFrameSize\": 972800,\n      \"batchTransmitMemberThreshold\": 10,\n      \"sessionExpiredTimeMillis\": 7200000,\n      \"fileCloudDistributeTransmitTopic\": \"ucdn-file-cloud-distribute-transmit-topic\",\n      \"fileCloudDistributeEventTopic\": \"ucdn-file-cloud-distribute-event-topic\",\n      \"fileServiceTransmitGroup\": \"UCDNFileServiceTransmitGroup\",\n      \"temporaryFileExtends\": \".temp\",\n      \"majorTemporaryClusterFileDirectory\": \"D:/文件系统/temp\",\n      \"localMasterTemporaryClusterFileDirectory\": \"D:/文件系统/frameTemp\" // 
Online, replace this as equals to the `majorTemporaryClusterFileDirectory`\n    }\n  }\n}"
  },
  {
    "path": "system/setup/sparta/SpartaUTASKService.json5",
    "content": "{\n  \"server\": {\n    \"port\": 5080,\n    \"servlet\": {\n      \"context-path\": \"/\"\n    }\n  },\n\n  \"spring\": {\n    \"servlet\": {\n      \"multipart\": {\n        \"max-file-size\": \"4096MB\",\n        \"max-request-size\": \"4096MB\"\n      }\n    },\n\n    \"datasource\": {\n      //        \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true\",\n      //        \"username\": \"root\",\n      //        \"password\": \"123456\",\n      \"url\": \"jdbc:mysql://localhost:3306/hydranium?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true&autoReconnect=true&charset=utf8\",\n      \"username\": \"root\",\n      \"password\": \"$IDCWolf19310918\",\n      \"driver-class-name\": \"com.mysql.cj.jdbc.Driver\",\n      //德鲁伊连接池配置\n      \"type\": \"com.alibaba.druid.pool.DruidDataSource\",\n      \"druid\": {\n        \"initial-size\": 5,\n        \"min-idle\": 5,\n        \"max-active\": 20,\n        \"max-wait\": 60000,\n        \"time-between-eviction-runs-millis\": 60000,\n        \"min-evictable-idle-time-millis\": 300000\n      }\n\n    },\n    \"mybatis\":{\n      \"mapper-locations\": \"classpath:mapper/*.xml\"\n    },\n\n    \"sparta\": {\n      \"datasource\": \"mysql\",\n    }\n  },\n\n  \"service\": {\n\n  }\n}"
  }
]